Diffstat (limited to 'media')
-rw-r--r--media/common_time/ICommonClock.cpp4
-rw-r--r--media/common_time/utils.cpp2
-rw-r--r--media/img_utils/Android.mk15
-rw-r--r--media/img_utils/include/img_utils/ByteArrayOutput.h82
-rw-r--r--media/img_utils/include/img_utils/DngUtils.h132
-rw-r--r--media/img_utils/include/img_utils/EndianUtils.h250
-rw-r--r--media/img_utils/include/img_utils/FileInput.h76
-rw-r--r--media/img_utils/include/img_utils/FileOutput.h46
-rw-r--r--media/img_utils/include/img_utils/Input.h71
-rw-r--r--media/img_utils/include/img_utils/Orderable.h57
-rw-r--r--media/img_utils/include/img_utils/Output.h61
-rw-r--r--media/img_utils/include/img_utils/Pair.h (renamed from media/libstagefright/chromium_http/chromium_http_stub.cpp)38
-rw-r--r--media/img_utils/include/img_utils/SortedEntryVector.h53
-rw-r--r--media/img_utils/include/img_utils/StripSource.h53
-rw-r--r--media/img_utils/include/img_utils/TagDefinitions.h1392
-rw-r--r--media/img_utils/include/img_utils/TiffEntry.h130
-rw-r--r--media/img_utils/include/img_utils/TiffEntryImpl.h218
-rw-r--r--media/img_utils/include/img_utils/TiffHelpers.h132
-rw-r--r--media/img_utils/include/img_utils/TiffIfd.h166
-rw-r--r--media/img_utils/include/img_utils/TiffWritable.h60
-rw-r--r--media/img_utils/include/img_utils/TiffWriter.h324
-rw-r--r--media/img_utils/src/Android.mk62
-rw-r--r--media/img_utils/src/ByteArrayOutput.cpp54
-rw-r--r--media/img_utils/src/DngUtils.cpp282
-rw-r--r--media/img_utils/src/EndianUtils.cpp83
-rw-r--r--media/img_utils/src/FileInput.cpp85
-rw-r--r--media/img_utils/src/FileOutput.cpp79
-rw-r--r--media/img_utils/src/Input.cpp57
-rw-r--r--media/img_utils/src/Orderable.cpp39
-rw-r--r--media/img_utils/src/Output.cpp28
-rw-r--r--media/img_utils/src/SortedEntryVector.cpp44
-rw-r--r--media/img_utils/src/StripSource.cpp25
-rw-r--r--media/img_utils/src/TiffEntry.cpp234
-rw-r--r--media/img_utils/src/TiffEntryImpl.cpp25
-rw-r--r--media/img_utils/src/TiffIfd.cpp386
-rw-r--r--media/img_utils/src/TiffWritable.cpp31
-rw-r--r--media/img_utils/src/TiffWriter.cpp390
-rw-r--r--media/libcpustats/Android.mk4
-rw-r--r--media/libcpustats/ThreadCpuUsage.cpp5
-rw-r--r--media/libeffects/downmix/Android.mk6
-rw-r--r--media/libeffects/downmix/EffectDownmix.c140
-rw-r--r--media/libeffects/downmix/EffectDownmix.h1
-rw-r--r--media/libeffects/loudness/Android.mk5
-rw-r--r--media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp524
-rw-r--r--media/libeffects/lvm/wrapper/Bundle/EffectBundle.h17
-rw-r--r--media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp29
-rw-r--r--media/libeffects/preprocessing/Android.mk5
-rw-r--r--media/libeffects/preprocessing/PreProcessing.cpp6
-rw-r--r--media/libeffects/proxy/Android.mk1
-rw-r--r--media/libeffects/testlibs/Android.mk_35
-rw-r--r--media/libeffects/visualizer/Android.mk1
-rw-r--r--media/libeffects/visualizer/EffectVisualizer.cpp38
-rw-r--r--media/libmedia/Android.mk43
-rw-r--r--media/libmedia/AudioEffect.cpp21
-rw-r--r--media/libmedia/AudioPolicy.cpp115
-rw-r--r--media/libmedia/AudioRecord.cpp352
-rw-r--r--media/libmedia/AudioSystem.cpp520
-rw-r--r--media/libmedia/AudioTrack.cpp864
-rw-r--r--media/libmedia/AudioTrackShared.cpp200
-rw-r--r--media/libmedia/CharacterEncodingDetector.cpp473
-rw-r--r--media/libmedia/CharacterEncodingDetectorTables.h2092
-rw-r--r--media/libmedia/IAudioFlinger.cpp550
-rw-r--r--media/libmedia/IAudioFlingerClient.cpp4
-rw-r--r--media/libmedia/IAudioPolicyService.cpp605
-rw-r--r--media/libmedia/IAudioPolicyServiceClient.cpp83
-rw-r--r--media/libmedia/IAudioRecord.cpp19
-rw-r--r--media/libmedia/IAudioTrack.cpp16
-rw-r--r--media/libmedia/ICrypto.cpp26
-rw-r--r--media/libmedia/IDrm.cpp123
-rw-r--r--media/libmedia/IEffect.cpp5
-rw-r--r--media/libmedia/IHDCP.cpp2
-rw-r--r--media/libmedia/IMediaCodecList.cpp163
-rw-r--r--media/libmedia/IMediaDeathNotifier.cpp4
-rw-r--r--media/libmedia/IMediaHTTPConnection.cpp182
-rw-r--r--media/libmedia/IMediaHTTPService.cpp58
-rw-r--r--media/libmedia/IMediaLogService.cpp8
-rw-r--r--media/libmedia/IMediaMetadataRetriever.cpp35
-rw-r--r--media/libmedia/IMediaPlayer.cpp25
-rw-r--r--media/libmedia/IMediaPlayerService.cpp149
-rw-r--r--media/libmedia/IMediaRecorder.cpp18
-rw-r--r--media/libmedia/IOMX.cpp194
-rw-r--r--media/libmedia/IRemoteDisplayClient.cpp2
-rw-r--r--media/libmedia/IStreamSource.cpp20
-rw-r--r--media/libmedia/JetPlayer.cpp28
-rw-r--r--media/libmedia/MediaCodecInfo.cpp266
-rw-r--r--media/libmedia/MediaProfiles.cpp47
-rw-r--r--media/libmedia/MediaScanner.cpp20
-rw-r--r--media/libmedia/MediaScannerClient.cpp203
-rw-r--r--media/libmedia/MemoryLeakTrackUtil.cpp2
-rw-r--r--media/libmedia/MidiIoWrapper.cpp92
-rw-r--r--media/libmedia/SingleStateQueue.cpp1
-rw-r--r--media/libmedia/SoundPool.cpp909
-rw-r--r--media/libmedia/SoundPoolThread.cpp114
-rw-r--r--media/libmedia/SoundPoolThread.h66
-rw-r--r--media/libmedia/StringArray.h83
-rw-r--r--media/libmedia/ToneGenerator.cpp1426
-rw-r--r--media/libmedia/Visualizer.cpp19
-rw-r--r--media/libmedia/autodetect.cpp885
-rw-r--r--media/libmedia/autodetect.h37
-rw-r--r--media/libmedia/mediametadataretriever.cpp17
-rw-r--r--media/libmedia/mediaplayer.cpp152
-rw-r--r--media/libmedia/mediarecorder.cpp12
-rw-r--r--media/libmediaplayerservice/Android.mk9
-rw-r--r--media/libmediaplayerservice/Crypto.cpp8
-rw-r--r--media/libmediaplayerservice/Crypto.h2
-rw-r--r--media/libmediaplayerservice/Drm.cpp101
-rw-r--r--media/libmediaplayerservice/Drm.h18
-rw-r--r--media/libmediaplayerservice/HDCP.cpp6
-rw-r--r--media/libmediaplayerservice/MediaPlayerFactory.cpp150
-rw-r--r--media/libmediaplayerservice/MediaPlayerFactory.h23
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.cpp480
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.h90
-rw-r--r--media/libmediaplayerservice/MediaRecorderClient.cpp3
-rw-r--r--media/libmediaplayerservice/MetadataRetrieverClient.cpp28
-rw-r--r--media/libmediaplayerservice/MetadataRetrieverClient.h5
-rw-r--r--media/libmediaplayerservice/MidiFile.cpp558
-rw-r--r--media/libmediaplayerservice/MidiFile.h113
-rw-r--r--media/libmediaplayerservice/MidiMetadataRetriever.cpp92
-rw-r--r--media/libmediaplayerservice/MidiMetadataRetriever.h51
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.cpp8
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.h4
-rw-r--r--media/libmediaplayerservice/StagefrightRecorder.cpp509
-rw-r--r--media/libmediaplayerservice/StagefrightRecorder.h38
-rw-r--r--media/libmediaplayerservice/TestPlayerStub.cpp8
-rw-r--r--media/libmediaplayerservice/TestPlayerStub.h4
-rw-r--r--media/libmediaplayerservice/VideoFrameScheduler.cpp482
-rw-r--r--media/libmediaplayerservice/VideoFrameScheduler.h99
-rw-r--r--media/libmediaplayerservice/nuplayer/Android.mk6
-rw-r--r--media/libmediaplayerservice/nuplayer/GenericSource.cpp1497
-rw-r--r--media/libmediaplayerservice/nuplayer/GenericSource.h160
-rw-r--r--media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp54
-rw-r--r--media/libmediaplayerservice/nuplayer/HTTPLiveSource.h14
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayer.cpp1643
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayer.h114
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp361
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h59
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp888
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h82
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp200
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h97
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp425
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h89
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp286
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDriver.h17
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp1129
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h150
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerSource.h44
-rw-r--r--media/libmediaplayerservice/nuplayer/RTSPSource.cpp89
-rw-r--r--media/libmediaplayerservice/nuplayer/RTSPSource.h9
-rw-r--r--media/libmediaplayerservice/nuplayer/StreamingSource.cpp136
-rw-r--r--media/libmediaplayerservice/nuplayer/StreamingSource.h16
-rw-r--r--media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp144
-rw-r--r--media/libmediaplayerservice/nuplayer/mp4/MP4Source.h53
-rw-r--r--media/libnbaio/Android.mk7
-rw-r--r--media/libnbaio/AudioBufferProviderSource.cpp8
-rw-r--r--media/libnbaio/AudioStreamInSource.cpp23
-rw-r--r--media/libnbaio/AudioStreamOutSink.cpp21
-rw-r--r--media/libnbaio/MonoPipe.cpp42
-rw-r--r--media/libnbaio/MonoPipeReader.cpp4
-rw-r--r--media/libnbaio/NBAIO.cpp134
-rw-r--r--media/libnbaio/NBLog.cpp86
-rw-r--r--media/libnbaio/Pipe.cpp15
-rw-r--r--media/libnbaio/PipeReader.cpp6
-rw-r--r--media/libnbaio/SourceAudioBufferProvider.cpp10
-rw-r--r--media/libstagefright/AACExtractor.cpp4
-rw-r--r--media/libstagefright/AACWriter.cpp21
-rw-r--r--media/libstagefright/ACodec.cpp1950
-rw-r--r--media/libstagefright/AMRExtractor.cpp8
-rw-r--r--media/libstagefright/AMRWriter.cpp14
-rw-r--r--media/libstagefright/Android.mk24
-rw-r--r--media/libstagefright/AudioPlayer.cpp31
-rw-r--r--media/libstagefright/AudioSource.cpp24
-rw-r--r--media/libstagefright/AwesomePlayer.cpp174
-rw-r--r--media/libstagefright/CameraSource.cpp154
-rw-r--r--media/libstagefright/CameraSourceTimeLapse.cpp13
-rw-r--r--media/libstagefright/ClockEstimator.cpp177
-rw-r--r--media/libstagefright/CodecBase.cpp (renamed from media/libstagefright/include/chromium_http_stub.h)26
-rw-r--r--media/libstagefright/DataSource.cpp80
-rw-r--r--media/libstagefright/DataURISource.cpp109
-rw-r--r--media/libstagefright/ESDS.cpp2
-rw-r--r--media/libstagefright/FLACExtractor.cpp70
-rw-r--r--media/libstagefright/HTTPBase.cpp53
-rw-r--r--media/libstagefright/MP3Extractor.cpp3
-rw-r--r--media/libstagefright/MPEG2TSWriter.cpp7
-rw-r--r--media/libstagefright/MPEG4Extractor.cpp699
-rw-r--r--media/libstagefright/MPEG4Writer.cpp171
-rw-r--r--media/libstagefright/MediaAdapter.cpp4
-rw-r--r--media/libstagefright/MediaBuffer.cpp7
-rw-r--r--media/libstagefright/MediaBufferGroup.cpp8
-rw-r--r--media/libstagefright/MediaCodec.cpp913
-rw-r--r--media/libstagefright/MediaCodecList.cpp687
-rw-r--r--media/libstagefright/MediaCodecSource.cpp831
-rw-r--r--media/libstagefright/MediaDefs.cpp7
-rw-r--r--media/libstagefright/MediaExtractor.cpp3
-rw-r--r--media/libstagefright/MediaMuxer.cpp29
-rw-r--r--media/libstagefright/MediaSource.cpp13
-rw-r--r--media/libstagefright/MidiExtractor.cpp325
-rw-r--r--media/libstagefright/NuCachedSource2.cpp83
-rw-r--r--media/libstagefright/NuMediaExtractor.cpp8
-rw-r--r--media/libstagefright/OMXClient.cpp33
-rw-r--r--media/libstagefright/OMXCodec.cpp663
-rw-r--r--media/libstagefright/OggExtractor.cpp122
-rw-r--r--media/libstagefright/SampleIterator.cpp9
-rw-r--r--media/libstagefright/SampleTable.cpp212
-rw-r--r--media/libstagefright/SkipCutBuffer.cpp3
-rw-r--r--media/libstagefright/StagefrightMediaScanner.cpp78
-rw-r--r--media/libstagefright/StagefrightMetadataRetriever.cpp86
-rw-r--r--media/libstagefright/SurfaceMediaSource.cpp56
-rw-r--r--media/libstagefright/TimedEventQueue.cpp19
-rw-r--r--media/libstagefright/Utils.cpp209
-rw-r--r--media/libstagefright/VBRISeeker.cpp9
-rw-r--r--media/libstagefright/WAVExtractor.cpp10
-rw-r--r--media/libstagefright/avc_utils.cpp82
-rw-r--r--media/libstagefright/chromium_http/Android.mk37
-rw-r--r--media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp355
-rw-r--r--media/libstagefright/chromium_http/DataUriSource.cpp68
-rw-r--r--media/libstagefright/chromium_http/support.cpp559
-rw-r--r--media/libstagefright/chromium_http/support.h178
-rw-r--r--media/libstagefright/chromium_http_stub.cpp102
-rw-r--r--media/libstagefright/codecs/aacdec/Android.mk5
-rw-r--r--media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp372
-rw-r--r--media/libstagefright/codecs/aacdec/DrcPresModeWrap.h62
-rw-r--r--media/libstagefright/codecs/aacdec/SoftAAC2.cpp920
-rw-r--r--media/libstagefright/codecs/aacdec/SoftAAC2.h29
-rw-r--r--media/libstagefright/codecs/aacenc/AACEncoder.cpp2
-rw-r--r--media/libstagefright/codecs/aacenc/Android.mk6
-rw-r--r--media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp83
-rw-r--r--media/libstagefright/codecs/aacenc/SoftAACEncoder2.h2
-rw-r--r--media/libstagefright/codecs/aacenc/basic_op/basic_op.h3
-rw-r--r--media/libstagefright/codecs/aacenc/basic_op/oper_32b.c12
-rw-r--r--media/libstagefright/codecs/aacenc/src/aacenc.c16
-rw-r--r--media/libstagefright/codecs/aacenc/src/aacenc_core.c1
-rw-r--r--media/libstagefright/codecs/aacenc/src/adj_thr.c4
-rw-r--r--media/libstagefright/codecs/aacenc/src/bitbuffer.c23
-rw-r--r--media/libstagefright/codecs/aacenc/src/bitenc.c5
-rw-r--r--media/libstagefright/codecs/aacenc/src/block_switch.c29
-rw-r--r--media/libstagefright/codecs/aacenc/src/dyn_bits.c3
-rw-r--r--media/libstagefright/codecs/aacenc/src/ms_stereo.c1
-rw-r--r--media/libstagefright/codecs/aacenc/src/psy_main.c6
-rw-r--r--media/libstagefright/codecs/aacenc/src/qc_main.c8
-rw-r--r--media/libstagefright/codecs/aacenc/src/sf_estim.c2
-rw-r--r--media/libstagefright/codecs/aacenc/src/tns.c38
-rw-r--r--media/libstagefright/codecs/aacenc/src/transform.c1
-rw-r--r--media/libstagefright/codecs/amrnb/common/Android.mk4
-rw-r--r--media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h10
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp8
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/div_s.cpp4
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp8
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp8
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/mult_r.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/norm_l.cpp4
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/norm_s.cpp4
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/residu.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/shr.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/common/src/weight_a.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/dec/Android.mk27
-rw-r--r--media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp4
-rw-r--r--media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp8
-rw-r--r--media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp8
-rw-r--r--media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp150
-rw-r--r--media/libstagefright/codecs/amrnb/enc/Android.mk6
-rw-r--r--media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp24
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/convolve.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp4
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp6
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/levinson.cpp10
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp2
-rw-r--r--media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp2
-rw-r--r--media/libstagefright/codecs/amrwb/Android.mk4
-rw-r--r--media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp2
-rw-r--r--media/libstagefright/codecs/amrwbenc/Android.mk7
-rw-r--r--media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp2
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s1
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s1
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s1
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s1
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/autocorr.c4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/convolve.c4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/pitch_f4.c3
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/q_pulse.c2
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/syn_filt.c4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/wb_vad.c2
-rw-r--r--media/libstagefright/codecs/avc/common/Android.mk2
-rw-r--r--media/libstagefright/codecs/avc/common/src/deblock.cpp2
-rw-r--r--media/libstagefright/codecs/avc/enc/Android.mk8
-rw-r--r--media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp469
-rw-r--r--media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h25
-rw-r--r--media/libstagefright/codecs/avc/enc/src/findhalfpel.cpp4
-rw-r--r--media/libstagefright/codecs/avc/enc/src/init.cpp4
-rw-r--r--media/libstagefright/codecs/avc/enc/src/rate_control.cpp2
-rw-r--r--media/libstagefright/codecs/avcdec/Android.mk27
-rw-r--r--media/libstagefright/codecs/avcdec/SoftAVCDec.cpp808
-rw-r--r--media/libstagefright/codecs/avcdec/SoftAVCDec.h177
-rw-r--r--media/libstagefright/codecs/avcenc/Android.mk30
-rw-r--r--media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp1335
-rw-r--r--media/libstagefright/codecs/avcenc/SoftAVCEnc.h309
-rw-r--r--media/libstagefright/codecs/common/Android.mk2
-rw-r--r--media/libstagefright/codecs/common/cmnMemory.c18
-rw-r--r--media/libstagefright/codecs/flac/enc/Android.mk2
-rw-r--r--media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp42
-rw-r--r--media/libstagefright/codecs/g711/dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/g711/dec/SoftG711.cpp20
-rw-r--r--media/libstagefright/codecs/g711/dec/SoftG711.h3
-rw-r--r--media/libstagefright/codecs/gsm/dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/gsm/dec/SoftGSM.cpp32
-rw-r--r--media/libstagefright/codecs/gsm/dec/SoftGSM.h3
-rw-r--r--media/libstagefright/codecs/hevcdec/Android.mk30
-rw-r--r--media/libstagefright/codecs/hevcdec/SoftHEVC.cpp770
-rw-r--r--media/libstagefright/codecs/hevcdec/SoftHEVC.h125
-rw-r--r--media/libstagefright/codecs/m4v_h263/dec/Android.mk4
-rw-r--r--media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp83
-rw-r--r--media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h4
-rw-r--r--media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp4
-rw-r--r--media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp15
-rw-r--r--media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp6
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/Android.mk8
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp452
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h30
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h2
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp8
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp8
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h2
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp2
-rw-r--r--media/libstagefright/codecs/mp3dec/Android.mk6
-rw-r--r--media/libstagefright/codecs/mp3dec/SoftMP3.cpp136
-rw-r--r--media/libstagefright/codecs/mp3dec/SoftMP3.h2
-rw-r--r--media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_arm.s210
-rw-r--r--media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_arm.s369
-rw-r--r--media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_wm.asm366
-rw-r--r--media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_arm.s237
-rw-r--r--media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s1
-rw-r--r--media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_wm.asm231
-rw-r--r--media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp2
-rw-r--r--media/libstagefright/codecs/mpeg2dec/Android.mk27
-rw-r--r--media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp756
-rw-r--r--media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h178
-rw-r--r--media/libstagefright/codecs/on2/dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/on2/dec/SoftVPX.cpp98
-rw-r--r--media/libstagefright/codecs/on2/dec/SoftVPX.h6
-rw-r--r--media/libstagefright/codecs/on2/enc/Android.mk5
-rw-r--r--media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp743
-rw-r--r--media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h117
-rw-r--r--media/libstagefright/codecs/on2/h264dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp91
-rw-r--r--media/libstagefright/codecs/on2/h264dec/SoftAVC.h5
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/inc/basetype.h0
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_BitDec_s.h18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_Bitstream.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCTTable.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCT_s.h22
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_MaskTable.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_s.h16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armOMX.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes.h60
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes_s.h16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm11/build_vc.pl17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_Bitstream.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_IDCTTable.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_MaskTable.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVC.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVCCOMM_s.h18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/api/armVCM4P10_CAVLCTables.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_CAVLCTables.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DequantTables_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_QuantTables_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Clip8_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Lookup_Tables.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_SetPredDir_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s16
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM.h17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_BitDec_s.h18
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Bitstream.h17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCTTable.h17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCT_s.h22
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_MaskTable.h17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Version.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_s.h16
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armOMX.h17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes.h60
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes_s.h16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/build_vc.pl17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_Bitstream.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_IDCTTable.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_MaskTable.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVC.h17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVCCOMM_s.h18
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC_s.h0
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s18
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/api/armVCM4P10_CAVLCTables.h17
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s18
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_CAVLCTables.c17
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DequantTables_s.s18
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s18
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s18
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_QuantTables_s.s18
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s18
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s18
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c17
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s16
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c17
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s18
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Average_4x_Align_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingChroma_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingLuma_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S33
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S24
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Align_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Copy_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Interpolate_Chroma_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_QuantTables_s.S16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_TransformResidual4x4_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_UnpackBlock4x4_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DeblockLuma_I.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_InterpolateLuma_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntraChroma_8x8_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_16x16_s.S16
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_4x4_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantChromaDCFromPair_s.S17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantLumaDCFromPair_s.S17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h17
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Clip8_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s16
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Lookup_Tables.c17
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_SetPredDir_s.s16
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c17
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s18
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s16
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s16
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/src/armVC_Version.c0
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM_Bitstream.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armOMX.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/omxtypes.h60
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/omxdl/reference/build_vc.pl17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM_Bitstream.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/api/armVC.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_Average.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_SAD.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_16x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_8x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock_SAD.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy16x16.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy8x8.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ExpandFrame_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_LimitMVToRect.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_16x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_8x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/api/armVCM4P10_CAVLCTables.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CAVLCTables.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CompareMotionCostToMV.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DeBlockPixel.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DequantTables.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_FwdTransformResidual4x4.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfDiag_Luma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfHor_Luma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfVer_Luma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Chroma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Luma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_PredictIntraDC4x4.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_QuantTables.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_SADQuar.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_TransformResidual4x4.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock2x2.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock4x4.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_Average_4x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Half.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Integer.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Quarter.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_GetVLCInfo.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfHor_Luma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfVer_Luma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateLuma.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_ChromaDC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_LumaDC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformResidualAndAdd.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEGetBufSize.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEInit.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MotionEstimationMB.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_16x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_4x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_8x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SAD_4x.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SATD_4x4.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SubAndTransformQDQResidual.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_ChromaDC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_LumaDC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_DCT_Table.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_ACDCPredict.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Half.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Integer.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CheckVLCEscapeMode.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CompareMV.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DCT_Table.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_intra.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_EncodeVLCZigzag_intra.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLCBuffer.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLDBuffer.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_GetVLCBits.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_PutVLCBits.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_SetPredDir.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_16x16.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_8x8.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_16x16.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_8x8.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DCT8x8blk.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeMV.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_Inter.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraACVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraDCVLC.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_FindMVpred.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_IDCT8x8blk.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MCReconBlock.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEGetBufSize.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEInit.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MotionEstimationMB.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInter_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantIntra_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvInter_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_inter.c17
-rw-r--r--media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_intra.c17
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c0
-rw-r--r--media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c4
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_cfg.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.c2
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_container.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c5
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_seq_param_set.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c3
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c2
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.h0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.c0
-rw-r--r--[-rwxr-xr-x]media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.h0
-rw-r--r--media/libstagefright/codecs/opus/Android.mk4
-rw-r--r--media/libstagefright/codecs/opus/dec/Android.mk19
-rw-r--r--media/libstagefright/codecs/opus/dec/SoftOpus.cpp540
-rw-r--r--media/libstagefright/codecs/opus/dec/SoftOpus.h94
-rw-r--r--media/libstagefright/codecs/raw/Android.mk2
-rw-r--r--media/libstagefright/codecs/raw/SoftRaw.cpp2
-rw-r--r--media/libstagefright/codecs/vorbis/dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp84
-rw-r--r--media/libstagefright/codecs/vorbis/dec/SoftVorbis.h2
-rw-r--r--media/libstagefright/colorconversion/SoftwareRenderer.cpp138
-rw-r--r--media/libstagefright/data/media_codecs_google_audio.xml92
-rw-r--r--media/libstagefright/data/media_codecs_google_telephony.xml25
-rwxr-xr-xmedia/libstagefright/data/media_codecs_google_video.xml113
-rw-r--r--media/libstagefright/data/media_codecs_google_video_le.xml108
-rw-r--r--media/libstagefright/foundation/ABitReader.cpp68
-rw-r--r--media/libstagefright/foundation/ABuffer.cpp31
-rw-r--r--media/libstagefright/foundation/ADebug.cpp117
-rw-r--r--media/libstagefright/foundation/AHierarchicalStateMachine.cpp2
-rw-r--r--media/libstagefright/foundation/ALooper.cpp8
-rw-r--r--media/libstagefright/foundation/ALooperRoster.cpp149
-rw-r--r--media/libstagefright/foundation/AMessage.cpp152
-rw-r--r--media/libstagefright/foundation/ANetworkSession.cpp12
-rw-r--r--media/libstagefright/foundation/AString.cpp90
-rw-r--r--media/libstagefright/foundation/AStringUtils.cpp77
-rw-r--r--media/libstagefright/foundation/AWakeLock.cpp109
-rw-r--r--media/libstagefright/foundation/Android.mk9
-rw-r--r--media/libstagefright/foundation/base64.cpp6
-rw-r--r--media/libstagefright/http/Android.mk28
-rw-r--r--media/libstagefright/http/HTTPHelper.cpp70
-rw-r--r--media/libstagefright/http/HTTPHelper.h31
-rw-r--r--media/libstagefright/http/MediaHTTP.cpp205
-rw-r--r--media/libstagefright/httplive/Android.mk5
-rw-r--r--media/libstagefright/httplive/LiveSession.cpp1120
-rw-r--r--media/libstagefright/httplive/LiveSession.h130
-rw-r--r--media/libstagefright/httplive/M3UParser.cpp364
-rw-r--r--media/libstagefright/httplive/M3UParser.h17
-rw-r--r--media/libstagefright/httplive/PlaylistFetcher.cpp1179
-rw-r--r--media/libstagefright/httplive/PlaylistFetcher.h89
-rw-r--r--media/libstagefright/id3/Android.mk4
-rw-r--r--media/libstagefright/id3/ID3.cpp106
-rw-r--r--media/libstagefright/include/AACEncoder.h2
-rw-r--r--media/libstagefright/include/AwesomePlayer.h6
-rw-r--r--media/libstagefright/include/ChromiumHTTPDataSource.h125
-rw-r--r--media/libstagefright/include/FragmentedMP4Parser.h274
-rw-r--r--media/libstagefright/include/HTTPBase.h14
-rw-r--r--media/libstagefright/include/ID3.h4
-rw-r--r--media/libstagefright/include/MPEG2TSExtractor.h2
-rw-r--r--media/libstagefright/include/MPEG4Extractor.h11
-rw-r--r--media/libstagefright/include/MidiExtractor.h95
-rw-r--r--media/libstagefright/include/NuCachedSource2.h3
-rw-r--r--media/libstagefright/include/OMX.h8
-rw-r--r--media/libstagefright/include/OMXNodeInstance.h39
-rw-r--r--media/libstagefright/include/SDPLoader.h8
-rw-r--r--media/libstagefright/include/SampleIterator.h4
-rw-r--r--media/libstagefright/include/SampleTable.h13
-rw-r--r--media/libstagefright/include/SimpleSoftOMXComponent.h5
-rw-r--r--media/libstagefright/include/SoftVideoDecoderOMXComponent.h31
-rw-r--r--media/libstagefright/include/SoftVideoEncoderOMXComponent.h105
-rw-r--r--media/libstagefright/include/SoftwareRenderer.h10
-rw-r--r--media/libstagefright/include/StagefrightMetadataRetriever.h1
-rw-r--r--media/libstagefright/include/TimedEventQueue.h2
-rw-r--r--media/libstagefright/include/WVMExtractor.h3
-rw-r--r--media/libstagefright/include/avc_utils.h4
-rw-r--r--media/libstagefright/matroska/Android.mk2
-rw-r--r--media/libstagefright/matroska/MatroskaExtractor.cpp241
-rw-r--r--media/libstagefright/matroska/MatroskaExtractor.h12
-rw-r--r--media/libstagefright/mp4/FragmentedMP4Parser.cpp1993
-rw-r--r--media/libstagefright/mp4/TrackFragment.cpp364
-rw-r--r--media/libstagefright/mp4/TrackFragment.h122
-rw-r--r--media/libstagefright/mpeg2ts/ATSParser.cpp109
-rw-r--r--media/libstagefright/mpeg2ts/ATSParser.h18
-rw-r--r--media/libstagefright/mpeg2ts/Android.mk2
-rw-r--r--media/libstagefright/mpeg2ts/AnotherPacketSource.cpp204
-rw-r--r--media/libstagefright/mpeg2ts/AnotherPacketSource.h15
-rw-r--r--media/libstagefright/mpeg2ts/ESQueue.cpp288
-rw-r--r--media/libstagefright/mpeg2ts/ESQueue.h5
-rw-r--r--media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp15
-rw-r--r--media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp20
-rw-r--r--media/libstagefright/omx/Android.mk6
-rw-r--r--media/libstagefright/omx/GraphicBufferSource.cpp312
-rw-r--r--media/libstagefright/omx/GraphicBufferSource.h68
-rw-r--r--media/libstagefright/omx/OMX.cpp41
-rw-r--r--media/libstagefright/omx/OMXMaster.cpp2
-rw-r--r--media/libstagefright/omx/OMXNodeInstance.cpp755
-rw-r--r--media/libstagefright/omx/SimpleSoftOMXComponent.cpp24
-rw-r--r--media/libstagefright/omx/SoftOMXComponent.cpp42
-rwxr-xr-x[-rw-r--r--]media/libstagefright/omx/SoftOMXPlugin.cpp11
-rw-r--r--media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp251
-rw-r--r--media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp616
-rw-r--r--media/libstagefright/omx/tests/Android.mk4
-rw-r--r--media/libstagefright/omx/tests/OMXHarness.cpp29
-rw-r--r--media/libstagefright/rtsp/AAMRAssembler.cpp4
-rw-r--r--media/libstagefright/rtsp/AAVCAssembler.cpp10
-rw-r--r--media/libstagefright/rtsp/AMPEG2TSAssembler.cpp4
-rw-r--r--media/libstagefright/rtsp/AMPEG2TSAssembler.h2
-rw-r--r--media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp6
-rw-r--r--media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp20
-rw-r--r--media/libstagefright/rtsp/APacketSource.cpp4
-rw-r--r--media/libstagefright/rtsp/ARTPConnection.cpp7
-rw-r--r--media/libstagefright/rtsp/ARTPWriter.cpp10
-rw-r--r--media/libstagefright/rtsp/ARTSPConnection.cpp6
-rw-r--r--media/libstagefright/rtsp/ARawAudioAssembler.cpp4
-rw-r--r--media/libstagefright/rtsp/ARawAudioAssembler.h2
-rw-r--r--media/libstagefright/rtsp/ASessionDescription.cpp5
-rw-r--r--media/libstagefright/rtsp/Android.mk13
-rw-r--r--media/libstagefright/rtsp/MyHandler.h61
-rw-r--r--media/libstagefright/rtsp/SDPLoader.cpp34
-rw-r--r--media/libstagefright/tests/Android.mk45
-rw-r--r--media/libstagefright/tests/SurfaceMediaSource_test.cpp23
-rw-r--r--media/libstagefright/tests/Utils_test.cpp203
-rw-r--r--media/libstagefright/timedtext/Android.mk3
-rw-r--r--media/libstagefright/timedtext/TimedTextDriver.cpp11
-rw-r--r--media/libstagefright/timedtext/TimedTextPlayer.cpp3
-rw-r--r--media/libstagefright/timedtext/TimedTextPlayer.h2
-rw-r--r--media/libstagefright/timedtext/TimedTextSRTSource.h2
-rw-r--r--media/libstagefright/timedtext/TimedTextSource.h2
-rw-r--r--media/libstagefright/timedtext/test/Android.mk8
-rw-r--r--media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp14
-rw-r--r--media/libstagefright/webm/Android.mk23
-rw-r--r--media/libstagefright/webm/EbmlUtil.cpp108
-rw-r--r--media/libstagefright/webm/EbmlUtil.h50
-rw-r--r--media/libstagefright/webm/LinkedBlockingQueue.h79
-rw-r--r--media/libstagefright/webm/WebmConstants.h133
-rw-r--r--media/libstagefright/webm/WebmElement.cpp367
-rw-r--r--media/libstagefright/webm/WebmElement.h127
-rw-r--r--media/libstagefright/webm/WebmFrame.cpp83
-rw-r--r--media/libstagefright/webm/WebmFrame.h46
-rw-r--r--media/libstagefright/webm/WebmFrameThread.cpp399
-rw-r--r--media/libstagefright/webm/WebmFrameThread.h160
-rw-r--r--media/libstagefright/webm/WebmWriter.cpp550
-rw-r--r--media/libstagefright/webm/WebmWriter.h130
-rw-r--r--media/libstagefright/wifi-display/VideoFormats.cpp2
-rw-r--r--media/libstagefright/wifi-display/rtp/RTPSender.cpp5
-rw-r--r--media/libstagefright/wifi-display/source/Converter.cpp49
-rw-r--r--media/libstagefright/wifi-display/source/MediaPuller.cpp2
-rw-r--r--media/libstagefright/wifi-display/source/PlaybackSession.cpp8
-rw-r--r--media/libstagefright/wifi-display/source/PlaybackSession.h3
-rw-r--r--media/libstagefright/wifi-display/source/RepeaterSource.cpp3
-rw-r--r--media/libstagefright/wifi-display/source/TSPacketizer.cpp4
-rw-r--r--media/libstagefright/wifi-display/source/WifiDisplaySource.cpp38
-rw-r--r--media/libstagefright/yuv/Android.mk2
-rw-r--r--media/libstagefright/yuv/YUVImage.cpp12
-rw-r--r--media/mediaserver/Android.mk11
-rw-r--r--media/mediaserver/main_mediaserver.cpp4
-rw-r--r--media/mtp/Android.mk3
-rw-r--r--media/mtp/MtpDataPacket.cpp162
-rw-r--r--media/mtp/MtpDataPacket.h25
-rw-r--r--media/mtp/MtpDevice.cpp45
-rw-r--r--media/mtp/MtpDevice.h2
-rw-r--r--media/mtp/MtpDeviceInfo.cpp33
-rw-r--r--media/mtp/MtpDeviceInfo.h4
-rw-r--r--media/mtp/MtpObjectInfo.cpp42
-rw-r--r--media/mtp/MtpObjectInfo.h2
-rw-r--r--media/mtp/MtpPacket.cpp2
-rw-r--r--media/mtp/MtpPacket.h8
-rw-r--r--media/mtp/MtpProperty.cpp106
-rw-r--r--media/mtp/MtpProperty.h14
-rw-r--r--media/mtp/MtpRequestPacket.cpp20
-rw-r--r--media/mtp/MtpRequestPacket.h4
-rw-r--r--media/mtp/MtpServer.cpp190
-rw-r--r--media/mtp/MtpServer.h1
-rw-r--r--media/mtp/MtpStorageInfo.cpp24
-rw-r--r--media/mtp/MtpStorageInfo.h2
-rw-r--r--media/mtp/MtpStringBuffer.cpp13
-rw-r--r--media/mtp/MtpStringBuffer.h2
-rw-r--r--media/mtp/MtpUtils.cpp3
-rw-r--r--media/ndk/Android.mk52
-rw-r--r--media/ndk/NdkMediaCodec.cpp505
-rw-r--r--media/ndk/NdkMediaCrypto.cpp121
-rw-r--r--media/ndk/NdkMediaCryptoPriv.h41
-rw-r--r--media/ndk/NdkMediaDrm.cpp728
-rw-r--r--media/ndk/NdkMediaExtractor.cpp360
-rw-r--r--media/ndk/NdkMediaFormat.cpp260
-rw-r--r--media/ndk/NdkMediaFormatPriv.h44
-rw-r--r--media/ndk/NdkMediaMuxer.cpp107
887 files changed, 51749 insertions, 19527 deletions
diff --git a/media/common_time/ICommonClock.cpp b/media/common_time/ICommonClock.cpp
index 25ae69e..19b7d6e 100644
--- a/media/common_time/ICommonClock.cpp
+++ b/media/common_time/ICommonClock.cpp
@@ -206,7 +206,7 @@ class BpCommonClock : public BpInterface<ICommonClock>
const sp<ICommonClockListener>& listener) {
Parcel data, reply;
data.writeInterfaceToken(ICommonClock::getInterfaceDescriptor());
- data.writeStrongBinder(listener->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(listener));
status_t status = remote()->transact(REGISTER_LISTENER, data, &reply);
@@ -221,7 +221,7 @@ class BpCommonClock : public BpInterface<ICommonClock>
const sp<ICommonClockListener>& listener) {
Parcel data, reply;
data.writeInterfaceToken(ICommonClock::getInterfaceDescriptor());
- data.writeStrongBinder(listener->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(listener));
status_t status = remote()->transact(UNREGISTER_LISTENER, data, &reply);
if (status == OK) {
diff --git a/media/common_time/utils.cpp b/media/common_time/utils.cpp
index 6539171..91cf2fd 100644
--- a/media/common_time/utils.cpp
+++ b/media/common_time/utils.cpp
@@ -59,7 +59,7 @@ void serializeSockaddr(Parcel* p, const struct sockaddr_storage* addr) {
}
void deserializeSockaddr(const Parcel* p, struct sockaddr_storage* addr) {
- memset(addr, 0, sizeof(addr));
+ memset(addr, 0, sizeof(*addr));
addr->ss_family = p->readInt32();
switch(addr->ss_family) {
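
The one-line fix above matters because addr is a pointer parameter: sizeof(addr) is the size of the pointer (4 or 8 bytes), while sizeof(*addr) is the size of the whole sockaddr_storage structure, so the original code left most of the structure unzeroed. A standalone illustration (not part of the change):

    #include <stdio.h>
    #include <string.h>
    #include <sys/socket.h>

    static void zeroStorage(struct sockaddr_storage* addr) {
        // Wrong: zeroes only sizeof(struct sockaddr_storage*) bytes -- 4 or 8.
        // memset(addr, 0, sizeof(addr));
        // Right: zeroes the entire structure.
        memset(addr, 0, sizeof(*addr));
    }

    int main() {
        struct sockaddr_storage ss;
        zeroStorage(&ss);
        printf("pointer: %zu bytes, struct: %zu bytes\n", sizeof(&ss), sizeof(ss));
        return 0;
    }
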
diff --git a/media/img_utils/Android.mk b/media/img_utils/Android.mk
new file mode 100644
index 0000000..1cd00bd
--- /dev/null
+++ b/media/img_utils/Android.mk
@@ -0,0 +1,15 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+include $(call all-subdir-makefiles)
diff --git a/media/img_utils/include/img_utils/ByteArrayOutput.h b/media/img_utils/include/img_utils/ByteArrayOutput.h
new file mode 100644
index 0000000..ba73977
--- /dev/null
+++ b/media/img_utils/include/img_utils/ByteArrayOutput.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_BYTE_ARRAY_OUTPUT_H
+#define IMG_UTILS_BYTE_ARRAY_OUTPUT_H
+
+#include <img_utils/Output.h>
+
+#include <utils/Errors.h>
+#include <utils/Vector.h>
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class that accumulates written bytes into a buffer.
+ */
+class ANDROID_API ByteArrayOutput : public Output {
+ public:
+
+ ByteArrayOutput();
+
+ virtual ~ByteArrayOutput();
+
+ /**
+ * Open this ByteArrayOutput.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Write bytes from the given buffer. The number of bytes given in the count
+ * argument will be written. Bytes will be written from the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
+
+ /**
+ * Close this ByteArrayOutput.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+
+ /**
+ * Get current size of the array of bytes written.
+ */
+ virtual size_t getSize() const;
+
+ /**
+ * Get pointer to array of bytes written. It is not valid to use this pointer if
+ * open, write, or close is called after this method.
+ */
+ virtual const uint8_t* getArray() const;
+
+ protected:
+ Vector<uint8_t> mByteArray;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_BYTE_ARRAY_OUTPUT_H*/
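
A minimal usage sketch for the ByteArrayOutput class declared above (illustrative only, not part of the change):

    #include <img_utils/ByteArrayOutput.h>
    #include <utils/Errors.h>

    using namespace android;
    using namespace android::img_utils;

    // Accumulate a few bytes in memory and read them back.
    static status_t demoByteArrayOutput() {
        ByteArrayOutput out;
        status_t res = out.open();
        if (res != OK) return res;

        const uint8_t header[] = { 'D', 'N', 'G', 0 };
        // Write all 4 bytes, starting at index 0 of the source buffer.
        res = out.write(header, /*offset*/0, /*count*/sizeof(header));
        if (res != OK) return res;

        // Valid only until the next open/write/close call on 'out'.
        const uint8_t* bytes = out.getArray();
        size_t size = out.getSize();          // 4 here
        (void) bytes; (void) size;

        return out.close();
    }
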
diff --git a/media/img_utils/include/img_utils/DngUtils.h b/media/img_utils/include/img_utils/DngUtils.h
new file mode 100644
index 0000000..4389b02
--- /dev/null
+++ b/media/img_utils/include/img_utils/DngUtils.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_DNG_UTILS_H
+#define IMG_UTILS_DNG_UTILS_H
+
+#include <img_utils/ByteArrayOutput.h>
+#include <img_utils/EndianUtils.h>
+
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <utils/RefBase.h>
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+#define NELEMS(x) ((int) (sizeof(x) / sizeof((x)[0])))
+
+/**
+ * Utility class for building values for the OpcodeList tags specified
+ * in the Adobe DNG 1.4 spec.
+ */
+class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
+ public:
+ enum CfaLayout {
+ CFA_RGGB = 0,
+ CFA_GRBG,
+ CFA_GBRG,
+ CFA_BGGR,
+ };
+
+ OpcodeListBuilder();
+ virtual ~OpcodeListBuilder();
+
+ /**
+ * Get the total size of this opcode list in bytes.
+ */
+ virtual size_t getSize() const;
+
+ /**
+ * Get the number of opcodes defined in this list.
+ */
+ virtual uint32_t getCount() const;
+
+ /**
+ * Write the opcode list into the given buffer. This buffer
+ * must be able to hold at least as many bytes as returned
+ * by calling the getSize() method.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t buildOpList(/*out*/ uint8_t* buf) const;
+
+ /**
+ * Add GainMap opcode(s) for the given metadata parameters. The given
+ * CFA layout must match the layout of the shading map passed into the
+ * lensShadingMap parameter.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addGainMapsForMetadata(uint32_t lsmWidth,
+ uint32_t lsmHeight,
+ uint32_t activeAreaTop,
+ uint32_t activeAreaLeft,
+ uint32_t activeAreaBottom,
+ uint32_t activeAreaRight,
+ CfaLayout cfa,
+ const float* lensShadingMap);
+
+
+ /**
+ * Add a GainMap opcode with the given fields. The mapGains array
+ * must have mapPointsV * mapPointsH * mapPlanes elements.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addGainMap(uint32_t top,
+ uint32_t left,
+ uint32_t bottom,
+ uint32_t right,
+ uint32_t plane,
+ uint32_t planes,
+ uint32_t rowPitch,
+ uint32_t colPitch,
+ uint32_t mapPointsV,
+ uint32_t mapPointsH,
+ double mapSpacingV,
+ double mapSpacingH,
+ double mapOriginV,
+ double mapOriginH,
+ uint32_t mapPlanes,
+ const float* mapGains);
+
+ // TODO: Add other Opcode methods
+ protected:
+ static const uint32_t FLAG_OPTIONAL = 0x1u;
+ static const uint32_t FLAG_OPTIONAL_FOR_PREVIEW = 0x2u;
+
+ enum {
+ GAIN_MAP_ID = 9,
+ LSM_R_IND = 0,
+ LSM_GE_IND = 1,
+ LSM_GO_IND = 2,
+ LSM_B_IND = 3,
+ };
+
+ uint32_t mCount;
+ ByteArrayOutput mOpList;
+ EndianOutput mEndianOut;
+
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_DNG_UTILS_H*/
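
A hedged sketch of how the OpcodeListBuilder declared above could be driven to produce an opcode-list blob for a tiny lens shading map; the map values and active-area geometry are invented for illustration and are not part of the change:

    #include <img_utils/DngUtils.h>
    #include <utils/Errors.h>
    #include <vector>

    using namespace android;
    using namespace android::img_utils;

    // Build GainMap opcodes for a 2x2, 4-plane (RGGB) lens shading map.
    static status_t buildShadingOpcodes(std::vector<uint8_t>* outBlob) {
        OpcodeListBuilder builder;

        const uint32_t lsmWidth = 2, lsmHeight = 2;
        // 2x2 samples, 4 planes per sample; all gains 1.0 for the sketch.
        const float lensShadingMap[lsmWidth * lsmHeight * 4] = {
            1.f, 1.f, 1.f, 1.f,   1.f, 1.f, 1.f, 1.f,
            1.f, 1.f, 1.f, 1.f,   1.f, 1.f, 1.f, 1.f,
        };

        status_t res = builder.addGainMapsForMetadata(
                lsmWidth, lsmHeight,
                /*activeAreaTop*/0, /*activeAreaLeft*/0,
                /*activeAreaBottom*/480, /*activeAreaRight*/640,
                OpcodeListBuilder::CFA_RGGB,
                lensShadingMap);
        if (res != OK) return res;

        // buildOpList() needs a buffer of at least getSize() bytes.
        outBlob->resize(builder.getSize());
        return builder.buildOpList(outBlob->data());
    }
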
diff --git a/media/img_utils/include/img_utils/EndianUtils.h b/media/img_utils/include/img_utils/EndianUtils.h
new file mode 100644
index 0000000..e99be1a
--- /dev/null
+++ b/media/img_utils/include/img_utils/EndianUtils.h
@@ -0,0 +1,250 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_ENDIAN_UTILS
+#define IMG_UTILS_ENDIAN_UTILS
+
+#include <img_utils/Output.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+#include <endian.h>
+#include <assert.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Endianness types supported.
+ */
+enum ANDROID_API Endianness {
+ UNDEFINED_ENDIAN, // Default endianness will be used.
+ BIG,
+ LITTLE
+};
+
+/**
+ * Convert from the native device endianness to big endian.
+ */
+template<typename T>
+T convertToBigEndian(T in);
+
+/**
+ * Convert from the native device endianness to little endian.
+ */
+template<typename T>
+T convertToLittleEndian(T in);
+
+/**
+ * A utility class for writing to an Output with the given endianness.
+ */
+class ANDROID_API EndianOutput : public Output {
+ public:
+ /**
+ * Wrap the given Output. Calling write methods will result in
+ * writes to this output.
+ */
+ EndianOutput(Output* out, Endianness end=LITTLE);
+
+ virtual ~EndianOutput();
+
+ /**
+ * Call open on the wrapped output.
+ */
+ virtual status_t open();
+
+ /**
+ * Call close on the wrapped output.
+ */
+ virtual status_t close();
+
+ /**
+ * Set the endianness to use when writing.
+ */
+ virtual void setEndianness(Endianness end);
+
+ /**
+ * Get the currently configured endianness.
+ */
+ virtual Endianness getEndianness() const;
+
+ /**
+ * Get the current number of bytes written by this EndianOutput.
+ */
+ virtual uint32_t getCurrentOffset() const;
+
+
+ // TODO: switch write methods to uint32_t instead of size_t,
+ // the max size of a TIFF file is bounded
+
+ /**
+ * The following methods will write elements from the given input buffer to the output.
+ * Count elements in the buffer will be written with the endianness set for this
+ * EndianOutput. If the given offset is greater than zero, that many elements will
+ * be skipped in the buffer before writing.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int8_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const uint16_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int16_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const uint32_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int32_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const uint64_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int64_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const float* buf, size_t offset, size_t count);
+
+ virtual status_t write(const double* buf, size_t offset, size_t count);
+
+ protected:
+ template<typename T>
+ inline status_t writeHelper(const T* buf, size_t offset, size_t count);
+
+ uint32_t mOffset;
+ Output* mOutput;
+ Endianness mEndian;
+};
+
+template<typename T>
+inline status_t EndianOutput::writeHelper(const T* buf, size_t offset, size_t count) {
+ assert(offset <= count);
+ status_t res = OK;
+ size_t size = sizeof(T);
+ switch(mEndian) {
+ case BIG: {
+ for (size_t i = offset; i < count; ++i) {
+ T tmp = convertToBigEndian<T>(buf[i]);
+ if ((res = mOutput->write(reinterpret_cast<uint8_t*>(&tmp), 0, size))
+ != OK) {
+ return res;
+ }
+ mOffset += size;
+ }
+ break;
+ }
+ case LITTLE: {
+ for (size_t i = offset; i < count; ++i) {
+ T tmp = convertToLittleEndian<T>(buf[i]);
+ if ((res = mOutput->write(reinterpret_cast<uint8_t*>(&tmp), 0, size))
+ != OK) {
+ return res;
+ }
+ mOffset += size;
+ }
+ break;
+ }
+ default: {
+ return BAD_VALUE;
+ }
+ }
+ return res;
+}
+
+template<>
+inline uint8_t convertToBigEndian(uint8_t in) {
+ return in;
+}
+
+template<>
+inline int8_t convertToBigEndian(int8_t in) {
+ return in;
+}
+
+template<>
+inline uint16_t convertToBigEndian(uint16_t in) {
+ return htobe16(in);
+}
+
+template<>
+inline int16_t convertToBigEndian(int16_t in) {
+ return htobe16(in);
+}
+
+template<>
+inline uint32_t convertToBigEndian(uint32_t in) {
+ return htobe32(in);
+}
+
+template<>
+inline int32_t convertToBigEndian(int32_t in) {
+ return htobe32(in);
+}
+
+template<>
+inline uint64_t convertToBigEndian(uint64_t in) {
+ return htobe64(in);
+}
+
+template<>
+inline int64_t convertToBigEndian(int64_t in) {
+ return htobe64(in);
+}
+
+template<>
+inline uint8_t convertToLittleEndian(uint8_t in) {
+ return in;
+}
+
+template<>
+inline int8_t convertToLittleEndian(int8_t in) {
+ return in;
+}
+
+template<>
+inline uint16_t convertToLittleEndian(uint16_t in) {
+ return htole16(in);
+}
+
+template<>
+inline int16_t convertToLittleEndian(int16_t in) {
+ return htole16(in);
+}
+
+template<>
+inline uint32_t convertToLittleEndian(uint32_t in) {
+ return htole32(in);
+}
+
+template<>
+inline int32_t convertToLittleEndian(int32_t in) {
+ return htole32(in);
+}
+
+template<>
+inline uint64_t convertToLittleEndian(uint64_t in) {
+ return htole64(in);
+}
+
+template<>
+inline int64_t convertToLittleEndian(int64_t in) {
+ return htole64(in);
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_ENDIAN_UTILS*/
+
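
A short sketch combining EndianOutput with the ByteArrayOutput added earlier in this change, writing two values in big-endian order (illustrative only):

    #include <img_utils/ByteArrayOutput.h>
    #include <img_utils/EndianUtils.h>
    #include <utils/Errors.h>

    using namespace android;
    using namespace android::img_utils;

    // Write one uint16 and one uint32 big-endian into an in-memory byte array.
    static status_t demoEndianOutput() {
        ByteArrayOutput byteOut;
        EndianOutput out(&byteOut, BIG);      // wrap the raw output, force big-endian

        status_t res = out.open();
        if (res != OK) return res;

        uint16_t byteOrder = 0x4D4Du;         // 'MM', the big-endian TIFF byte order mark
        uint32_t ifdOffset = 8u;
        if ((res = out.write(&byteOrder, /*offset*/0, /*count*/1)) != OK) return res;
        if ((res = out.write(&ifdOffset, /*offset*/0, /*count*/1)) != OK) return res;

        // getCurrentOffset() now reports 6 bytes written (2 + 4).
        uint32_t written = out.getCurrentOffset();
        (void) written;

        return out.close();
    }
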
diff --git a/media/img_utils/include/img_utils/FileInput.h b/media/img_utils/include/img_utils/FileInput.h
new file mode 100644
index 0000000..4d4f22b
--- /dev/null
+++ b/media/img_utils/include/img_utils/FileInput.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_FILE_INPUT_H
+#define IMG_UTILS_FILE_INPUT_H
+
+#include <img_utils/Input.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/String8.h>
+#include <stdio.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class for reading from a file.
+ */
+class ANDROID_API FileInput : public Input {
+ public:
+ /**
+ * Create a file input for the given path.
+ */
+ FileInput(String8 path);
+
+ virtual ~FileInput();
+
+ /**
+ * Open a file descriptor to the path given in the constructor.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Read bytes from the file into the given buffer. At most, the number
+ * of bytes given in the count argument will be read. Bytes will be written
+ * into the given buffer starting at the index given in the offset argument.
+ *
+ * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ virtual ssize_t read(uint8_t* buf, size_t offset, size_t count);
+
+ /**
+ * Close the file descriptor to the path given in the constructor.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+ private:
+ FILE *mFp;
+ String8 mPath;
+ bool mOpen;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+
+#endif /*IMG_UTILS_FILE_INPUT_H*/
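
A sketch of a read loop over the FileInput class declared above; the path and chunk size are placeholders, not part of the change:

    #include <img_utils/FileInput.h>
    #include <utils/Errors.h>
    #include <utils/String8.h>

    using namespace android;
    using namespace android::img_utils;

    // Read a file in 4 KiB chunks and report its total size.
    static status_t countFileBytes(const String8& path, size_t* totalOut) {
        FileInput in(path);
        status_t res = in.open();
        if (res != OK) return res;

        uint8_t buf[4096];
        size_t total = 0;
        for (;;) {
            ssize_t n = in.read(buf, /*offset*/0, /*count*/sizeof(buf));
            if (n == NOT_ENOUGH_DATA) break;                 // end of file
            if (n < 0) { in.close(); return (status_t) n; }  // real error
            total += (size_t) n;
        }
        *totalOut = total;
        return in.close();
    }
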
diff --git a/media/img_utils/include/img_utils/FileOutput.h b/media/img_utils/include/img_utils/FileOutput.h
new file mode 100644
index 0000000..fd5be27
--- /dev/null
+++ b/media/img_utils/include/img_utils/FileOutput.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_FILE_OUTPUT_H
+#define IMG_UTILS_FILE_OUTPUT_H
+
+#include <img_utils/Output.h>
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/String8.h>
+#include <stdio.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+class ANDROID_API FileOutput : public Output {
+ public:
+ FileOutput(String8 path);
+ virtual ~FileOutput();
+ virtual status_t open();
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
+ virtual status_t close();
+ private:
+ FILE *mFp;
+ String8 mPath;
+ bool mOpen;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_FILE_OUTPUT_H*/
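
FileOutput is the write-side counterpart of FileInput; a short usage sketch (path is a placeholder):

    #include <img_utils/FileOutput.h>
    #include <utils/Errors.h>
    #include <utils/String8.h>

    using namespace android;
    using namespace android::img_utils;

    // Write 'size' bytes from 'blob' to the file at 'path'.
    static status_t writeBlob(const String8& path, const uint8_t* blob, size_t size) {
        FileOutput out(path);
        status_t res = out.open();
        if (res != OK) return res;
        if ((res = out.write(blob, /*offset*/0, /*count*/size)) != OK) {
            out.close();
            return res;
        }
        return out.close();
    }
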
diff --git a/media/img_utils/include/img_utils/Input.h b/media/img_utils/include/img_utils/Input.h
new file mode 100644
index 0000000..6a03647
--- /dev/null
+++ b/media/img_utils/include/img_utils/Input.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_INPUT_H
+#define IMG_UTILS_INPUT_H
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class used as a source of bytes.
+ */
+class ANDROID_API Input {
+ public:
+ virtual ~Input();
+
+ /**
+ * Open this Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Read bytes into the given buffer. At most, the number of bytes given in the
+ * count argument will be read. Bytes will be written into the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns the number of bytes read, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ virtual ssize_t read(uint8_t* buf, size_t offset, size_t count) = 0;
+
+ /**
+ * Skips bytes in the input.
+ *
+ * Returns the number of bytes skipped, or NOT_ENOUGH_DATA if at the end of the file. If an
+ * error has occurred, this will return a negative error code other than NOT_ENOUGH_DATA.
+ */
+ virtual ssize_t skip(size_t count);
+
+ /**
+ * Close the Input. It is not valid to call open on a previously closed Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+
+#endif /*IMG_UTILS_INPUT_H*/
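
Only read() is pure virtual in the Input interface above, so a concrete source can get by with just that override. A hypothetical in-memory implementation sketch (the class is not part of the change):

    #include <img_utils/Input.h>
    #include <utils/Errors.h>
    #include <string.h>

    using namespace android;
    using namespace android::img_utils;

    // Hypothetical Input backed by a caller-owned byte buffer.
    class MemoryInput : public Input {
      public:
        MemoryInput(const uint8_t* data, size_t size)
            : mData(data), mSize(size), mPos(0) {}

        virtual ssize_t read(uint8_t* buf, size_t offset, size_t count) {
            if (mPos >= mSize) return NOT_ENOUGH_DATA;      // end of data
            size_t avail = mSize - mPos;
            size_t toCopy = (count < avail) ? count : avail;
            memcpy(buf + offset, mData + mPos, toCopy);     // fill buf starting at 'offset'
            mPos += toCopy;
            return (ssize_t) toCopy;
        }

      private:
        const uint8_t* mData;
        size_t mSize;
        size_t mPos;
    };
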
diff --git a/media/img_utils/include/img_utils/Orderable.h b/media/img_utils/include/img_utils/Orderable.h
new file mode 100644
index 0000000..87253a4
--- /dev/null
+++ b/media/img_utils/include/img_utils/Orderable.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_ORDERABLE
+#define IMG_UTILS_ORDERABLE
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+#define COMPARE_DEF(op) \
+inline bool operator op (const Orderable& orderable) const;
+
+/**
+ * Subclasses of Orderable can be compared and sorted. This is
+ * intended to be used to create sorted arrays of TIFF entries
+ * and IFDs.
+ */
+class ANDROID_API Orderable {
+ public:
+ virtual ~Orderable();
+
+ /**
+ * Comparison operators are based on the value returned
+ * from this method.
+ */
+ virtual uint32_t getComparableValue() const = 0;
+
+ COMPARE_DEF(>)
+ COMPARE_DEF(<)
+ COMPARE_DEF(>=)
+ COMPARE_DEF(<=)
+ COMPARE_DEF(==)
+ COMPARE_DEF(!=)
+};
+
+#undef COMPARE_DEF
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_ORDERABLE*/
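
The COMPARE_DEF macro above declares all six comparison operators in terms of getComparableValue(), so a subclass only supplies that single method. A hypothetical sketch (the class is illustrative):

    #include <img_utils/Orderable.h>
    #include <stdint.h>

    using namespace android;
    using namespace android::img_utils;

    // Hypothetical Orderable subclass ordered by a numeric id.
    class TaggedThing : public Orderable {
      public:
        explicit TaggedThing(uint32_t id) : mId(id) {}

        // <, <=, >, >=, == and != all compare this value.
        virtual uint32_t getComparableValue() const { return mId; }

      private:
        uint32_t mId;
    };

    // Usage: TaggedThing(3) < TaggedThing(7) evaluates to true.
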
diff --git a/media/img_utils/include/img_utils/Output.h b/media/img_utils/include/img_utils/Output.h
new file mode 100644
index 0000000..35fae23
--- /dev/null
+++ b/media/img_utils/include/img_utils/Output.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_OUTPUT_H
+#define IMG_UTILS_OUTPUT_H
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class used to output bytes.
+ */
+class ANDROID_API Output {
+ public:
+ virtual ~Output();
+
+ /**
+ * Open this Output.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Write bytes from the given buffer. The number of bytes given in the count
+ * argument will be written. Bytes will be written from the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count) = 0;
+
+ /**
+ * Close this Output. It is not valid to call open on a previously closed Output.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_OUTPUT_H*/
diff --git a/media/libstagefright/chromium_http/chromium_http_stub.cpp b/media/img_utils/include/img_utils/Pair.h
index 289f6de..d651cac 100644
--- a/media/libstagefright/chromium_http/chromium_http_stub.cpp
+++ b/media/img_utils/include/img_utils/Pair.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,25 +14,31 @@
* limitations under the License.
*/
-#include <dlfcn.h>
+#ifndef IMG_UTILS_PAIR_H
+#define IMG_UTILS_PAIR_H
-#include <include/chromium_http_stub.h>
-#include <include/ChromiumHTTPDataSource.h>
-#include <include/DataUriSource.h>
+#include <cutils/compiler.h>
namespace android {
+namespace img_utils {
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
- return new ChromiumHTTPDataSource(flags);
-}
+/**
+ * Generic pair utility class. Nothing special here.
+ */
+template<typename F, typename S>
+class ANDROID_API Pair {
+ public:
+ F first;
+ S second;
+
+ Pair() {}
+
+ Pair(const Pair& o) : first(o.first), second(o.second) {}
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- return ChromiumHTTPDataSource::UpdateProxyConfig(host, port, exclusionList);
-}
+ Pair(const F& f, const S& s) : first(f), second(s) {}
+};
-DataSource *createDataUriSource(const char *uri) {
- return new DataUriSource(uri);
-}
+} /*namespace img_utils*/
+} /*namespace android*/
-}
+#endif /*IMG_UTILS_PAIR_H*/
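
A trivial usage sketch for the Pair template above (values are illustrative):

    #include <img_utils/Pair.h>
    #include <stdint.h>

    using namespace android;
    using namespace android::img_utils;

    // Pair exposes only its public 'first' and 'second' members.
    static uint32_t demoPair() {
        Pair<uint32_t, uint32_t> dims(640u, 480u);   // width, height
        Pair<uint32_t, uint32_t> copy(dims);         // copy constructor
        return copy.first * copy.second;             // 307200
    }
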
diff --git a/media/img_utils/include/img_utils/SortedEntryVector.h b/media/img_utils/include/img_utils/SortedEntryVector.h
new file mode 100644
index 0000000..f059a82
--- /dev/null
+++ b/media/img_utils/include/img_utils/SortedEntryVector.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_SORTED_ENTRY_VECTOR_H
+#define IMG_UTILS_SORTED_ENTRY_VECTOR_H
+
+#include <img_utils/TiffEntry.h>
+
+#include <utils/StrongPointer.h>
+#include <utils/SortedVector.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Subclass of SortedVector that has been extended to
+ * do comparisons/lookups based on the tag ID of the entries.
+ */
+class SortedEntryVector : public SortedVector<sp<TiffEntry> > {
+ public:
+ virtual ~SortedEntryVector();
+
+ /**
+ * Returns the index of the entry with the given tag ID, or
+ * -1 if none exists.
+ */
+ ssize_t indexOfTag(uint16_t tag) const;
+
+ protected:
+ /**
+ * Compare tag ID.
+ */
+ virtual int do_compare(const void* lhs, const void* rhs) const;
+};
+
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_SORTED_ENTRY_VECTOR_H*/
diff --git a/media/img_utils/include/img_utils/StripSource.h b/media/img_utils/include/img_utils/StripSource.h
new file mode 100644
index 0000000..b5c6b60
--- /dev/null
+++ b/media/img_utils/include/img_utils/StripSource.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_STRIP_SOURCE_H
+#define IMG_UTILS_STRIP_SOURCE_H
+
+#include <img_utils/Output.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * This class acts as a data source for strips set in a TiffIfd.
+ */
+class ANDROID_API StripSource {
+ public:
+ virtual ~StripSource();
+
+ /**
+ * Write count bytes to the stream.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t writeToStream(Output& stream, uint32_t count) = 0;
+
+ /**
+ * Return the source IFD.
+ */
+ virtual uint32_t getIfd() const = 0;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_STRIP_SOURCE_H*/
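
Per the comment above, StripSource supplies strip data for a TiffIfd; a hypothetical buffer-backed implementation sketch (not part of the change):

    #include <img_utils/StripSource.h>
    #include <img_utils/Output.h>
    #include <utils/Errors.h>

    using namespace android;
    using namespace android::img_utils;

    // Hypothetical StripSource serving strip data from a caller-owned buffer.
    class BufferStripSource : public StripSource {
      public:
        BufferStripSource(uint32_t ifd, const uint8_t* data, size_t size)
            : mIfd(ifd), mData(data), mSize(size), mPos(0) {}

        virtual status_t writeToStream(Output& stream, uint32_t count) {
            if (count > mSize - mPos) return BAD_VALUE;       // not enough data left
            status_t res = stream.write(mData, mPos, count);  // bytes [mPos, mPos+count)
            if (res == OK) mPos += count;
            return res;
        }

        virtual uint32_t getIfd() const { return mIfd; }

      private:
        uint32_t mIfd;
        const uint8_t* mData;
        size_t mSize;
        size_t mPos;
    };
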
diff --git a/media/img_utils/include/img_utils/TagDefinitions.h b/media/img_utils/include/img_utils/TagDefinitions.h
new file mode 100644
index 0000000..e9a7480
--- /dev/null
+++ b/media/img_utils/include/img_utils/TagDefinitions.h
@@ -0,0 +1,1392 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_TAG_DEFINITION_H
+#define IMG_UTILS_TIFF_TAG_DEFINITION_H
+
+#include <img_utils/TiffEntry.h>
+#include <img_utils/Output.h>
+#include <img_utils/TiffHelpers.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Tag definitions contain information about standard TIFF compatible tags.
+ */
+typedef struct TagDefinition {
+ // The tag name.
+ const char* tagName;
+ // The specified tag ID.
+ const uint16_t tagId;
+ // The default type for this tag. This must be a valid TIFF type.
+ const TagType defaultType;
+ // The default Image File Directory (IFD) for this tag.
+ const uint32_t defaultIfd;
+ // The valid count for this tag, or 0 if the count is not fixed.
+ const uint32_t fixedCount;
+ // The endianness of the tag value, or UNDEFINED_ENDIAN if there is no fixed endianness.
+ const Endianness fixedEndian;
+} TagDefinition_t;
+
+/**
+ * Convenience defines for tag ids.
+ */
+enum {
+ TAG_RAWTOPREVIEWGAIN = 0xC7A8u,
+ TAG_NEWRAWIMAGEDIGEST = 0xC7A7u,
+ TAG_ORIGINALDEFAULTCROPSIZE = 0xC793u,
+ TAG_ORIGINALBESTQUALITYFINALSIZE = 0xC792u,
+ TAG_ORIGINALDEFAULTFINALSIZE = 0xC791u,
+ TAG_PROFILEHUESATMAPENCODING = 0xC7A3u,
+ TAG_PROFILELOOKTABLEENCODING = 0xC7A4u,
+ TAG_BASELINEEXPOSUREOFFSET = 0xC7A5u,
+ TAG_DEFAULTBLACKRENDER = 0xC7A6u,
+ TAG_DEFAULTUSERCROP = 0xC7B5u,
+ TAG_NOISEPROFILE = 0xC761u,
+ TAG_OPCODELIST3 = 0xC74Eu,
+ TAG_OPCODELIST2 = 0xC741u,
+ TAG_OPCODELIST1 = 0xC740u,
+ TAG_PROFILELOOKTABLEDATA = 0xC726u,
+ TAG_PROFILELOOKTABLEDIMS = 0xC725u,
+ TAG_ROWINTERLEAVEFACTOR = 0xC71Fu,
+ TAG_SUBTILEBLOCKSIZE = 0xC71Eu,
+ TAG_ORIGINALRAWFILEDIGEST = 0xC71Du,
+ TAG_RAWIMAGEDIGEST = 0xC71Cu,
+ TAG_PREVIEWDATETIME = 0xC71Bu,
+ TAG_PREVIEWCOLORSPACE = 0xC71Au,
+ TAG_PREVIEWSETTINGSDIGEST = 0xC719u,
+ TAG_PREVIEWSETTINGSNAME = 0xC718u,
+ TAG_PREVIEWAPPLICATIONVERSION = 0xC717u,
+ TAG_PREVIEWAPPLICATIONNAME = 0xC716u,
+ TAG_FORWARDMATRIX2 = 0xC715u,
+ TAG_FORWARDMATRIX1 = 0xC714u,
+ TAG_PROFILECOPYRIGHT = 0xC6FEu,
+ TAG_PROFILEEMBEDPOLICY = 0xC6FDu,
+ TAG_PROFILETONECURVE = 0xC6FCu,
+ TAG_PROFILEHUESATMAPDATA2 = 0xC6FBu,
+ TAG_PROFILEHUESATMAPDATA1 = 0xC6FAu,
+ TAG_PROFILEHUESATMAPDIMS = 0xC6F9u,
+ TAG_PROFILENAME = 0xC6F8u,
+ TAG_NOISEREDUCTIONAPPLIED = 0xC6F7u,
+ TAG_ASSHOTPROFILENAME = 0xC6F6u,
+ TAG_EXTRACAMERAPROFILES = 0xC6F5u,
+ TAG_PROFILECALIBRATIONSIGNATURE = 0xC6F4u,
+ TAG_CAMERACALIBRATIONSIGNATURE = 0xC6F3u,
+ TAG_COLORIMETRICREFERENCE = 0xC6BFu,
+ TAG_CURRENTPREPROFILEMATRIX = 0xC692u,
+ TAG_CURRENTICCPROFILE = 0xC691u,
+ TAG_ASSHOTPREPROFILEMATRIX = 0xC690u,
+ TAG_ASSHOTICCPROFILE = 0xC68Fu,
+ TAG_MASKEDAREAS = 0xC68Eu,
+ TAG_ACTIVEAREA = 0xC68Du,
+ TAG_ORIGINALRAWFILEDATA = 0xC68Cu,
+ TAG_ORIGINALRAWFILENAME = 0xC68Bu,
+ TAG_RAWDATAUNIQUEID = 0xC65Du,
+ TAG_MAKERNOTESAFETY = 0xC635u,
+ TAG_DNGPRIVATEDATA = 0xC634u,
+ TAG_SHADOWSCALE = 0xC633u,
+ TAG_ANTIALIASSTRENGTH = 0xC632u,
+ TAG_CHROMABLURRADIUS = 0xC631u,
+ TAG_LENSINFO = 0xC630u,
+ TAG_CAMERASERIALNUMBER = 0xC62Fu,
+ TAG_LINEARRESPONSELIMIT = 0xC62Eu,
+ TAG_BAYERGREENSPLIT = 0xC62Du,
+ TAG_BASELINESHARPNESS = 0xC62Cu,
+ TAG_BASELINENOISE = 0xC62Bu,
+ TAG_BASELINEEXPOSURE = 0xC62Au,
+ TAG_ASSHOTWHITEXY = 0xC629u,
+ TAG_ASSHOTNEUTRAL = 0xC628u,
+ TAG_ANALOGBALANCE = 0xC627u,
+ TAG_REDUCTIONMATRIX2 = 0xC626u,
+ TAG_REDUCTIONMATRIX1 = 0xC625u,
+ TAG_CAMERACALIBRATION2 = 0xC624u,
+ TAG_CAMERACALIBRATION1 = 0xC623u,
+ TAG_COLORMATRIX2 = 0xC622u,
+ TAG_COLORMATRIX1 = 0xC621u,
+ TAG_CALIBRATIONILLUMINANT2 = 0xC65Bu,
+ TAG_CALIBRATIONILLUMINANT1 = 0xC65Au,
+ TAG_DEFAULTCROPSIZE = 0xC620u,
+ TAG_DEFAULTCROPORIGIN = 0xC61Fu,
+ TAG_BESTQUALITYSCALE = 0xC65Cu,
+ TAG_DEFAULTSCALE = 0xC61Eu,
+ TAG_WHITELEVEL = 0xC61Du,
+ TAG_BLACKLEVELDELTAV = 0xC61Cu,
+ TAG_BLACKLEVELDELTAH = 0xC61Bu,
+ TAG_BLACKLEVEL = 0xC61Au,
+ TAG_BLACKLEVELREPEATDIM = 0xC619u,
+ TAG_LINEARIZATIONTABLE = 0xC618u,
+ TAG_CFALAYOUT = 0xC617u,
+ TAG_CFAPLANECOLOR = 0xC616u,
+ TAG_LOCALIZEDCAMERAMODEL = 0xC615u,
+ TAG_UNIQUECAMERAMODEL = 0xC614u,
+ TAG_DNGBACKWARDVERSION = 0xC613u,
+ TAG_DNGVERSION = 0xC612u,
+ TAG_SUBFILETYPE = 0x00FFu,
+ TAG_YRESOLUTION = 0x011Bu,
+ TAG_XRESOLUTION = 0x011Au,
+ TAG_THRESHHOLDING = 0x0107u,
+ TAG_STRIPOFFSETS = 0x0111u,
+ TAG_STRIPBYTECOUNTS = 0x0117u,
+ TAG_SOFTWARE = 0x0131u,
+ TAG_SAMPLESPERPIXEL = 0x0115u,
+ TAG_ROWSPERSTRIP = 0x0116u,
+ TAG_RESOLUTIONUNIT = 0x0128u,
+ TAG_PLANARCONFIGURATION = 0x011Cu,
+ TAG_PHOTOMETRICINTERPRETATION = 0x0106u,
+ TAG_ORIENTATION = 0x0112u,
+ TAG_NEWSUBFILETYPE = 0x00FEu,
+ TAG_MODEL = 0x0110u,
+ TAG_MINSAMPLEVALUE = 0x0118u,
+ TAG_MAXSAMPLEVALUE = 0x0119u,
+ TAG_MAKE = 0x010Fu,
+ TAG_IMAGEWIDTH = 0x0100u,
+ TAG_IMAGELENGTH = 0x0101u,
+ TAG_IMAGEDESCRIPTION = 0x010Eu,
+ TAG_HOSTCOMPUTER = 0x013Cu,
+ TAG_GRAYRESPONSEUNIT = 0x0122u,
+ TAG_GRAYRESPONSECURVE = 0x0123u,
+ TAG_FREEOFFSETS = 0x0120u,
+ TAG_FREEBYTECOUNTS = 0x0121u,
+ TAG_FILLORDER = 0x010Au,
+ TAG_EXTRASAMPLES = 0x0152u,
+ TAG_DATETIME = 0x0132u,
+ TAG_COPYRIGHT = 0x8298u,
+ TAG_COMPRESSION = 0x0103u,
+ TAG_COLORMAP = 0x0140u,
+ TAG_CELLWIDTH = 0x0108u,
+ TAG_CELLLENGTH = 0x0109u,
+ TAG_BITSPERSAMPLE = 0x0102u,
+ TAG_ARTIST = 0x013Bu,
+ TAG_EXIFVERSION = 0x9000u,
+ TAG_CFAREPEATPATTERNDIM = 0x828Du,
+ TAG_DATETIMEORIGINAL = 0x9003u,
+ TAG_CFAPATTERN = 0x828Eu,
+ TAG_SUBIFDS = 0x014Au,
+ TAG_TIFFEPSTANDARDID = 0x9216u,
+ TAG_EXPOSURETIME = 0x829Au,
+ TAG_ISOSPEEDRATINGS = 0x8827u,
+ TAG_FOCALLENGTH = 0x920Au,
+ TAG_FNUMBER = 0x829Du,
+ TAG_GPSINFO = 0x8825u,
+ TAG_GPSVERSIONID = 0x0u,
+ TAG_GPSLATITUDEREF = 0x1u,
+ TAG_GPSLATITUDE = 0x2u,
+ TAG_GPSLONGITUDEREF = 0x3u,
+ TAG_GPSLONGITUDE = 0x4u,
+ TAG_GPSTIMESTAMP = 0x7u,
+ TAG_GPSDATESTAMP = 0x001Du,
+};
+
+/**
+ * TIFF_EP_TAG_DEFINITIONS contains tags defined in the TIFF EP spec
+ */
+const TagDefinition_t TIFF_EP_TAG_DEFINITIONS[] = {
+ { // PhotometricInterpretation
+ "PhotometricInterpretation",
+ 0x0106u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // SubIfds
+ "SubIfds",
+ 0x014Au,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFAPattern
+ "CFAPattern",
+ 0x828Eu,
+ BYTE,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFARepeatPatternDim
+ "CFARepeatPatternDim",
+ 0x828Du,
+ SHORT,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // DateTimeOriginal
+ "DateTimeOriginal",
+ 0x9003u,
+ ASCII,
+ IFD_0,
+ 20,
+ UNDEFINED_ENDIAN
+ },
+ { // Tiff/EPStandardID
+ "Tiff",
+ 0x9216u,
+ BYTE,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // ExposureTime
+ "ExposureTime",
+ 0x829Au,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ISOSpeedRatings
+ "ISOSpeedRatings",
+ 0x8827u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // FocalLength
+ "FocalLength",
+ 0x920Au,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // FNumber
+ "FNumber",
+ 0x829Du,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSInfo
+ "GPSInfo",
+ 0x8825u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSVersionID
+ "GPSVersionID",
+ 0x0u,
+ BYTE,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSLatitudeRef
+ "GPSLatitudeRef",
+ 0x1u,
+ ASCII,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSLatitude
+ "GPSLatitude",
+ 0x2u,
+ RATIONAL,
+ IFD_0,
+ 3,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSLongitudeRef
+ "GPSLongitudeRef",
+ 0x3u,
+ ASCII,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSLongitude
+ "GPSLongitude",
+ 0x4u,
+ RATIONAL,
+ IFD_0,
+ 3,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSTimeStamp
+ "GPSTimeStamp",
+ 0x7u,
+ RATIONAL,
+ IFD_0,
+ 3,
+ UNDEFINED_ENDIAN
+ },
+ /*TODO: Remaining TIFF EP tags*/
+};
+
+/**
+ * EXIF_2_3_TAG_DEFINITIONS contains tags defined in the Jeita EXIF 2.3 spec
+ */
+const TagDefinition_t EXIF_2_3_TAG_DEFINITIONS[] = {
+ { // ExifVersion
+ "ExifVersion",
+ 0x9000u,
+ UNDEFINED,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // GPSDateStamp
+ "GPSDateStamp",
+ 0x001Du,
+ ASCII,
+ IFD_0,
+ 11,
+ UNDEFINED_ENDIAN
+ },
+ /*TODO: Remaining EXIF 2.3 tags*/
+};
+
+/**
+ * TIFF_6_TAG_DEFINITIONS contains tags defined in the TIFF 6.0 spec
+ */
+const TagDefinition_t TIFF_6_TAG_DEFINITIONS[] = {
+ { // SubFileType
+ "SubFileType",
+ 0x00FFu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Artist
+ "Artist",
+ 0x013Bu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BitsPerSample
+ "BitsPerSample",
+ 0x0102u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CellLength
+ "CellLength",
+ 0x0109u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CellWidth
+ "CellWidth",
+ 0x0108u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorMap
+ "ColorMap",
+ 0x0140u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // Compression
+ "Compression",
+ 0x0103u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Copyright
+ "Copyright",
+ 0x8298u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // DateTime
+ "DateTime",
+ 0x0132u,
+ ASCII,
+ IFD_0,
+ 20,
+ UNDEFINED_ENDIAN
+ },
+ { // ExtraSamples
+ "ExtraSamples",
+ 0x0152u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // FillOrder
+ "FillOrder",
+ 0x010Au,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // FreeByteCounts
+ "FreeByteCounts",
+ 0x0121u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // FreeOffsets
+ "FreeOffsets",
+ 0x0120u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // GrayResponseCurve
+ "GrayResponseCurve",
+ 0x0123u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // GrayResponseUnit
+ "GrayResponseUnit",
+ 0x0122u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // HostComputer
+ "HostComputer",
+ 0x013Cu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ImageDescription
+ "ImageDescription",
+ 0x010Eu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ImageLength
+ "ImageLength",
+ 0x0101u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ImageWidth
+ "ImageWidth",
+ 0x0100u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Make
+ "Make",
+ 0x010Fu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // MaxSampleValue
+ "MaxSampleValue",
+ 0x0119u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // MinSampleValue
+ "MinSampleValue",
+ 0x0118u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // Model
+ "Model",
+ 0x0110u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // NewSubfileType
+ "NewSubfileType",
+ 0x00FEu,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Orientation
+ "Orientation",
+ 0x0112u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // PhotometricInterpretation
+ "PhotometricInterpretation",
+ 0x0106u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // PlanarConfiguration
+ "PlanarConfiguration",
+ 0x011Cu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ResolutionUnit
+ "ResolutionUnit",
+ 0x0128u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // RowsPerStrip
+ "RowsPerStrip",
+ 0x0116u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // SamplesPerPixel
+ "SamplesPerPixel",
+ 0x0115u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Software
+ "Software",
+ 0x0131u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // StripByteCounts
+ "StripByteCounts",
+ 0x0117u,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // StripOffsets
+ "StripOffsets",
+ 0x0111u,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // SubfileType
+ "SubfileType",
+ 0x00FFu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Threshholding
+ "Threshholding",
+ 0x0107u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // XResolution
+ "XResolution",
+ 0x011Au,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // YResolution
+ "YResolution",
+ 0x011Bu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+};
+
+/**
+ * DNG_TAG_DEFINITIONS contains tags defined in the DNG 1.4 spec
+ */
+const TagDefinition_t DNG_TAG_DEFINITIONS[] = {
+ { // DNGVersion
+ "DNGVersion",
+ 0xC612u,
+ BYTE,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // DNGBackwardVersion
+ "DNGBackwardVersion",
+ 0xC613u,
+ BYTE,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // UniqueCameraModel
+ "UniqueCameraModel",
+ 0xC614u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // LocalizedCameraModel
+ "LocalizedCameraModel",
+ 0xC615u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFAPlaneColor
+ "CFAPlaneColor",
+ 0xC616u,
+ BYTE,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFALayout
+ "CFALayout",
+ 0xC617u,
+ SHORT,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // LinearizationTable
+ "LinearizationTable",
+ 0xC618u,
+ SHORT,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevelRepeatDim
+ "BlackLevelRepeatDim",
+ 0xC619u,
+ SHORT,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevel
+ "BlackLevel",
+ 0xC61Au,
+ LONG,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevelDeltaH
+ "BlackLevelDeltaH",
+ 0xC61Bu,
+ SRATIONAL,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevelDeltaV
+ "BlackLevelDeltaV",
+ 0xC61Cu,
+ SRATIONAL,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // WhiteLevel
+ "WhiteLevel",
+ 0xC61Du,
+ LONG,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultScale
+ "DefaultScale",
+ 0xC61Eu,
+ RATIONAL,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // BestQualityScale
+ "BestQualityScale",
+ 0xC65Cu,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultCropOrigin
+ "DefaultCropOrigin",
+ 0xC61Fu,
+ LONG,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultCropSize
+ "DefaultCropSize",
+ 0xC620u,
+ LONG,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // CalibrationIlluminant1
+ "CalibrationIlluminant1",
+ 0xC65Au,
+ SHORT,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CalibrationIlluminant2
+ "CalibrationIlluminant2",
+ 0xC65Bu,
+ SHORT,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorMatrix1
+ "ColorMatrix1",
+ 0xC621u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorMatrix2
+ "ColorMatrix2",
+ 0xC622u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraCalibration1
+ "CameraCalibration1",
+ 0xC623u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraCalibration2
+ "CameraCalibration2",
+ 0xC624u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ReductionMatrix1
+ "ReductionMatrix1",
+ 0xC625u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ReductionMatrix2
+ "ReductionMatrix2",
+ 0xC626u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AnalogBalance
+ "AnalogBalance",
+ 0xC627u,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotNeutral
+ "AsShotNeutral",
+ 0xC628u,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotWhiteXY
+ "AsShotWhiteXY",
+ 0xC629u,
+ RATIONAL,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineExposure
+ "BaselineExposure",
+ 0xC62Au,
+ SRATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineNoise
+ "BaselineNoise",
+ 0xC62Bu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineSharpness
+ "BaselineSharpness",
+ 0xC62Cu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BayerGreenSplit
+ "BayerGreenSplit",
+ 0xC62Du,
+ LONG,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // LinearResponseLimit
+ "LinearResponseLimit",
+ 0xC62Eu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraSerialNumber
+ "CameraSerialNumber",
+ 0xC62Fu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // LensInfo
+ "LensInfo",
+ 0xC630u,
+ RATIONAL,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // ChromaBlurRadius
+ "ChromaBlurRadius",
+ 0xC631u,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // AntiAliasStrength
+ "AntiAliasStrength",
+ 0xC632u,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ShadowScale
+ "ShadowScale",
+ 0xC633u,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // DNGPrivateData
+ "DNGPrivateData",
+ 0xC634u,
+ BYTE,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // MakerNoteSafety
+ "MakerNoteSafety",
+ 0xC635u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // RawDataUniqueID
+ "RawDataUniqueID",
+ 0xC65Du,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalRawFileName
+ "OriginalRawFileName",
+ 0xC68Bu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalRawFileData
+ "OriginalRawFileData",
+ 0xC68Cu,
+ UNDEFINED,
+ IFD_0,
+ 0,
+ BIG
+ },
+ { // ActiveArea
+ "ActiveArea",
+ 0xC68Du,
+ LONG,
+ RAW_IFD,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // MaskedAreas
+ "MaskedAreas",
+ 0xC68Eu,
+ LONG,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotICCProfile
+ "AsShotICCProfile",
+ 0xC68Fu,
+ UNDEFINED,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotPreProfileMatrix
+ "AsShotPreProfileMatrix",
+ 0xC690u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CurrentICCProfile
+ "CurrentICCProfile",
+ 0xC691u,
+ UNDEFINED,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CurrentPreProfileMatrix
+ "CurrentPreProfileMatrix",
+ 0xC692u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorimetricReference
+ "ColorimetricReference",
+ 0xC6BFu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraCalibrationSignature
+ "CameraCalibrationSignature",
+ 0xC6F3u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileCalibrationSignature
+ "ProfileCalibrationSignature",
+ 0xC6F4u,
+ ASCII,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ExtraCameraProfiles
+ "ExtraCameraProfiles",
+ 0xC6F5u,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotProfileName
+ "AsShotProfileName",
+ 0xC6F6u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // NoiseReductionApplied
+ "NoiseReductionApplied",
+ 0xC6F7u,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileName
+ "ProfileName",
+ 0xC6F8u,
+ ASCII,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapDims
+ "ProfileHueSatMapDims",
+ 0xC6F9u,
+ LONG,
+ PROFILE_IFD,
+ 3,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapData1
+ "ProfileHueSatMapData1",
+ 0xC6FAu,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapData2
+ "ProfileHueSatMapData2",
+ 0xC6FBu,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileToneCurve
+ "ProfileToneCurve",
+ 0xC6FCu,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileEmbedPolicy
+ "ProfileEmbedPolicy",
+ 0xC6FDu,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileCopyright
+ "ProfileCopyright",
+ 0xC6FEu,
+ ASCII,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ForwardMatrix1
+ "ForwardMatrix1",
+ 0xC714u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ForwardMatrix2
+ "ForwardMatrix2",
+ 0xC715u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewApplicationName
+ "PreviewApplicationName",
+ 0xC716u,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewApplicationVersion
+ "PreviewApplicationVersion",
+ 0xC717u,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewSettingsName
+ "PreviewSettingsName",
+ 0xC718u,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewSettingsDigest
+ "PreviewSettingsDigest",
+ 0xC719u,
+ BYTE,
+ PREVIEW_IFD,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewColorSpace
+ "PreviewColorSpace",
+ 0xC71Au,
+ LONG,
+ PREVIEW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewDateTime
+ "PreviewDateTime",
+ 0xC71Bu,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // RawImageDigest
+ "RawImageDigest",
+ 0xC71Cu,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalRawFileDigest
+ "OriginalRawFileDigest",
+ 0xC71Du,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // SubTileBlockSize
+ "SubTileBlockSize",
+ 0xC71Eu,
+ LONG,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // RowInterleaveFactor
+ "RowInterleaveFactor",
+ 0xC71Fu,
+ LONG,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileLookTableDims
+ "ProfileLookTableDims",
+ 0xC725u,
+ LONG,
+ PROFILE_IFD,
+ 3,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileLookTableData
+ "ProfileLookTableData",
+ 0xC726u,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // OpcodeList1
+ "OpcodeList1",
+ 0xC740u,
+ UNDEFINED,
+ RAW_IFD,
+ 0,
+ BIG
+ },
+ { // OpcodeList2
+ "OpcodeList2",
+ 0xC741u,
+ UNDEFINED,
+ RAW_IFD,
+ 0,
+ BIG
+ },
+ { // OpcodeList3
+ "OpcodeList3",
+ 0xC74Eu,
+ UNDEFINED,
+ RAW_IFD,
+ 0,
+ BIG
+ },
+ { // NoiseProfile
+ "NoiseProfile",
+ 0xC761u,
+ DOUBLE,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultUserCrop
+ "DefaultUserCrop",
+ 0xC7B5u,
+ RATIONAL,
+ RAW_IFD,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultBlackRender
+ "DefaultBlackRender",
+ 0xC7A6u,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineExposureOffset
+ "BaselineExposureOffset",
+ 0xC7A5u,
+ RATIONAL,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileLookTableEncoding
+ "ProfileLookTableEncoding",
+ 0xC7A4u,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapEncoding
+ "ProfileHueSatMapEncoding",
+ 0xC7A3u,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalDefaultFinalSize
+ "OriginalDefaultFinalSize",
+ 0xC791u,
+ LONG,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalBestQualityFinalSize
+ "OriginalBestQualityFinalSize",
+ 0xC792u,
+ LONG,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalDefaultCropSize
+ "OriginalDefaultCropSize",
+ 0xC793u,
+ LONG,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // NewRawImageDigest
+ "NewRawImageDigest",
+ 0xC7A7u,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // RawToPreviewGain
+ "RawToPreviewGain",
+ 0xC7A8u,
+ DOUBLE,
+ PREVIEW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_TAG_DEFINITION_H*/
diff --git a/media/img_utils/include/img_utils/TiffEntry.h b/media/img_utils/include/img_utils/TiffEntry.h
new file mode 100644
index 0000000..4d672b2
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffEntry.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_ENTRY
+#define IMG_UTILS_TIFF_ENTRY
+
+#include <img_utils/TiffWritable.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/EndianUtils.h>
+
+#include <cutils/compiler.h>
+#include <utils/String8.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+#define COMPARE_DEF(op) \
+inline bool operator op (const TiffEntry& entry) const;
+
+/**
+ * This class holds a single TIFF IFD entry.
+ *
+ * Subclasses are expected to support assignment and copying operations.
+ */
+class ANDROID_API TiffEntry : public TiffWritable {
+ public:
+ virtual ~TiffEntry();
+
+ /**
+ * Write the 12-byte IFD entry to the output. The given offset will be
+ * set as the tag value if the size of the tag value exceeds the max
+ * size for the TIFF Value field (4 bytes), and should be word aligned.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const = 0;
+
+ /**
+ * Get the count set for this entry. This corresponds to the TIFF Count
+ * field.
+ */
+ virtual uint32_t getCount() const = 0;
+
+ /**
+ * Get the tag id set for this entry. This corresponds to the TIFF Tag
+ * field.
+ */
+ virtual uint16_t getTag() const = 0;
+
+ /**
+ * Get the type set for this entry. This corresponds to the TIFF Type
+ * field.
+ */
+ virtual TagType getType() const = 0;
+
+ /**
+ * Get the defined endianness for this entry. If this is defined,
+ * the tag value will be written with the given byte order.
+ */
+ virtual Endianness getEndianness() const = 0;
+
+ /**
+ * Get the value for this entry. This corresponds to the TIFF Value
+ * field.
+ *
+ * Returns NULL if the value is NULL, or if the type used does not
+ * match the type of this tag.
+ */
+ template<typename T>
+ const T* getData() const;
+
+ virtual String8 toString() const;
+
+ /**
+ * Force the type used here to be a valid TIFF type.
+ *
+ * Returns NULL if the given value is NULL, or if the type given does
+ * not match the type of the value given.
+ */
+ template<typename T>
+ static const T* forceValidType(TagType type, const T* value);
+
+ virtual const void* getDataHelper() const = 0;
+
+ COMPARE_DEF(>)
+ COMPARE_DEF(<)
+
+ protected:
+ enum {
+ MAX_PRINT_STRING_LENGTH = 256
+ };
+};
+
+#define COMPARE(op) \
+bool TiffEntry::operator op (const TiffEntry& entry) const { \
+ return getComparableValue() op entry.getComparableValue(); \
+}
+
+COMPARE(>)
+COMPARE(<)
+
+
+template<typename T>
+const T* TiffEntry::getData() const {
+ const T* value = reinterpret_cast<const T*>(getDataHelper());
+ return forceValidType<T>(getType(), value);
+}
+
+#undef COMPARE
+#undef COMPARE_DEF
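+
+/**
+ * Usage sketch (illustrative, not part of the API): reading a value back out
+ * of an entry. The concrete entry here is assumed to have been built
+ * elsewhere (e.g. by a TiffWriter) with TIFF type LONG.
+ *
+ *   sp<TiffEntry> entry = ...; // an entry whose getType() == LONG
+ *   const uint32_t* values = entry->getData<uint32_t>();
+ *   if (values == NULL) {
+ *       // The requested C++ type did not match the entry's TIFF type.
+ *   }
+ */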
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_ENTRY*/
diff --git a/media/img_utils/include/img_utils/TiffEntryImpl.h b/media/img_utils/include/img_utils/TiffEntryImpl.h
new file mode 100644
index 0000000..c73e231
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffEntryImpl.h
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_ENTRY_IMPL
+#define IMG_UTILS_TIFF_ENTRY_IMPL
+
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffEntry.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/Output.h>
+#include <img_utils/EndianUtils.h>
+
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+template<typename T>
+class TiffEntryImpl : public TiffEntry {
+ public:
+ TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end, const T* data);
+ virtual ~TiffEntryImpl();
+
+ status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const;
+ status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const;
+
+ uint32_t getCount() const;
+ uint16_t getTag() const;
+ TagType getType() const;
+ Endianness getEndianness() const;
+ size_t getSize() const;
+ uint32_t getComparableValue() const;
+
+ protected:
+ const void* getDataHelper() const;
+ uint32_t getActualSize() const;
+
+ uint16_t mTag;
+ uint16_t mType;
+ uint32_t mCount;
+ Endianness mEnd;
+ Vector<T> mData;
+
+};
+
+template<typename T>
+TiffEntryImpl<T>::TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end,
+ const T* data)
+ : mTag(tag), mType(static_cast<uint16_t>(type)), mCount(count), mEnd(end) {
+ count = (type == RATIONAL || type == SRATIONAL) ? count * 2 : count;
+ ssize_t index = mData.appendArray(data, count);
+ LOG_ALWAYS_FATAL_IF(index < 0, "%s: Could not allocate vector for data.", __FUNCTION__);
+}
+
+template<typename T>
+TiffEntryImpl<T>::~TiffEntryImpl() {}
+
+template<typename T>
+uint32_t TiffEntryImpl<T>::getCount() const {
+ return mCount;
+}
+
+template<typename T>
+uint16_t TiffEntryImpl<T>::getTag() const {
+ return mTag;
+}
+
+template<typename T>
+TagType TiffEntryImpl<T>::getType() const {
+ return static_cast<TagType>(mType);
+}
+
+template<typename T>
+const void* TiffEntryImpl<T>::getDataHelper() const {
+ return reinterpret_cast<const void*>(mData.array());
+}
+
+template<typename T>
+size_t TiffEntryImpl<T>::getSize() const {
+ uint32_t total = getActualSize();
+ WORD_ALIGN(total)
+ return (total <= OFFSET_SIZE) ? 0 : total;
+}
+
+template<typename T>
+uint32_t TiffEntryImpl<T>::getActualSize() const {
+ uint32_t total = sizeof(T) * mCount;
+ if (getType() == RATIONAL || getType() == SRATIONAL) {
+ // 2 ints stored for each rational, multiply by 2
+ total <<= 1;
+ }
+ return total;
+}
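+
+/**
+ * Worked example of the size logic above (illustrative only):
+ * - A SHORT entry with count 1 occupies 2 bytes; word-aligned this is 4 bytes,
+ *   which still fits the 4-byte Value field, so getSize() returns 0 and the
+ *   value is written inline by writeTagInfo().
+ * - A RATIONAL entry with count 2 is stored as 4 uint32_t values (16 bytes),
+ *   which exceeds OFFSET_SIZE, so getSize() returns 16 and writeTagInfo()
+ *   emits an offset instead of the value.
+ */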
+
+template<typename T>
+Endianness TiffEntryImpl<T>::getEndianness() const {
+ return mEnd;
+}
+
+template<typename T>
+uint32_t TiffEntryImpl<T>::getComparableValue() const {
+ return mTag;
+}
+
+template<typename T>
+status_t TiffEntryImpl<T>::writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const {
+ assert((offset % TIFF_WORD_SIZE) == 0);
+ status_t ret = OK;
+ BAIL_ON_FAIL(out->write(&mTag, 0, 1), ret);
+ BAIL_ON_FAIL(out->write(&mType, 0, 1), ret);
+ BAIL_ON_FAIL(out->write(&mCount, 0, 1), ret);
+
+ uint32_t dataSize = getActualSize();
+ if (dataSize > OFFSET_SIZE) {
+ BAIL_ON_FAIL(out->write(&offset, 0, 1), ret);
+ } else {
+ uint32_t count = mCount;
+ if (getType() == RATIONAL || getType() == SRATIONAL) {
+ /**
+ * Rationals are stored as an array of ints. Each
+ * rational is represented by 2 ints. To recover the
+ * size of the array here, multiply the count by 2.
+ */
+ count <<= 1;
+ }
+ BAIL_ON_FAIL(out->write(mData.array(), 0, count), ret);
+ ZERO_TILL_WORD(out, dataSize, ret);
+ }
+ return ret;
+}
+
+template<typename T>
+status_t TiffEntryImpl<T>::writeData(uint32_t /*offset*/, EndianOutput* out) const {
+ status_t ret = OK;
+
+ // Some tags have fixed-endian value output
+ Endianness tmp = UNDEFINED_ENDIAN;
+ if (mEnd != UNDEFINED_ENDIAN) {
+ tmp = out->getEndianness();
+ out->setEndianness(mEnd);
+ }
+
+ uint32_t count = mCount;
+ if (getType() == RATIONAL || getType() == SRATIONAL) {
+ /**
+ * Rationals are stored as an array of ints. Each
+ * rational is represented by 2 ints. To recover the
+ * size of the array here, multiply the count by 2.
+ */
+ count <<= 1;
+ }
+
+ BAIL_ON_FAIL(out->write(mData.array(), 0, count), ret);
+
+ if (mEnd != UNDEFINED_ENDIAN) {
+ out->setEndianness(tmp);
+ }
+
+ // Write to next word alignment
+ ZERO_TILL_WORD(out, sizeof(T) * count, ret);
+ return ret;
+}
+
+template<>
+inline status_t TiffEntryImpl<sp<TiffIfd> >::writeTagInfo(uint32_t offset,
+ /*out*/EndianOutput* out) const {
+ assert((offset % TIFF_WORD_SIZE) == 0);
+ status_t ret = OK;
+ BAIL_ON_FAIL(out->write(&mTag, 0, 1), ret);
+ BAIL_ON_FAIL(out->write(&mType, 0, 1), ret);
+ BAIL_ON_FAIL(out->write(&mCount, 0, 1), ret);
+
+ BAIL_ON_FAIL(out->write(&offset, 0, 1), ret);
+ return ret;
+}
+
+template<>
+inline uint32_t TiffEntryImpl<sp<TiffIfd> >::getActualSize() const {
+ uint32_t total = 0;
+ for (size_t i = 0; i < mData.size(); ++i) {
+ total += mData[i]->getSize();
+ }
+ return total;
+}
+
+template<>
+inline status_t TiffEntryImpl<sp<TiffIfd> >::writeData(uint32_t offset, EndianOutput* out) const {
+ status_t ret = OK;
+ for (uint32_t i = 0; i < mCount; ++i) {
+ BAIL_ON_FAIL(mData[i]->writeData(offset, out), ret);
+ offset += mData[i]->getSize();
+ }
+ return ret;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_ENTRY_IMPL*/
+
+
diff --git a/media/img_utils/include/img_utils/TiffHelpers.h b/media/img_utils/include/img_utils/TiffHelpers.h
new file mode 100644
index 0000000..0969e4d
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffHelpers.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_HELPERS_H
+#define IMG_UTILS_TIFF_HELPERS_H
+
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+const uint8_t ZERO_WORD[] = {0, 0, 0, 0};
+
+#define BAIL_ON_FAIL(x, flag) \
+ if ((flag = (x)) != OK) return flag;
+
+#define BYTES_TILL_WORD(index) \
+ ((TIFF_WORD_SIZE - ((index) % TIFF_WORD_SIZE)) % TIFF_WORD_SIZE)
+
+#define WORD_ALIGN(count) \
+ count += BYTES_TILL_WORD(count);
+
+#define ZERO_TILL_WORD(output, index, ret) \
+ { \
+ size_t remaining = BYTES_TILL_WORD(index); \
+ if (remaining > 0) { \
+ BAIL_ON_FAIL((output)->write(ZERO_WORD, 0, remaining), ret); \
+ } \
+ }
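+
+// Example (illustrative): with TIFF_WORD_SIZE == 4, BYTES_TILL_WORD(13) == 3,
+// so ZERO_TILL_WORD pads three zero bytes after a 13-byte write, while
+// BYTES_TILL_WORD(16) == 0 and an already-aligned write gets no padding.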
+
+/**
+ * Basic TIFF header constants.
+ */
+enum {
+ BAD_OFFSET = 0,
+ TIFF_WORD_SIZE = 4, // Size in bytes
+ IFD_HEADER_SIZE = 2, // Size in bytes
+ IFD_FOOTER_SIZE = 4, // Size in bytes
+ TIFF_ENTRY_SIZE = 12, // Size in bytes
+ MAX_IFD_ENTRIES = UINT16_MAX,
+ FILE_HEADER_SIZE = 8, // Size in bytes
+ ENDIAN_MARKER_SIZE = 2, // Size in bytes
+ TIFF_MARKER_SIZE = 2, // Size in bytes
+ OFFSET_MARKER_SIZE = 4, // Size in bytes
+ TIFF_FILE_MARKER = 42,
+ BIG_ENDIAN_MARKER = 0x4D4Du,
+ LITTLE_ENDIAN_MARKER = 0x4949u
+};
+
+/**
+ * Constants for the TIFF tag types.
+ */
+enum TagType {
+ UNKNOWN_TAGTYPE = 0,
+ BYTE=1,
+ ASCII,
+ SHORT,
+ LONG,
+ RATIONAL,
+ SBYTE,
+ UNDEFINED,
+ SSHORT,
+ SLONG,
+ SRATIONAL,
+ FLOAT,
+ DOUBLE
+};
+
+/**
+ * Sizes of the TIFF entry fields (in bytes).
+ */
+enum {
+ TAG_SIZE = 2,
+ TYPE_SIZE = 2,
+ COUNT_SIZE = 4,
+ OFFSET_SIZE = 4
+};
+
+/**
+ * Convenience IFD id constants.
+ */
+enum {
+ IFD_0 = 0,
+ RAW_IFD,
+ PROFILE_IFD,
+ PREVIEW_IFD
+};
+
+inline size_t getTypeSize(TagType type) {
+ switch(type) {
+ case UNDEFINED:
+ case ASCII:
+ case BYTE:
+ case SBYTE:
+ return 1;
+ case SHORT:
+ case SSHORT:
+ return 2;
+ case LONG:
+ case SLONG:
+ case FLOAT:
+ return 4;
+ case RATIONAL:
+ case SRATIONAL:
+ case DOUBLE:
+ return 8;
+ default:
+ return 0;
+ }
+}
+
+inline uint32_t calculateIfdSize(size_t numberOfEntries) {
+ return IFD_HEADER_SIZE + IFD_FOOTER_SIZE + TIFF_ENTRY_SIZE * numberOfEntries;
+}
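+
+// Example (illustrative): an IFD holding 10 entries occupies
+// IFD_HEADER_SIZE + IFD_FOOTER_SIZE + 10 * TIFF_ENTRY_SIZE = 2 + 4 + 120 = 126
+// bytes, not counting entry values too large for the 4-byte Value field.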
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_HELPERS_H*/
diff --git a/media/img_utils/include/img_utils/TiffIfd.h b/media/img_utils/include/img_utils/TiffIfd.h
new file mode 100644
index 0000000..51b5c9a
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffIfd.h
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_IFD_H
+#define IMG_UTILS_TIFF_IFD_H
+
+#include <img_utils/TiffWritable.h>
+#include <img_utils/TiffEntry.h>
+#include <img_utils/Output.h>
+#include <img_utils/SortedEntryVector.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/String8.h>
+#include <utils/SortedVector.h>
+#include <utils/StrongPointer.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * This class holds a single TIFF Image File Directory (IFD) structure.
+ *
+ * This maps to the TIFF IFD structure that is logically composed of:
+ * - A 2-byte field listing the number of entries.
+ * - A list of 12-byte TIFF entries.
+ * - A 4-byte offset to the next IFD.
+ */
+class ANDROID_API TiffIfd : public TiffWritable {
+ public:
+ TiffIfd(uint32_t ifdId);
+ virtual ~TiffIfd();
+
+ /**
+ * Add a TiffEntry to this IFD or replace an existing entry with the
+ * same tag ID. No validation is done.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t addEntry(const sp<TiffEntry>& entry);
+
+ /**
+ * Set the pointer to the next IFD. This is used to create a linked
+ * list of IFDs as defined by the TIFF 6.0 spec., and is not included
+ * when calculating the size of IFD and entries for the getSize()
+ * method (unlike SubIFDs).
+ */
+ virtual void setNextIfd(const sp<TiffIfd>& ifd);
+
+ /**
+ * Get the pointer to the next IFD, or NULL if none exists.
+ */
+ virtual sp<TiffIfd> getNextIfd() const;
+
+ /**
+ * Write the IFD data. This includes the IFD header, entries, footer,
+ * and the corresponding values for each entry (recursively including
+ * sub-IFDs). The written amount should end on a word boundary, and
+ * the given offset should be word aligned.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const;
+
+ /**
+ * Get the size of the IFD. This includes the IFD header, entries, footer,
+ * and the corresponding values for each entry (recursively including
+ * any sub-IFDs).
+ */
+ virtual size_t getSize() const;
+
+ /**
+ * Get the id of this IFD.
+ */
+ virtual uint32_t getId() const;
+
+ /**
+ * Get an entry with the given tag ID.
+ *
+ * Returns a strong pointer to the entry if it exists, or an empty strong
+ * pointer.
+ */
+ virtual sp<TiffEntry> getEntry(uint16_t tag) const;
+
+ /**
+ * Remove the entry with the given tag ID if it exists.
+ */
+ virtual void removeEntry(uint16_t tag);
+
+ /**
+ * Convenience method to validate and set strip-related image tags.
+ *
+ * This sets all strip-related tags, but leaves offset values uninitialized.
+ * setStripOffset must be called with the desired offset before writing.
+ * The strip tag values are calculated from the existing tags for image
+ * dimensions and pixel type set in the IFD.
+ *
+ * Does not handle planar image configurations (PlanarConfiguration != 1).
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t validateAndSetStripTags();
+
+ /**
+ * Returns true if validateAndSetStripTags has been called, but not setStripOffset.
+ */
+ virtual bool uninitializedOffsets() const;
+
+ /**
+ * Convenience method to set beginning offset for strips.
+ *
+ * Call this to update the strip offsets before calling writeData.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t setStripOffset(uint32_t offset);
+
+ /**
+ * Get the total size of the strips in bytes.
+ *
+ * This sums the byte count at each strip offset, and returns
+ * the total count of bytes stored in strips for this IFD.
+ */
+ virtual uint32_t getStripSize() const;
+
+ /**
+ * Get a formatted string representing this IFD.
+ */
+ virtual String8 toString() const;
+
+ /**
+ * Print a formatted string representing this IFD to logcat.
+ */
+ void log() const;
+
+ /**
+ * Get value used to determine sort order.
+ */
+ virtual uint32_t getComparableValue() const;
+
+ protected:
+ virtual uint32_t checkAndGetOffset(uint32_t offset) const;
+ SortedEntryVector mEntries;
+ sp<TiffIfd> mNextIfd;
+ uint32_t mIfdId;
+ bool mStripOffsetsInitialized;
+};
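+
+/**
+ * Usage sketch (illustrative, not part of the API): the strip workflow for a
+ * raw image IFD. The image dimension and pixel tags are assumed to have been
+ * added already, and imageDataOffset is a placeholder for the file offset at
+ * which the strip data will be written.
+ *
+ *   sp<TiffIfd> ifd = new TiffIfd(RAW_IFD);
+ *   // ... addEntry() calls for ImageWidth, ImageLength, SamplesPerPixel,
+ *   //     BitsPerSample, etc. ...
+ *   if (ifd->validateAndSetStripTags() == OK) {
+ *       ifd->setStripOffset(imageDataOffset); // must happen before writeData()
+ *   }
+ */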
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_IFD_H*/
diff --git a/media/img_utils/include/img_utils/TiffWritable.h b/media/img_utils/include/img_utils/TiffWritable.h
new file mode 100644
index 0000000..a72cecc
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffWritable.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_WRITABLE
+#define IMG_UTILS_TIFF_WRITABLE
+
+#include <img_utils/Orderable.h>
+#include <img_utils/EndianUtils.h>
+#include <img_utils/Output.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * TiffWritable subclasses represent TIFF metadata objects that can be written
+ * to an EndianOutput object. This is used for TIFF entries and IFDs.
+ */
+class ANDROID_API TiffWritable : public Orderable, public LightRefBase<TiffWritable> {
+ public:
+ TiffWritable();
+ virtual ~TiffWritable();
+
+ /**
+ * Write the data to the output. The given offset is used to calculate
+ * the header offset for values written. The offset is defined
+ * relative to the beginning of the TIFF header, and is word aligned.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const = 0;
+
+ /**
+ * Get the size of the data to write.
+ */
+ virtual size_t getSize() const = 0;
+
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_WRITABLE*/
diff --git a/media/img_utils/include/img_utils/TiffWriter.h b/media/img_utils/include/img_utils/TiffWriter.h
new file mode 100644
index 0000000..b7af239
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffWriter.h
@@ -0,0 +1,324 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_WRITER_H
+#define IMG_UTILS_TIFF_WRITER_H
+
+#include <img_utils/EndianUtils.h>
+#include <img_utils/StripSource.h>
+#include <img_utils/TiffEntryImpl.h>
+#include <img_utils/TagDefinitions.h>
+#include <img_utils/TiffIfd.h>
+
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <utils/StrongPointer.h>
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+class TiffEntry;
+class TiffIfd;
+class Output;
+
+/**
+ * This class holds a collection of TIFF IFDs that can be written as a
+ * complete DNG file header.
+ *
+ * This maps to the TIFF header structure that is logically composed of:
+ * - An 8-byte file header containing an endianness indicator, the TIFF
+ * file marker, and the offset to the first IFD.
+ * - A list of TIFF IFD structures.
+ */
+class ANDROID_API TiffWriter : public LightRefBase<TiffWriter> {
+ public:
+ enum SubIfdType {
+ SUBIFD = 0,
+ GPSINFO
+ };
+
+ /**
+ * Constructs a TiffWriter with the default tag mappings. This enables
+ * all of the tags defined in TagDefinitions.h, and uses the following
+ * mapping precedence to resolve collisions:
+ * (highest precedence) TIFF/EP > DNG > EXIF 2.3 > TIFF 6.0
+ */
+ TiffWriter();
+
+ /**
+ * Constructs a TiffWriter with the given tag mappings. The mapping
+ * precedence will be in the order that the definition maps are given,
+ * where the lower index map gets precedence.
+ *
+ * This can be used with user-defined definitions, or definitions form
+ * TagDefinitions.h
+ *
+ * The enabledDefinitions mapping object is owned by the caller, and must
+ * stay alive for the lifespan of the constructed TiffWriter object.
+ */
+ TiffWriter(KeyedVector<uint16_t, const TagDefinition_t*>* enabledDefinitions,
+ size_t length);
+
+ virtual ~TiffWriter();
+
+ /**
+ * Write a TIFF header containing each IFD set. This will recursively
+ * write all SubIFDs and tags.
+ *
+ * Any StripSources passed in will be written to the output as image strips
+ * at the appropriate offsets. The StripByteCounts, RowsPerStrip, and
+ * StripOffsets tags must be set to use this. To set these tags in a
+ * given IFD, use the addStrip method.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t write(Output* out, StripSource** sources, size_t sourcesCount,
+ Endianness end = LITTLE);
+
+ /**
+ * Write a TIFF header containing each IFD set. This will recursively
+ * write all SubIFDs and tags.
+ *
+ * Image data for strips or tiles must be written separately at the
+ * appropriate offsets. These offsets must not fall within the file
+ * header written this way. The size of the header written is given
+ * by the getTotalSize() method.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t write(Output* out, Endianness end = LITTLE);
+
+ /**
+ * Get the total size in bytes of the TIFF header. This includes all
+ * IFDs, tags, and values set for this TiffWriter.
+ */
+ virtual uint32_t getTotalSize() const;
+
+ /**
+ * Add an entry to the IFD with the given ID.
+ *
+ * Returns OK on success, or a negative error code on failure. Valid
+ * error codes for this method are:
+ * - BAD_INDEX - The given tag doesn't exist.
+ * - BAD_VALUE - The given count doesn't match the required count for
+ * this tag.
+ * - BAD_TYPE - The type of the given data isn't compatible with the
+ * type required for this tag.
+ * - NAME_NOT_FOUND - No IFD exists with the given ID.
+ */
+ virtual status_t addEntry(const sp<TiffEntry>& entry, uint32_t ifd);
+
+ /**
+ * Build an entry for a known tag and add it to the IFD with the given ID.
+ * This tag must be defined in one of the definition vectors this TIFF writer
+ * was constructed with. The count and type are validated.
+ *
+ * Returns OK on success, or a negative error code on failure. Valid
+ * error codes for this method are:
+ * - BAD_INDEX - The given tag doesn't exist.
+ * - BAD_VALUE - The given count doesn't match the required count for
+ * this tag.
+ * - BAD_TYPE - The type of the given data isn't compatible with the
+ * type required for this tag.
+ * - NAME_NOT_FOUND - No IFD exists with the given ID.
+ */
+ template<typename T>
+ status_t addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd);
+
+ /**
+ * Build an entry for a known tag. This tag must be one of the tags
+ * defined in one of the definition vectors this TIFF writer was constructed
+ * with. The count and type are validated. If this succeeds, the resulting
+ * entry will be placed in the outEntry pointer.
+ *
+ * Returns OK on success, or a negative error code on failure. Valid
+ * error codes for this method are:
+ * - BAD_INDEX - The given tag doesn't exist.
+ * - BAD_VALUE - The given count doesn't match the required count for
+ * this tag.
+ * - BAD_TYPE - The type of the given data isn't compatible with the
+ * type required for this tag.
+ */
+ template<typename T>
+ status_t buildEntry(uint16_t tag, uint32_t count, const T* data,
+ /*out*/sp<TiffEntry>* outEntry) const;
+
+ /**
+ * Convenience function to set the strip related tags for a given IFD.
+ *
+ * Call this before using a StripSource as an input to write.
+ * The following tags must be set before calling this method:
+ * - ImageWidth
+ * - ImageLength
+ * - SamplesPerPixel
+ * - BitsPerSample
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addStrip(uint32_t ifd);
+
+ /**
+ * Return the TIFF entry with the given tag ID in the IFD with the given ID,
+ * or an empty pointer if none exists.
+ */
+ virtual sp<TiffEntry> getEntry(uint16_t tag, uint32_t ifd) const;
+
+ /**
+ * Remove the TIFF entry with the given tag ID in the given IFD if it exists.
+ */
+ virtual void removeEntry(uint16_t tag, uint32_t ifd);
+
+ /**
+ * Create an empty IFD with the given ID and add it to the end of the
+ * list of IFDs.
+ */
+ virtual status_t addIfd(uint32_t ifd);
+
+ /**
+ * Create an empty IFD with the given ID and add it as a SubIfd of the
+ * parent IFD.
+ */
+ virtual status_t addSubIfd(uint32_t parentIfd, uint32_t ifd, SubIfdType type = SUBIFD);
+
+ /**
+ * Returns the default type for the given tag ID.
+ */
+ virtual TagType getDefaultType(uint16_t tag) const;
+
+ /**
+ * Returns the default count for a given tag ID, or 0 if this
+ * tag normally has a variable count.
+ */
+ virtual uint32_t getDefaultCount(uint16_t tag) const;
+
+ /**
+ * Returns true if an IFD with the given ID exists.
+ */
+ virtual bool hasIfd(uint32_t ifd) const;
+
+ /**
+ * Returns true if a definition exists for the given tag ID.
+ */
+ virtual bool checkIfDefined(uint16_t tag) const;
+
+ /**
+ * Returns the name of the tag if a definition exists for the given tag
+ * ID, or null if no definition exists.
+ */
+ virtual const char* getTagName(uint16_t tag) const;
+
+ /**
+ * Print the currently configured IFDs and entries to logcat.
+ */
+ virtual void log() const;
+
+ /**
+ * Build an entry. No validation is done.
+ *
+ * WARNING: Using this method can result in creating poorly formatted
+ * TIFF files.
+ *
+ * Returns a TiffEntry with the given tag, type, count, endianness,
+ * and data.
+ */
+ template<typename T>
+ static sp<TiffEntry> uncheckedBuildEntry(uint16_t tag, TagType type,
+ uint32_t count, Endianness end, const T* data);
+
+ /**
+ * Utility function to build a tag-to-definition mapping from a given
+ * array of tag definitions.
+ */
+ static KeyedVector<uint16_t, const TagDefinition_t*> buildTagMap(
+ const TagDefinition_t* definitions, size_t length);
+
+ protected:
+ enum {
+ DEFAULT_NUM_TAG_MAPS = 4,
+ };
+
+ sp<TiffIfd> findLastIfd();
+ status_t writeFileHeader(EndianOutput& out);
+ const TagDefinition_t* lookupDefinition(uint16_t tag) const;
+ status_t calculateOffsets();
+
+ sp<TiffIfd> mIfd;
+ KeyedVector<uint32_t, sp<TiffIfd> > mNamedIfds;
+ KeyedVector<uint16_t, const TagDefinition_t*>* mTagMaps;
+ size_t mNumTagMaps;
+
+ static KeyedVector<uint16_t, const TagDefinition_t*> sTagMaps[];
+};
+
+template<typename T>
+status_t TiffWriter::buildEntry(uint16_t tag, uint32_t count, const T* data,
+ /*out*/sp<TiffEntry>* outEntry) const {
+ const TagDefinition_t* definition = lookupDefinition(tag);
+
+ if (definition == NULL) {
+ ALOGE("%s: No such tag exists for id %x.", __FUNCTION__, tag);
+ return BAD_INDEX;
+ }
+
+ uint32_t fixedCount = definition->fixedCount;
+ if (fixedCount > 0 && fixedCount != count) {
+ ALOGE("%s: Invalid count %d for tag %x (expects %d).", __FUNCTION__, count, tag,
+ fixedCount);
+ return BAD_VALUE;
+ }
+
+ TagType fixedType = definition->defaultType;
+ if (TiffEntry::forceValidType(fixedType, data) == NULL) {
+ ALOGE("%s: Invalid type used for tag value for tag %x.", __FUNCTION__, tag);
+ return BAD_TYPE;
+ }
+
+ *outEntry = new TiffEntryImpl<T>(tag, fixedType, count,
+ definition->fixedEndian, data);
+
+ return OK;
+}
+
+template<typename T>
+status_t TiffWriter::addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd) {
+ sp<TiffEntry> outEntry;
+
+ status_t ret = buildEntry<T>(tag, count, data, &outEntry);
+ if (ret != OK) {
+ ALOGE("%s: Could not build entry for tag %x.", __FUNCTION__, tag);
+ return ret;
+ }
+
+ return addEntry(outEntry, ifd);
+}
+
+template<typename T>
+sp<TiffEntry> TiffWriter::uncheckedBuildEntry(uint16_t tag, TagType type, uint32_t count,
+ Endianness end, const T* data) {
+ TiffEntryImpl<T>* entry = new TiffEntryImpl<T>(tag, type, count, end, data);
+ return sp<TiffEntry>(entry);
+}
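+
+/**
+ * Usage sketch (illustrative only; error handling is omitted, the ImageWidth
+ * tag ID 0x0100 and the output path are placeholders, and FileOutput is
+ * assumed to be available as an Output implementation):
+ *
+ *   sp<TiffWriter> writer = new TiffWriter(); // default tag definitions
+ *   writer->addIfd(IFD_0);
+ *   uint32_t width = 4000;
+ *   writer->addEntry(0x0100u, 1, &width, IFD_0); // ImageWidth
+ *   // ... add the remaining mandatory tags ...
+ *   FileOutput out(String8("/data/local/tmp/image.dng"));
+ *   out.open();
+ *   writer->write(&out);
+ *   out.close();
+ */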
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+
+#endif /*IMG_UTILS_TIFF_WRITER_H*/
diff --git a/media/img_utils/src/Android.mk b/media/img_utils/src/Android.mk
new file mode 100644
index 0000000..4074849
--- /dev/null
+++ b/media/img_utils/src/Android.mk
@@ -0,0 +1,62 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ EndianUtils.cpp \
+ FileInput.cpp \
+ FileOutput.cpp \
+ SortedEntryVector.cpp \
+ Input.cpp \
+ Output.cpp \
+ Orderable.cpp \
+ TiffIfd.cpp \
+ TiffWritable.cpp \
+ TiffWriter.cpp \
+ TiffEntry.cpp \
+ TiffEntryImpl.cpp \
+ ByteArrayOutput.cpp \
+ DngUtils.cpp \
+ StripSource.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libexpat \
+ libutils \
+ libcutils \
+ libcamera_metadata \
+ libcamera_client
+
+LOCAL_C_INCLUDES += \
+ $(LOCAL_PATH)/../include \
+ system/media/camera/include
+
+LOCAL_CFLAGS += \
+ -Wall \
+ -Wextra \
+ -Werror \
+ -fvisibility=hidden
+
+ifneq ($(filter userdebug eng,$(TARGET_BUILD_VARIANT)),)
+ # Enable assert() in userdebug and eng builds
+ LOCAL_CFLAGS += -UNDEBUG -DLOG_NDEBUG=1
+endif
+
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/../include
+
+LOCAL_MODULE := libimg_utils
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/img_utils/src/ByteArrayOutput.cpp b/media/img_utils/src/ByteArrayOutput.cpp
new file mode 100644
index 0000000..db2d248
--- /dev/null
+++ b/media/img_utils/src/ByteArrayOutput.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/ByteArrayOutput.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+ByteArrayOutput::ByteArrayOutput() {}
+
+ByteArrayOutput::~ByteArrayOutput() {}
+
+status_t ByteArrayOutput::open() {
+ return OK;
+}
+
+status_t ByteArrayOutput::write(const uint8_t* buf, size_t offset, size_t count) {
+ if (mByteArray.appendArray(buf + offset, count) < 0) {
+ ALOGE("%s: Failed to write to ByteArrayOutput.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t ByteArrayOutput::close() {
+ mByteArray.clear();
+ return OK;
+}
+
+size_t ByteArrayOutput::getSize() const {
+ return mByteArray.size();
+}
+
+const uint8_t* ByteArrayOutput::getArray() const {
+ return mByteArray.array();
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp
new file mode 100644
index 0000000..d3b4a35
--- /dev/null
+++ b/media/img_utils/src/DngUtils.cpp
@@ -0,0 +1,282 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/DngUtils.h>
+
+namespace android {
+namespace img_utils {
+
+OpcodeListBuilder::OpcodeListBuilder() : mCount(0), mOpList(), mEndianOut(&mOpList, BIG) {
+ if(mEndianOut.open() != OK) {
+ ALOGE("%s: Open failed.", __FUNCTION__);
+ }
+}
+
+OpcodeListBuilder::~OpcodeListBuilder() {
+ if(mEndianOut.close() != OK) {
+ ALOGE("%s: Close failed.", __FUNCTION__);
+ }
+}
+
+size_t OpcodeListBuilder::getSize() const {
+ return mOpList.getSize() + sizeof(mCount);
+}
+
+uint32_t OpcodeListBuilder::getCount() const {
+ return mCount;
+}
+
+status_t OpcodeListBuilder::buildOpList(uint8_t* buf) const {
+ uint32_t count = convertToBigEndian(mCount);
+ memcpy(buf, &count, sizeof(count));
+ memcpy(buf + sizeof(count), mOpList.getArray(), mOpList.getSize());
+ return OK;
+}
+
+status_t OpcodeListBuilder::addGainMapsForMetadata(uint32_t lsmWidth,
+ uint32_t lsmHeight,
+ uint32_t activeAreaTop,
+ uint32_t activeAreaLeft,
+ uint32_t activeAreaBottom,
+ uint32_t activeAreaRight,
+ CfaLayout cfa,
+ const float* lensShadingMap) {
+ uint32_t activeAreaWidth = activeAreaRight - activeAreaLeft;
+ uint32_t activeAreaHeight = activeAreaBottom - activeAreaTop;
+ double spacingV = 1.0 / lsmHeight;
+ double spacingH = 1.0 / lsmWidth;
+
+ float redMap[lsmWidth * lsmHeight];
+ float greenEvenMap[lsmWidth * lsmHeight];
+ float greenOddMap[lsmWidth * lsmHeight];
+ float blueMap[lsmWidth * lsmHeight];
+
+ size_t lsmMapSize = lsmWidth * lsmHeight * 4;
+
+ // Split lens shading map channels into separate arrays
+ size_t j = 0;
+ for (size_t i = 0; i < lsmMapSize; i += 4, ++j) {
+ redMap[j] = lensShadingMap[i + LSM_R_IND];
+ greenEvenMap[j] = lensShadingMap[i + LSM_GE_IND];
+ greenOddMap[j] = lensShadingMap[i + LSM_GO_IND];
+ blueMap[j] = lensShadingMap[i + LSM_B_IND];
+ }
+
+ uint32_t redTop = 0;
+ uint32_t redLeft = 0;
+ uint32_t greenEvenTop = 0;
+ uint32_t greenEvenLeft = 1;
+ uint32_t greenOddTop = 1;
+ uint32_t greenOddLeft = 0;
+ uint32_t blueTop = 1;
+ uint32_t blueLeft = 1;
+
+ switch(cfa) {
+ case CFA_RGGB:
+ redTop = 0;
+ redLeft = 0;
+ greenEvenTop = 0;
+ greenEvenLeft = 1;
+ greenOddTop = 1;
+ greenOddLeft = 0;
+ blueTop = 1;
+ blueLeft = 1;
+ break;
+ case CFA_GRBG:
+ redTop = 0;
+ redLeft = 1;
+ greenEvenTop = 0;
+ greenEvenLeft = 0;
+ greenOddTop = 1;
+ greenOddLeft = 1;
+ blueTop = 1;
+ blueLeft = 0;
+ break;
+ case CFA_GBRG:
+ redTop = 1;
+ redLeft = 0;
+ greenEvenTop = 0;
+ greenEvenLeft = 0;
+ greenOddTop = 1;
+ greenOddLeft = 1;
+ blueTop = 0;
+ blueLeft = 1;
+ break;
+ case CFA_BGGR:
+ redTop = 1;
+ redLeft = 1;
+ greenEvenTop = 0;
+ greenEvenLeft = 1;
+ greenOddTop = 1;
+ greenOddLeft = 0;
+ blueTop = 0;
+ blueLeft = 0;
+ break;
+ default:
+ ALOGE("%s: Unknown CFA layout %d", __FUNCTION__, cfa);
+ return BAD_VALUE;
+ }
+
+ status_t err = addGainMap(/*top*/redTop,
+ /*left*/redLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/redMap);
+ if (err != OK) return err;
+
+ err = addGainMap(/*top*/greenEvenTop,
+ /*left*/greenEvenLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/greenEvenMap);
+ if (err != OK) return err;
+
+ err = addGainMap(/*top*/greenOddTop,
+ /*left*/greenOddLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/greenOddMap);
+ if (err != OK) return err;
+
+ err = addGainMap(/*top*/blueTop,
+ /*left*/blueLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/blueMap);
+ return err;
+}
+
+status_t OpcodeListBuilder::addGainMap(uint32_t top,
+ uint32_t left,
+ uint32_t bottom,
+ uint32_t right,
+ uint32_t plane,
+ uint32_t planes,
+ uint32_t rowPitch,
+ uint32_t colPitch,
+ uint32_t mapPointsV,
+ uint32_t mapPointsH,
+ double mapSpacingV,
+ double mapSpacingH,
+ double mapOriginV,
+ double mapOriginH,
+ uint32_t mapPlanes,
+ const float* mapGains) {
+
+ uint32_t opcodeId = GAIN_MAP_ID;
+
+ status_t err = mEndianOut.write(&opcodeId, 0, 1);
+ if (err != OK) return err;
+
+ uint8_t version[] = {1, 3, 0, 0};
+ err = mEndianOut.write(version, 0, NELEMS(version));
+ if (err != OK) return err;
+
+ // Mark this opcode as optional so that readers which do not support it can skip it.
+ uint32_t flags = FLAG_OPTIONAL;
+
+ err = mEndianOut.write(&flags, 0, 1);
+ if (err != OK) return err;
+
+ const uint32_t NUMBER_INT_ARGS = 11;
+ const uint32_t NUMBER_DOUBLE_ARGS = 4;
+
+ uint32_t totalSize = NUMBER_INT_ARGS * sizeof(uint32_t) + NUMBER_DOUBLE_ARGS * sizeof(double) +
+ mapPointsV * mapPointsH * mapPlanes * sizeof(float);
+
+ err = mEndianOut.write(&totalSize, 0, 1);
+ if (err != OK) return err;
+
+ // Batch writes as much as possible
+ uint32_t settings1[] = { top,
+ left,
+ bottom,
+ right,
+ plane,
+ planes,
+ rowPitch,
+ colPitch,
+ mapPointsV,
+ mapPointsH };
+
+ err = mEndianOut.write(settings1, 0, NELEMS(settings1));
+ if (err != OK) return err;
+
+ double settings2[] = { mapSpacingV,
+ mapSpacingH,
+ mapOriginV,
+ mapOriginH };
+
+ err = mEndianOut.write(settings2, 0, NELEMS(settings2));
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&mapPlanes, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(mapGains, 0, mapPointsV * mapPointsH * mapPlanes);
+ if (err != OK) return err;
+
+ mCount++;
+
+ return OK;
+}
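+
+/**
+ * Usage sketch (illustrative; lsWidth, lsHeight, the active-area bounds, and
+ * map are placeholders supplied by the caller, and the CFA enum value is
+ * assumed to be the one declared in DngUtils.h):
+ *
+ *   OpcodeListBuilder builder;
+ *   builder.addGainMapsForMetadata(lsWidth, lsHeight, top, left, bottom,
+ *           right, OpcodeListBuilder::CFA_RGGB, map);
+ *   Vector<uint8_t> buf;
+ *   buf.resize(builder.getSize());
+ *   builder.buildOpList(buf.editArray());
+ *   // buf now holds a DNG opcode list, e.g. the value for OpcodeList2 (0xC741).
+ */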
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/EndianUtils.cpp b/media/img_utils/src/EndianUtils.cpp
new file mode 100644
index 0000000..8681cbe
--- /dev/null
+++ b/media/img_utils/src/EndianUtils.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/EndianUtils.h>
+
+namespace android {
+namespace img_utils {
+
+EndianOutput::EndianOutput(Output* out, Endianness end)
+ : mOffset(0), mOutput(out), mEndian(end) {}
+
+EndianOutput::~EndianOutput() {}
+
+status_t EndianOutput::open() {
+ mOffset = 0;
+ return mOutput->open();
+}
+
+status_t EndianOutput::close() {
+ return mOutput->close();
+}
+
+void EndianOutput::setEndianness(Endianness end) {
+ mEndian = end;
+}
+
+uint32_t EndianOutput::getCurrentOffset() const {
+ return mOffset;
+}
+
+Endianness EndianOutput::getEndianness() const {
+ return mEndian;
+}
+
+status_t EndianOutput::write(const uint8_t* buf, size_t offset, size_t count) {
+ status_t res = OK;
+ if((res = mOutput->write(buf, offset, count)) == OK) {
+ mOffset += count;
+ }
+ return res;
+}
+
+status_t EndianOutput::write(const int8_t* buf, size_t offset, size_t count) {
+ return write(reinterpret_cast<const uint8_t*>(buf), offset, count);
+}
+
+#define DEFINE_WRITE(_type_) \
+status_t EndianOutput::write(const _type_* buf, size_t offset, size_t count) { \
+ return writeHelper<_type_>(buf, offset, count); \
+}
+
+DEFINE_WRITE(uint16_t)
+DEFINE_WRITE(int16_t)
+DEFINE_WRITE(uint32_t)
+DEFINE_WRITE(int32_t)
+DEFINE_WRITE(uint64_t)
+DEFINE_WRITE(int64_t)
+
+status_t EndianOutput::write(const float* buf, size_t offset, size_t count) {
+ assert(sizeof(float) == sizeof(uint32_t));
+ return writeHelper<uint32_t>(reinterpret_cast<const uint32_t*>(buf), offset, count);
+}
+
+status_t EndianOutput::write(const double* buf, size_t offset, size_t count) {
+ assert(sizeof(double) == sizeof(uint64_t));
+ return writeHelper<uint64_t>(reinterpret_cast<const uint64_t*>(buf), offset, count);
+}
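+
+/**
+ * Usage sketch (illustrative): byte-swapping writes through an in-memory sink.
+ * ByteArrayOutput is used here as the underlying Output, as it is elsewhere in
+ * this library.
+ *
+ *   ByteArrayOutput sink;
+ *   EndianOutput out(&sink, BIG);
+ *   out.open();
+ *   uint16_t tag = 0x0100u;
+ *   out.write(&tag, 0, 1);        // emitted as the bytes 0x01 0x00
+ *   // out.getCurrentOffset() == 2
+ *   out.close();
+ */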
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/FileInput.cpp b/media/img_utils/src/FileInput.cpp
new file mode 100644
index 0000000..4c85a51
--- /dev/null
+++ b/media/img_utils/src/FileInput.cpp
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/FileInput.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+FileInput::FileInput(String8 path) : mFp(NULL), mPath(path), mOpen(false) {}
+
+FileInput::~FileInput() {
+ if (mOpen) {
+ ALOGE("%s: FileInput destroyed without calling close!", __FUNCTION__);
+ close();
+ }
+
+}
+
+status_t FileInput::open() {
+ if (mOpen) {
+ ALOGW("%s: Open called when file %s already open.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+ mFp = ::fopen(mPath, "rb");
+ if (!mFp) {
+ ALOGE("%s: Could not open file %s", __FUNCTION__, mPath.string());
+ return BAD_VALUE;
+ }
+ mOpen = true;
+ return OK;
+}
+
+ssize_t FileInput::read(uint8_t* buf, size_t offset, size_t count) {
+ if (!mOpen) {
+ ALOGE("%s: Could not read file %s, file not open.", __FUNCTION__, mPath.string());
+ return BAD_VALUE;
+ }
+
+ size_t bytesRead = ::fread(buf + offset, sizeof(uint8_t), count, mFp);
+ int error = ::ferror(mFp);
+ if (error != 0) {
+ ALOGE("%s: Error %d occurred while reading file %s.", __FUNCTION__, error, mPath.string());
+ return BAD_VALUE;
+ }
+
+ // End of file reached
+ if (::feof(mFp) != 0 && bytesRead == 0) {
+ return NOT_ENOUGH_DATA;
+ }
+
+ return bytesRead;
+}
+
+status_t FileInput::close() {
+ if(!mOpen) {
+ ALOGW("%s: Close called when file %s already closed.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+
+ status_t ret = OK;
+ if(::fclose(mFp) != 0) {
+ ALOGE("%s: Failed to close file %s.", __FUNCTION__, mPath.string());
+ ret = BAD_VALUE;
+ }
+ mOpen = false;
+ return ret;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/FileOutput.cpp b/media/img_utils/src/FileOutput.cpp
new file mode 100644
index 0000000..0346762
--- /dev/null
+++ b/media/img_utils/src/FileOutput.cpp
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/FileOutput.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+FileOutput::FileOutput(String8 path) : mFp(NULL), mPath(path), mOpen(false) {}
+
+FileOutput::~FileOutput() {
+ if (mOpen) {
+ ALOGW("%s: Destructor called with %s still open.", __FUNCTION__, mPath.string());
+ close();
+ }
+}
+
+status_t FileOutput::open() {
+ if (mOpen) {
+ ALOGW("%s: Open called when file %s already open.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+ mFp = ::fopen(mPath, "wb");
+ if (!mFp) {
+ ALOGE("%s: Could not open file %s", __FUNCTION__, mPath.string());
+ return BAD_VALUE;
+ }
+ mOpen = true;
+ return OK;
+}
+
+status_t FileOutput::write(const uint8_t* buf, size_t offset, size_t count) {
+ if (!mOpen) {
+ ALOGE("%s: Could not write file %s, file not open.", __FUNCTION__, mPath.string());
+ return BAD_VALUE;
+ }
+
+ ::fwrite(buf + offset, sizeof(uint8_t), count, mFp);
+
+ int error = ::ferror(mFp);
+ if (error != 0) {
+ ALOGE("%s: Error %d occurred while writing file %s.", __FUNCTION__, error, mPath.string());
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t FileOutput::close() {
+ if(!mOpen) {
+ ALOGW("%s: Close called when file %s already closed.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+
+ status_t ret = OK;
+ if(::fclose(mFp) != 0) {
+ ALOGE("%s: Failed to close file %s.", __FUNCTION__, mPath.string());
+ ret = BAD_VALUE;
+ }
+ mOpen = false;
+ return ret;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/Input.cpp b/media/img_utils/src/Input.cpp
new file mode 100644
index 0000000..3782014
--- /dev/null
+++ b/media/img_utils/src/Input.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/Input.h>
+
+namespace android {
+namespace img_utils {
+
+Input::~Input() {}
+
+status_t Input::open() { return OK; }
+
+status_t Input::close() { return OK; }
+
+ssize_t Input::skip(size_t count) {
+ const size_t SKIP_BUF_SIZE = 1024;
+ uint8_t skipBuf[SKIP_BUF_SIZE];
+
+ size_t remaining = count;
+ while (remaining > 0) {
+ size_t amt = (SKIP_BUF_SIZE > remaining) ? remaining : SKIP_BUF_SIZE;
+ ssize_t ret = read(skipBuf, 0, amt);
+ if (ret < 0) {
+ if(ret == NOT_ENOUGH_DATA) {
+ // End of file encountered
+ if (remaining == count) {
+ // Read no bytes, return EOF
+ return NOT_ENOUGH_DATA;
+ } else {
+ // Return num bytes read
+ return count - remaining;
+ }
+ }
+ // Return error code.
+ return ret;
+ }
+ remaining -= ret;
+ }
+ return count;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
diff --git a/media/img_utils/src/Orderable.cpp b/media/img_utils/src/Orderable.cpp
new file mode 100644
index 0000000..300f122
--- /dev/null
+++ b/media/img_utils/src/Orderable.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/Orderable.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+#define COMPARE(op) \
+bool Orderable::operator op (const Orderable& orderable) const { \
+ return getComparableValue() op orderable.getComparableValue(); \
+}
+
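+// For reference, each COMPARE(op) invocation below expands to a definition of the
+// corresponding operator in terms of getComparableValue(); e.g. COMPARE(>) expands to:
+//   bool Orderable::operator > (const Orderable& orderable) const {
+//       return getComparableValue() > orderable.getComparableValue();
+//   }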
+COMPARE(>)
+COMPARE(<)
+COMPARE(>=)
+COMPARE(<=)
+COMPARE(==)
+COMPARE(!=)
+
+Orderable::~Orderable() {}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/Output.cpp b/media/img_utils/src/Output.cpp
new file mode 100644
index 0000000..0e395b9
--- /dev/null
+++ b/media/img_utils/src/Output.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <img_utils/Output.h>
+
+namespace android {
+namespace img_utils {
+
+Output::~Output() {}
+status_t Output::open() { return OK; }
+status_t Output::close() { return OK; }
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/SortedEntryVector.cpp b/media/img_utils/src/SortedEntryVector.cpp
new file mode 100644
index 0000000..f0e1fa1
--- /dev/null
+++ b/media/img_utils/src/SortedEntryVector.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/SortedEntryVector.h>
+
+#include <utils/TypeHelpers.h>
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+SortedEntryVector::~SortedEntryVector() {}
+
+ssize_t SortedEntryVector::indexOfTag(uint16_t tag) const {
+ // TODO: Use binary search here.
+ for (size_t i = 0; i < size(); ++i) {
+ if (itemAt(i)->getTag() == tag) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+int SortedEntryVector::do_compare(const void* lhs, const void* rhs) const {
+ const sp<TiffEntry>* lEntry = reinterpret_cast<const sp<TiffEntry>*>(lhs);
+ const sp<TiffEntry>* rEntry = reinterpret_cast<const sp<TiffEntry>*>(rhs);
+ return compare_type(**lEntry, **rEntry);
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/StripSource.cpp b/media/img_utils/src/StripSource.cpp
new file mode 100644
index 0000000..57b6082
--- /dev/null
+++ b/media/img_utils/src/StripSource.cpp
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/StripSource.h>
+
+namespace android {
+namespace img_utils {
+
+StripSource::~StripSource() {}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffEntry.cpp b/media/img_utils/src/TiffEntry.cpp
new file mode 100644
index 0000000..1b20e36
--- /dev/null
+++ b/media/img_utils/src/TiffEntry.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/TiffEntry.h>
+
+#include <utils/Errors.h>
+#include <utils/StrongPointer.h>
+#include <utils/Vector.h>
+
+namespace android {
+namespace img_utils {
+
+TiffEntry::~TiffEntry() {}
+
+/**
+ * Specialize for each valid type, including sub-IFDs.
+ *
+ * Values with types other than the ones given here should not compile.
+ */
+
+template<>
+const sp<TiffIfd>* TiffEntry::forceValidType<sp<TiffIfd> >(TagType type, const sp<TiffIfd>* value) {
+ if (type == LONG) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'ifd' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const uint8_t* TiffEntry::forceValidType<uint8_t>(TagType type, const uint8_t* value) {
+ if (type == BYTE || type == ASCII || type == UNDEFINED) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'uint8_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const int8_t* TiffEntry::forceValidType<int8_t>(TagType type, const int8_t* value) {
+ if (type == SBYTE || type == ASCII || type == UNDEFINED) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'int8_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const uint16_t* TiffEntry::forceValidType<uint16_t>(TagType type, const uint16_t* value) {
+ if (type == SHORT) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'uint16_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const int16_t* TiffEntry::forceValidType<int16_t>(TagType type, const int16_t* value) {
+ if (type == SSHORT) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'int16_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const uint32_t* TiffEntry::forceValidType<uint32_t>(TagType type, const uint32_t* value) {
+ if (type == LONG || type == RATIONAL) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'uint32_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const int32_t* TiffEntry::forceValidType<int32_t>(TagType type, const int32_t* value) {
+ if (type == SLONG || type == SRATIONAL) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'int32_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const double* TiffEntry::forceValidType<double>(TagType type, const double* value) {
+ if (type == DOUBLE) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'double' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const float* TiffEntry::forceValidType<float>(TagType type, const float* value) {
+ if (type == FLOAT) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'float' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
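+// Produces a human-readable dump of this entry. As an illustration (hypothetical
+// values), an ImageWidth entry holding the single LONG value 4000 would print
+// roughly as: [id: 100, type: 4, count: 1, value: '4000 ']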
+String8 TiffEntry::toString() const {
+ String8 output;
+ uint32_t count = getCount();
+ output.appendFormat("[id: %x, type: %d, count: %u, value: '", getTag(), getType(), count);
+
+ size_t cappedCount = count;
+ if (count > MAX_PRINT_STRING_LENGTH) {
+ cappedCount = MAX_PRINT_STRING_LENGTH;
+ }
+
+ TagType type = getType();
+ switch (type) {
+ case UNDEFINED:
+ case BYTE: {
+ const uint8_t* typed_data = getData<uint8_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%u ", typed_data[i]);
+ }
+ break;
+ }
+ case ASCII: {
+ const char* typed_data = reinterpret_cast<const char*>(getData<uint8_t>());
+ size_t len = count;
+ if (count > MAX_PRINT_STRING_LENGTH) {
+ len = MAX_PRINT_STRING_LENGTH;
+ }
+ output.append(typed_data, len);
+ break;
+ }
+ case SHORT: {
+ const uint16_t* typed_data = getData<uint16_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%u ", typed_data[i]);
+ }
+ break;
+ }
+ case LONG: {
+ const uint32_t* typed_data = getData<uint32_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%u ", typed_data[i]);
+ }
+ break;
+ }
+ case RATIONAL: {
+ const uint32_t* typed_data = getData<uint32_t>();
+ cappedCount <<= 1;
+ for (size_t i = 0; i < cappedCount; i+=2) {
+ output.appendFormat("%u/%u ", typed_data[i], typed_data[i + 1]);
+ }
+ break;
+ }
+ case SBYTE: {
+ const int8_t* typed_data = getData<int8_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%d ", typed_data[i]);
+ }
+ break;
+ }
+ case SSHORT: {
+ const int16_t* typed_data = getData<int16_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%d ", typed_data[i]);
+ }
+ break;
+ }
+ case SLONG: {
+ const int32_t* typed_data = getData<int32_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%d ", typed_data[i]);
+ }
+ break;
+ }
+ case SRATIONAL: {
+ const int32_t* typed_data = getData<int32_t>();
+ cappedCount <<= 1;
+ for (size_t i = 0; i < cappedCount; i+=2) {
+ output.appendFormat("%d/%d ", typed_data[i], typed_data[i + 1]);
+ }
+ break;
+ }
+ case FLOAT: {
+ const float* typed_data = getData<float>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%f ", typed_data[i]);
+ }
+ break;
+ }
+ case DOUBLE: {
+ const double* typed_data = getData<double>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%f ", typed_data[i]);
+ }
+ break;
+ }
+ default: {
+ output.append("unknown type ");
+ break;
+ }
+ }
+
+ if (count > MAX_PRINT_STRING_LENGTH) {
+ output.append("...");
+ }
+ output.append("']");
+ return output;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffEntryImpl.cpp b/media/img_utils/src/TiffEntryImpl.cpp
new file mode 100644
index 0000000..257c266
--- /dev/null
+++ b/media/img_utils/src/TiffEntryImpl.cpp
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/TiffEntryImpl.h>
+
+#include <utils/Vector.h>
+
+namespace android {
+namespace img_utils {
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffIfd.cpp b/media/img_utils/src/TiffIfd.cpp
new file mode 100644
index 0000000..3fb00cc
--- /dev/null
+++ b/media/img_utils/src/TiffIfd.cpp
@@ -0,0 +1,386 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TiffIfd"
+
+#include <img_utils/TagDefinitions.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffWriter.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+TiffIfd::TiffIfd(uint32_t ifdId)
+ : mNextIfd(), mIfdId(ifdId), mStripOffsetsInitialized(false) {}
+
+TiffIfd::~TiffIfd() {}
+
+status_t TiffIfd::addEntry(const sp<TiffEntry>& entry) {
+ size_t size = mEntries.size();
+ if (size >= MAX_IFD_ENTRIES) {
+ ALOGW("%s: Failed to add entry for tag 0x%x to IFD %u, too many entries in IFD!",
+ __FUNCTION__, entry->getTag(), mIfdId);
+ return BAD_INDEX;
+ }
+
+ if (mEntries.add(entry) < 0) {
+ ALOGW("%s: Failed to add entry for tag 0x%x to ifd %u.", __FUNCTION__, entry->getTag(),
+ mIfdId);
+ return BAD_INDEX;
+ }
+ return OK;
+}
+
+sp<TiffEntry> TiffIfd::getEntry(uint16_t tag) const {
+ ssize_t index = mEntries.indexOfTag(tag);
+ if (index < 0) {
+ ALOGW("%s: No entry for tag 0x%x in ifd %u.", __FUNCTION__, tag, mIfdId);
+ return NULL;
+ }
+ return mEntries[index];
+}
+
+void TiffIfd::removeEntry(uint16_t tag) {
+ ssize_t index = mEntries.indexOfTag(tag);
+ if (index >= 0) {
+ mEntries.removeAt(index);
+ }
+}
+
+
+void TiffIfd::setNextIfd(const sp<TiffIfd>& ifd) {
+ mNextIfd = ifd;
+}
+
+sp<TiffIfd> TiffIfd::getNextIfd() const {
+ return mNextIfd;
+}
+
+uint32_t TiffIfd::checkAndGetOffset(uint32_t offset) const {
+ size_t size = mEntries.size();
+
+ if (size > MAX_IFD_ENTRIES) {
+ ALOGW("%s: Could not calculate IFD offsets, IFD %u contains too many entries.",
+ __FUNCTION__, mIfdId);
+ return BAD_OFFSET;
+ }
+
+ if (size <= 0) {
+ ALOGW("%s: Could not calculate IFD offsets, IFD %u contains no entries.", __FUNCTION__,
+ mIfdId);
+ return BAD_OFFSET;
+ }
+
+ if (offset == BAD_OFFSET) {
+ ALOGW("%s: Could not calculate IFD offsets, IFD %u had a bad initial offset.",
+ __FUNCTION__, mIfdId);
+ return BAD_OFFSET;
+ }
+
+ uint32_t ifdSize = calculateIfdSize(size);
+ WORD_ALIGN(ifdSize);
+ return offset + ifdSize;
+}
+
+status_t TiffIfd::writeData(uint32_t offset, /*out*/EndianOutput* out) const {
+ assert((offset % TIFF_WORD_SIZE) == 0);
+ status_t ret = OK;
+
+ ALOGV("%s: IFD %u written to offset %u", __FUNCTION__, mIfdId, offset );
+ uint32_t valueOffset = checkAndGetOffset(offset);
+    if (valueOffset == BAD_OFFSET) {
+ return BAD_VALUE;
+ }
+
+ size_t size = mEntries.size();
+
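+    // Standard TIFF IFD layout: a 2-byte entry count, 'size' 12-byte tag entries,
+    // a 4-byte offset to the next IFD (0 if this is the last one), followed by any
+    // tag values too large to fit inline in their 12-byte entries.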
+    // Write IFD header (2 bytes, number of entries).
+ uint16_t header = static_cast<uint16_t>(size);
+ BAIL_ON_FAIL(out->write(&header, 0, 1), ret);
+
+ // Write tag entries
+ for (size_t i = 0; i < size; ++i) {
+ BAIL_ON_FAIL(mEntries[i]->writeTagInfo(valueOffset, out), ret);
+ valueOffset += mEntries[i]->getSize();
+ }
+
+    // Write IFD footer (4 bytes, offset to next IFD).
+ uint32_t footer = (mNextIfd != NULL) ? offset + getSize() : 0;
+ BAIL_ON_FAIL(out->write(&footer, 0, 1), ret);
+
+ assert(out->getCurrentOffset() == offset + calculateIfdSize(size));
+
+ // Write zeroes till word aligned
+ ZERO_TILL_WORD(out, calculateIfdSize(size), ret);
+
+ // Write values for each tag entry
+ for (size_t i = 0; i < size; ++i) {
+ size_t last = out->getCurrentOffset();
+ // Only write values that are too large to fit in the 12-byte TIFF entry
+ if (mEntries[i]->getSize() > OFFSET_SIZE) {
+ BAIL_ON_FAIL(mEntries[i]->writeData(out->getCurrentOffset(), out), ret);
+ }
+ size_t next = out->getCurrentOffset();
+ size_t diff = (next - last);
+ size_t actual = mEntries[i]->getSize();
+ if (diff != actual) {
+ ALOGW("Sizes do not match for tag %x. Expected %zu, received %zu",
+ mEntries[i]->getTag(), actual, diff);
+ }
+ }
+
+ assert(out->getCurrentOffset() == offset + getSize());
+
+ return ret;
+}
+
+size_t TiffIfd::getSize() const {
+ size_t size = mEntries.size();
+ uint32_t total = calculateIfdSize(size);
+ WORD_ALIGN(total);
+ for (size_t i = 0; i < size; ++i) {
+ total += mEntries[i]->getSize();
+ }
+ return total;
+}
+
+uint32_t TiffIfd::getId() const {
+ return mIfdId;
+}
+
+uint32_t TiffIfd::getComparableValue() const {
+ return mIfdId;
+}
+
+status_t TiffIfd::validateAndSetStripTags() {
+ sp<TiffEntry> widthEntry = getEntry(TAG_IMAGEWIDTH);
+ if (widthEntry == NULL) {
+ ALOGE("%s: IFD %u doesn't have a ImageWidth tag set", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ sp<TiffEntry> heightEntry = getEntry(TAG_IMAGELENGTH);
+ if (heightEntry == NULL) {
+ ALOGE("%s: IFD %u doesn't have a ImageLength tag set", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ sp<TiffEntry> samplesEntry = getEntry(TAG_SAMPLESPERPIXEL);
+ if (samplesEntry == NULL) {
+ ALOGE("%s: IFD %u doesn't have a SamplesPerPixel tag set", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ sp<TiffEntry> bitsEntry = getEntry(TAG_BITSPERSAMPLE);
+ if (bitsEntry == NULL) {
+ ALOGE("%s: IFD %u doesn't have a BitsPerSample tag set", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ uint32_t width = *(widthEntry->getData<uint32_t>());
+ uint32_t height = *(heightEntry->getData<uint32_t>());
+ uint16_t bitsPerSample = *(bitsEntry->getData<uint16_t>());
+ uint16_t samplesPerPixel = *(samplesEntry->getData<uint16_t>());
+
+ if ((bitsPerSample % 8) != 0) {
+ ALOGE("%s: BitsPerSample %d in IFD %u is not byte-aligned.", __FUNCTION__,
+ bitsPerSample, mIfdId);
+ return BAD_VALUE;
+ }
+
+ uint32_t bytesPerSample = bitsPerSample / 8;
+
+ // Choose strip size as close to 8kb as possible without splitting rows.
+ // If the row length is >8kb, each strip will only contain a single row.
+ const uint32_t rowLengthBytes = bytesPerSample * samplesPerPixel * width;
+ const uint32_t idealChunkSize = (1 << 13); // 8kb
+ uint32_t rowsPerChunk = idealChunkSize / rowLengthBytes;
+ rowsPerChunk = (rowsPerChunk == 0) ? 1 : rowsPerChunk;
+ const uint32_t actualChunkSize = rowLengthBytes * rowsPerChunk;
+
+ const uint32_t lastChunkRows = height % rowsPerChunk;
+ const uint32_t lastChunkSize = lastChunkRows * rowLengthBytes;
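+
+    // Worked example (illustrative numbers only): for a 640-pixel-wide image with
+    // 1 sample per pixel and 2 bytes per sample, rowLengthBytes is 1280, so
+    // rowsPerChunk = 8192 / 1280 = 6 and each full strip is 7680 bytes; a
+    // 1000-row image then needs 166 full strips plus one final 4-row, 5120-byte strip.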
+
+ if (actualChunkSize > /*max strip size for TIFF/EP*/65536) {
+ ALOGE("%s: Strip length too long.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ size_t numStrips = height / rowsPerChunk;
+
+ // Add another strip for the incomplete chunk.
+ if (lastChunkRows > 0) {
+ numStrips += 1;
+ }
+
+    // Use the computed number of rows per strip
+    uint32_t rowsPerStripVal = rowsPerChunk;
+ sp<TiffEntry> rowsPerStrip = TiffWriter::uncheckedBuildEntry(TAG_ROWSPERSTRIP, LONG, 1,
+ UNDEFINED_ENDIAN, &rowsPerStripVal);
+
+ if (rowsPerStrip == NULL) {
+ ALOGE("%s: Could not build entry for RowsPerStrip tag.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ Vector<uint32_t> byteCounts;
+
+ for (size_t i = 0; i < numStrips; ++i) {
+ if (lastChunkRows > 0 && i == (numStrips - 1)) {
+ byteCounts.add(lastChunkSize);
+ } else {
+ byteCounts.add(actualChunkSize);
+ }
+ }
+
+ // Set byte counts for each strip
+ sp<TiffEntry> stripByteCounts = TiffWriter::uncheckedBuildEntry(TAG_STRIPBYTECOUNTS, LONG,
+ static_cast<uint32_t>(numStrips), UNDEFINED_ENDIAN, byteCounts.array());
+
+ if (stripByteCounts == NULL) {
+ ALOGE("%s: Could not build entry for StripByteCounts tag.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ Vector<uint32_t> stripOffsetsVector;
+ stripOffsetsVector.resize(numStrips);
+
+ // Set uninitialized offsets
+ sp<TiffEntry> stripOffsets = TiffWriter::uncheckedBuildEntry(TAG_STRIPOFFSETS, LONG,
+ static_cast<uint32_t>(numStrips), UNDEFINED_ENDIAN, stripOffsetsVector.array());
+
+ if (stripOffsets == NULL) {
+ ALOGE("%s: Could not build entry for StripOffsets tag.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+    if (addEntry(stripByteCounts) != OK) {
+        ALOGE("%s: Could not add entry for StripByteCounts to IFD %u", __FUNCTION__, mIfdId);
+        return BAD_VALUE;
+    }
+
+    if (addEntry(rowsPerStrip) != OK) {
+        ALOGE("%s: Could not add entry for RowsPerStrip to IFD %u", __FUNCTION__, mIfdId);
+        return BAD_VALUE;
+    }
+
+    if (addEntry(stripOffsets) != OK) {
+        ALOGE("%s: Could not add entry for StripOffsets to IFD %u", __FUNCTION__, mIfdId);
+        return BAD_VALUE;
+    }
+
+ mStripOffsetsInitialized = true;
+ return OK;
+}
+
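+// Note: despite the name, this returns mStripOffsetsInitialized, which is set once
+// strip tags have been added with placeholder offsets; TiffWriter uses it to decide
+// which IFDs still need real strip offsets filled in via setStripOffset().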
+bool TiffIfd::uninitializedOffsets() const {
+ return mStripOffsetsInitialized;
+}
+
+status_t TiffIfd::setStripOffset(uint32_t offset) {
+
+ // Get old offsets and bytecounts
+ sp<TiffEntry> oldOffsets = getEntry(TAG_STRIPOFFSETS);
+ if (oldOffsets == NULL) {
+ ALOGE("%s: IFD %u does not contain StripOffsets entry.", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ sp<TiffEntry> stripByteCounts = getEntry(TAG_STRIPBYTECOUNTS);
+ if (stripByteCounts == NULL) {
+ ALOGE("%s: IFD %u does not contain StripByteCounts entry.", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ uint32_t offsetsCount = oldOffsets->getCount();
+ uint32_t byteCount = stripByteCounts->getCount();
+ if (offsetsCount != byteCount) {
+ ALOGE("%s: StripOffsets count (%u) doesn't match StripByteCounts count (%u) in IFD %u",
+ __FUNCTION__, offsetsCount, byteCount, mIfdId);
+ return BAD_VALUE;
+ }
+
+ const uint32_t* stripByteCountsArray = stripByteCounts->getData<uint32_t>();
+
+ size_t numStrips = offsetsCount;
+
+ Vector<uint32_t> stripOffsets;
+
+ // Calculate updated byte offsets
+ for (size_t i = 0; i < numStrips; ++i) {
+ stripOffsets.add(offset);
+ offset += stripByteCountsArray[i];
+ }
+
+ sp<TiffEntry> newOffsets = TiffWriter::uncheckedBuildEntry(TAG_STRIPOFFSETS, LONG,
+ static_cast<uint32_t>(numStrips), UNDEFINED_ENDIAN, stripOffsets.array());
+
+ if (newOffsets == NULL) {
+ ALOGE("%s: Coult not build updated offsets entry in IFD %u", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ if (addEntry(newOffsets) != OK) {
+ ALOGE("%s: Failed to add updated offsets entry in IFD %u", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+uint32_t TiffIfd::getStripSize() const {
+ sp<TiffEntry> stripByteCounts = getEntry(TAG_STRIPBYTECOUNTS);
+ if (stripByteCounts == NULL) {
+ ALOGE("%s: IFD %u does not contain StripByteCounts entry.", __FUNCTION__, mIfdId);
+ return BAD_VALUE;
+ }
+
+ uint32_t count = stripByteCounts->getCount();
+ const uint32_t* byteCounts = stripByteCounts->getData<uint32_t>();
+
+ uint32_t total = 0;
+ for (size_t i = 0; i < static_cast<size_t>(count); ++i) {
+ total += byteCounts[i];
+ }
+ return total;
+}
+
+String8 TiffIfd::toString() const {
+ size_t s = mEntries.size();
+ String8 output;
+ output.appendFormat("[ifd: %x, num_entries: %zu, entries:\n", getId(), s);
+ for(size_t i = 0; i < mEntries.size(); ++i) {
+ output.append("\t");
+ output.append(mEntries[i]->toString());
+ output.append("\n");
+ }
+ output.append(", next_ifd: %x]", ((mNextIfd != NULL) ? mNextIfd->getId() : 0));
+ return output;
+}
+
+void TiffIfd::log() const {
+ size_t s = mEntries.size();
+ ALOGI("[ifd: %x, num_entries: %zu, entries:\n", getId(), s);
+ for(size_t i = 0; i < s; ++i) {
+ ALOGI("\t%s", mEntries[i]->toString().string());
+ }
+ ALOGI(", next_ifd: %x]", ((mNextIfd != NULL) ? mNextIfd->getId() : 0));
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffWritable.cpp b/media/img_utils/src/TiffWritable.cpp
new file mode 100644
index 0000000..f8d7de7
--- /dev/null
+++ b/media/img_utils/src/TiffWritable.cpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <img_utils/TiffWritable.h>
+#include <img_utils/TiffHelpers.h>
+
+#include <assert.h>
+
+namespace android {
+namespace img_utils {
+
+TiffWritable::TiffWritable() {}
+
+TiffWritable::~TiffWritable() {}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffWriter.cpp b/media/img_utils/src/TiffWriter.cpp
new file mode 100644
index 0000000..a6f9218
--- /dev/null
+++ b/media/img_utils/src/TiffWriter.cpp
@@ -0,0 +1,390 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TiffWriter"
+
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/TiffWriter.h>
+#include <img_utils/TagDefinitions.h>
+
+#include <assert.h>
+
+namespace android {
+namespace img_utils {
+
+KeyedVector<uint16_t, const TagDefinition_t*> TiffWriter::buildTagMap(
+ const TagDefinition_t* definitions, size_t length) {
+ KeyedVector<uint16_t, const TagDefinition_t*> map;
+ for(size_t i = 0; i < length; ++i) {
+ map.add(definitions[i].tagId, definitions + i);
+ }
+ return map;
+}
+
+#define ARRAY_SIZE(array) \
+ (sizeof(array) / sizeof(array[0]))
+
+KeyedVector<uint16_t, const TagDefinition_t*> TiffWriter::sTagMaps[] = {
+ buildTagMap(TIFF_EP_TAG_DEFINITIONS, ARRAY_SIZE(TIFF_EP_TAG_DEFINITIONS)),
+ buildTagMap(DNG_TAG_DEFINITIONS, ARRAY_SIZE(DNG_TAG_DEFINITIONS)),
+ buildTagMap(EXIF_2_3_TAG_DEFINITIONS, ARRAY_SIZE(EXIF_2_3_TAG_DEFINITIONS)),
+ buildTagMap(TIFF_6_TAG_DEFINITIONS, ARRAY_SIZE(TIFF_6_TAG_DEFINITIONS))
+};
+
+TiffWriter::TiffWriter() : mTagMaps(sTagMaps), mNumTagMaps(DEFAULT_NUM_TAG_MAPS) {}
+
+TiffWriter::TiffWriter(KeyedVector<uint16_t, const TagDefinition_t*>* enabledDefinitions,
+ size_t length) : mTagMaps(enabledDefinitions), mNumTagMaps(length) {}
+
+TiffWriter::~TiffWriter() {}
+
+status_t TiffWriter::write(Output* out, StripSource** sources, size_t sourcesCount,
+ Endianness end) {
+ status_t ret = OK;
+ EndianOutput endOut(out, end);
+
+ if (mIfd == NULL) {
+ ALOGE("%s: Tiff header is empty.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ uint32_t totalSize = getTotalSize();
+
+ KeyedVector<uint32_t, uint32_t> offsetVector;
+
+ for (size_t i = 0; i < mNamedIfds.size(); ++i) {
+ if (mNamedIfds[i]->uninitializedOffsets()) {
+ uint32_t stripSize = mNamedIfds[i]->getStripSize();
+ if (mNamedIfds[i]->setStripOffset(totalSize) != OK) {
+ ALOGE("%s: Could not set strip offsets.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ totalSize += stripSize;
+ WORD_ALIGN(totalSize);
+ offsetVector.add(mNamedIfds.keyAt(i), totalSize);
+ }
+ }
+
+ size_t offVecSize = offsetVector.size();
+ if (offVecSize != sourcesCount) {
+ ALOGE("%s: Mismatch between number of IFDs with uninitialized strips (%zu) and"
+ " sources (%zu).", __FUNCTION__, offVecSize, sourcesCount);
+ return BAD_VALUE;
+ }
+
+ BAIL_ON_FAIL(writeFileHeader(endOut), ret);
+
+ uint32_t offset = FILE_HEADER_SIZE;
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ BAIL_ON_FAIL(ifd->writeData(offset, &endOut), ret);
+ offset += ifd->getSize();
+ ifd = ifd->getNextIfd();
+ }
+
+ if (LOG_NDEBUG == 0) {
+ log();
+ }
+
+ for (size_t i = 0; i < offVecSize; ++i) {
+ uint32_t ifdKey = offsetVector.keyAt(i);
+        uint32_t sizeToWrite = mNamedIfds.valueFor(ifdKey)->getStripSize();
+        bool found = false;
+        for (size_t j = 0; j < sourcesCount; ++j) {
+            if (sources[j]->getIfd() == ifdKey) {
+                if ((ret = sources[j]->writeToStream(endOut, sizeToWrite)) != OK) {
+ ALOGE("%s: Could not write to stream, received %d.", __FUNCTION__, ret);
+ return ret;
+ }
+ ZERO_TILL_WORD(&endOut, sizeToWrite, ret);
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ ALOGE("%s: No stream for byte strips for IFD %u", __FUNCTION__, ifdKey);
+ return BAD_VALUE;
+ }
+ assert(offsetVector[i] == endOut.getCurrentOffset());
+ }
+
+ return ret;
+}
+
+status_t TiffWriter::write(Output* out, Endianness end) {
+ status_t ret = OK;
+ EndianOutput endOut(out, end);
+
+ if (mIfd == NULL) {
+ ALOGE("%s: Tiff header is empty.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ BAIL_ON_FAIL(writeFileHeader(endOut), ret);
+
+ uint32_t offset = FILE_HEADER_SIZE;
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ BAIL_ON_FAIL(ifd->writeData(offset, &endOut), ret);
+ offset += ifd->getSize();
+ ifd = ifd->getNextIfd();
+ }
+ return ret;
+}
+
+
+const TagDefinition_t* TiffWriter::lookupDefinition(uint16_t tag) const {
+ const TagDefinition_t* definition = NULL;
+ for (size_t i = 0; i < mNumTagMaps; ++i) {
+ ssize_t index = mTagMaps[i].indexOfKey(tag);
+ if (index >= 0) {
+ definition = mTagMaps[i][index];
+ break;
+ }
+ }
+
+ if (definition == NULL) {
+ ALOGE("%s: No definition exists for tag with id %x.", __FUNCTION__, tag);
+ }
+ return definition;
+}
+
+sp<TiffEntry> TiffWriter::getEntry(uint16_t tag, uint32_t ifd) const {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index < 0) {
+ ALOGE("%s: No IFD %d set for this writer.", __FUNCTION__, ifd);
+ return NULL;
+ }
+ return mNamedIfds[index]->getEntry(tag);
+}
+
+void TiffWriter::removeEntry(uint16_t tag, uint32_t ifd) {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index >= 0) {
+ mNamedIfds[index]->removeEntry(tag);
+ }
+}
+
+status_t TiffWriter::addEntry(const sp<TiffEntry>& entry, uint32_t ifd) {
+ uint16_t tag = entry->getTag();
+
+ const TagDefinition_t* definition = lookupDefinition(tag);
+
+ if (definition == NULL) {
+ ALOGE("%s: No definition exists for tag 0x%x.", __FUNCTION__, tag);
+ return BAD_INDEX;
+ }
+
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+
+    // The target IFD must already exist; entries can only be added to known IFDs
+ if (index < 0) {
+ ALOGE("%s: No IFD %u exists.", __FUNCTION__, ifd);
+ return NAME_NOT_FOUND;
+ }
+
+ sp<TiffIfd> selectedIfd = mNamedIfds[index];
+ return selectedIfd->addEntry(entry);
+}
+
+status_t TiffWriter::addStrip(uint32_t ifd) {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index < 0) {
+ ALOGE("%s: Ifd %u doesn't exist, cannot add strip entries.", __FUNCTION__, ifd);
+ return BAD_VALUE;
+ }
+ sp<TiffIfd> selected = mNamedIfds[index];
+ return selected->validateAndSetStripTags();
+}
+
+status_t TiffWriter::addIfd(uint32_t ifd) {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index >= 0) {
+ ALOGE("%s: Ifd with ID 0x%x already exists.", __FUNCTION__, ifd);
+ return BAD_VALUE;
+ }
+
+ sp<TiffIfd> newIfd = new TiffIfd(ifd);
+ if (mIfd == NULL) {
+ mIfd = newIfd;
+ } else {
+ sp<TiffIfd> last = findLastIfd();
+ last->setNextIfd(newIfd);
+ }
+
+ if(mNamedIfds.add(ifd, newIfd) < 0) {
+ ALOGE("%s: Failed to add new IFD 0x%x.", __FUNCTION__, ifd);
+ return BAD_VALUE;
+ }
+
+ return OK;
+}
+
+status_t TiffWriter::addSubIfd(uint32_t parentIfd, uint32_t ifd, SubIfdType type) {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index >= 0) {
+ ALOGE("%s: Ifd with ID 0x%x already exists.", __FUNCTION__, ifd);
+ return BAD_VALUE;
+ }
+
+ ssize_t parentIndex = mNamedIfds.indexOfKey(parentIfd);
+ if (parentIndex < 0) {
+ ALOGE("%s: Parent IFD with ID 0x%x does not exist.", __FUNCTION__, parentIfd);
+ return BAD_VALUE;
+ }
+
+ sp<TiffIfd> parent = mNamedIfds[parentIndex];
+ sp<TiffIfd> newIfd = new TiffIfd(ifd);
+
+ uint16_t subIfdTag;
+ if (type == SUBIFD) {
+ subIfdTag = TAG_SUBIFDS;
+ } else if (type == GPSINFO) {
+ subIfdTag = TAG_GPSINFO;
+ } else {
+ ALOGE("%s: Unknown SubIFD type %d.", __FUNCTION__, type);
+ return BAD_VALUE;
+ }
+
+ sp<TiffEntry> subIfds = parent->getEntry(subIfdTag);
+ if (subIfds == NULL) {
+ if (buildEntry(subIfdTag, 1, &newIfd, &subIfds) < 0) {
+ ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd);
+ return BAD_VALUE;
+ }
+ } else {
+ if (type == GPSINFO) {
+ ALOGE("%s: Cannot add GPSInfo SubIFD to IFD %u, one already exists.", __FUNCTION__,
+ ifd);
+ return BAD_VALUE;
+ }
+
+ Vector<sp<TiffIfd> > subIfdList;
+ const sp<TiffIfd>* oldIfdArray = subIfds->getData<sp<TiffIfd> >();
+ if (subIfdList.appendArray(oldIfdArray, subIfds->getCount()) < 0) {
+ ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd);
+ return BAD_VALUE;
+ }
+
+ if (subIfdList.add(newIfd) < 0) {
+ ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd);
+ return BAD_VALUE;
+ }
+
+ uint32_t count = subIfdList.size();
+ if (buildEntry(subIfdTag, count, subIfdList.array(), &subIfds) < 0) {
+ ALOGE("%s: Failed to build SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd);
+ return BAD_VALUE;
+ }
+ }
+
+ if (parent->addEntry(subIfds) < 0) {
+ ALOGE("%s: Failed to add SubIfd entry in IFD 0x%x.", __FUNCTION__, parentIfd);
+ return BAD_VALUE;
+ }
+
+ if(mNamedIfds.add(ifd, newIfd) < 0) {
+ ALOGE("%s: Failed to add new IFD 0x%x.", __FUNCTION__, ifd);
+ return BAD_VALUE;
+ }
+
+ return OK;
+}
+
+TagType TiffWriter::getDefaultType(uint16_t tag) const {
+ const TagDefinition_t* definition = lookupDefinition(tag);
+ if (definition == NULL) {
+ ALOGE("%s: Could not find definition for tag %x", __FUNCTION__, tag);
+ return UNKNOWN_TAGTYPE;
+ }
+ return definition->defaultType;
+}
+
+uint32_t TiffWriter::getDefaultCount(uint16_t tag) const {
+ const TagDefinition_t* definition = lookupDefinition(tag);
+ if (definition == NULL) {
+ ALOGE("%s: Could not find definition for tag %x", __FUNCTION__, tag);
+ return 0;
+ }
+ return definition->fixedCount;
+}
+
+bool TiffWriter::hasIfd(uint32_t ifd) const {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ return index >= 0;
+}
+
+bool TiffWriter::checkIfDefined(uint16_t tag) const {
+ return lookupDefinition(tag) != NULL;
+}
+
+const char* TiffWriter::getTagName(uint16_t tag) const {
+ const TagDefinition_t* definition = lookupDefinition(tag);
+ if (definition == NULL) {
+ return NULL;
+ }
+ return definition->tagName;
+}
+
+sp<TiffIfd> TiffWriter::findLastIfd() {
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ sp<TiffIfd> nextIfd = ifd->getNextIfd();
+ if (nextIfd == NULL) {
+ break;
+ }
+ ifd = nextIfd;
+ }
+ return ifd;
+}
+
+status_t TiffWriter::writeFileHeader(EndianOutput& out) {
+ status_t ret = OK;
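+    // The standard TIFF preamble: a 2-byte byte-order marker ("II" for
+    // little-endian, "MM" for big-endian), the 2-byte TIFF magic number (42), and
+    // the 4-byte offset of the first IFD, which here starts immediately after the
+    // header (FILE_HEADER_SIZE).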
+ uint16_t endMarker = (out.getEndianness() == BIG) ? BIG_ENDIAN_MARKER : LITTLE_ENDIAN_MARKER;
+ BAIL_ON_FAIL(out.write(&endMarker, 0, 1), ret);
+
+ uint16_t tiffMarker = TIFF_FILE_MARKER;
+ BAIL_ON_FAIL(out.write(&tiffMarker, 0, 1), ret);
+
+ uint32_t offsetMarker = FILE_HEADER_SIZE;
+ BAIL_ON_FAIL(out.write(&offsetMarker, 0, 1), ret);
+ return ret;
+}
+
+uint32_t TiffWriter::getTotalSize() const {
+ uint32_t totalSize = FILE_HEADER_SIZE;
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ totalSize += ifd->getSize();
+ ifd = ifd->getNextIfd();
+ }
+ return totalSize;
+}
+
+void TiffWriter::log() const {
+ ALOGI("%s: TiffWriter:", __FUNCTION__);
+ size_t length = mNamedIfds.size();
+ for (size_t i = 0; i < length; ++i) {
+ mNamedIfds[i]->log();
+ }
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/libcpustats/Android.mk b/media/libcpustats/Android.mk
index b506353..ee283a6 100644
--- a/media/libcpustats/Android.mk
+++ b/media/libcpustats/Android.mk
@@ -1,4 +1,4 @@
-LOCAL_PATH:= $(call my-dir)
+LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
@@ -8,4 +8,6 @@ LOCAL_SRC_FILES := \
LOCAL_MODULE := libcpustats
+LOCAL_CFLAGS := -std=gnu++11 -Werror
+
include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libcpustats/ThreadCpuUsage.cpp b/media/libcpustats/ThreadCpuUsage.cpp
index 637402a..b43b36c 100644
--- a/media/libcpustats/ThreadCpuUsage.cpp
+++ b/media/libcpustats/ThreadCpuUsage.cpp
@@ -19,9 +19,9 @@
#include <errno.h>
#include <stdlib.h>
+#include <string.h>
#include <time.h>
-#include <utils/Debug.h>
#include <utils/Log.h>
#include <cpustats/ThreadCpuUsage.h>
@@ -75,7 +75,6 @@ bool ThreadCpuUsage::setEnabled(bool isEnabled)
bool ThreadCpuUsage::sampleAndEnable(double& ns)
{
- bool ret;
bool wasEverEnabled = mWasEverEnabled;
if (enable()) {
// already enabled, so add a new sample relative to previous
@@ -218,7 +217,7 @@ uint32_t ThreadCpuUsage::getCpukHz(int cpuNum)
#define FREQ_SIZE 64
char freq_path[FREQ_SIZE];
#define FREQ_DIGIT 27
- COMPILE_TIME_ASSERT_FUNCTION_SCOPE(MAX_CPU <= 10);
+ static_assert(MAX_CPU <= 10, "MAX_CPU too large");
#define FREQ_PATH "/sys/devices/system/cpu/cpu?/cpufreq/scaling_cur_freq"
strlcpy(freq_path, FREQ_PATH, sizeof(freq_path));
freq_path[FREQ_DIGIT] = cpuNum + '0';
diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk
index 2bb6dbe..e0ca8af 100644
--- a/media/libeffects/downmix/Android.mk
+++ b/media/libeffects/downmix/Android.mk
@@ -15,16 +15,10 @@ LOCAL_MODULE_TAGS := optional
LOCAL_MODULE_RELATIVE_PATH := soundfx
-ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
-endif
-
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
-LOCAL_PRELINK_MODULE := false
-
LOCAL_CFLAGS += -fvisibility=hidden
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
index a39d837..6686f27 100644
--- a/media/libeffects/downmix/EffectDownmix.c
+++ b/media/libeffects/downmix/EffectDownmix.c
@@ -16,7 +16,8 @@
#define LOG_TAG "EffectDownmix"
//#define LOG_NDEBUG 0
-#include <cutils/log.h>
+#include <log/log.h>
+#include <inttypes.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
@@ -29,25 +30,13 @@
#define MINUS_3_DB_IN_Q19_12 2896 // -3dB = 0.707 * 2^12 = 2896
+// subset of possible audio_channel_mask_t values, and AUDIO_CHANNEL_OUT_* renamed to CHANNEL_MASK_*
typedef enum {
- CHANNEL_MASK_SURROUND = AUDIO_CHANNEL_OUT_SURROUND,
- CHANNEL_MASK_QUAD_BACK = AUDIO_CHANNEL_OUT_QUAD,
- // like AUDIO_CHANNEL_OUT_QUAD with *_SIDE_* instead of *_BACK_*, same channel order
- CHANNEL_MASK_QUAD_SIDE =
- AUDIO_CHANNEL_OUT_FRONT_LEFT |
- AUDIO_CHANNEL_OUT_FRONT_RIGHT |
- AUDIO_CHANNEL_OUT_SIDE_LEFT |
- AUDIO_CHANNEL_OUT_SIDE_RIGHT,
- CHANNEL_MASK_5POINT1_BACK = AUDIO_CHANNEL_OUT_5POINT1,
- // like AUDIO_CHANNEL_OUT_5POINT1 with *_SIDE_* instead of *_BACK_*, same channel order
- CHANNEL_MASK_5POINT1_SIDE =
- AUDIO_CHANNEL_OUT_FRONT_LEFT |
- AUDIO_CHANNEL_OUT_FRONT_RIGHT |
- AUDIO_CHANNEL_OUT_FRONT_CENTER |
- AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
- AUDIO_CHANNEL_OUT_SIDE_LEFT |
- AUDIO_CHANNEL_OUT_SIDE_RIGHT,
- CHANNEL_MASK_7POINT1_SIDE_BACK = AUDIO_CHANNEL_OUT_7POINT1,
+ CHANNEL_MASK_QUAD_BACK = AUDIO_CHANNEL_OUT_QUAD_BACK,
+ CHANNEL_MASK_QUAD_SIDE = AUDIO_CHANNEL_OUT_QUAD_SIDE,
+ CHANNEL_MASK_5POINT1_BACK = AUDIO_CHANNEL_OUT_5POINT1_BACK,
+ CHANNEL_MASK_5POINT1_SIDE = AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+ CHANNEL_MASK_7POINT1 = AUDIO_CHANNEL_OUT_7POINT1,
} downmix_input_channel_mask_t;
// effect_handle_t interface implementation for downmix effect
@@ -99,7 +88,7 @@ const int kNbEffects = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *
// strictly for testing, logs the indices of the channels for a given mask,
// uses the same code as Downmix_foldGeneric()
void Downmix_testIndexComputation(uint32_t mask) {
- ALOGI("Testing index computation for 0x%x:", mask);
+ ALOGI("Testing index computation for 0x%" PRIx32 ":", mask);
// check against unsupported channels
if (mask & kUnsupported) {
ALOGE("Unsupported channels (top or front left/right of center)");
@@ -129,7 +118,7 @@ void Downmix_testIndexComputation(uint32_t mask) {
hasBacks = true;
}
- const int numChan = popcount(mask);
+ const int numChan = audio_channel_count_from_out_mask(mask);
const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
const bool hasLFE =
((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
@@ -220,7 +209,7 @@ int32_t DownmixLib_Create(const effect_uuid_t *uuid,
*pHandle = (effect_handle_t) module;
- ALOGV("DownmixLib_Create() %p , size %d", module, sizeof(downmix_module_t));
+ ALOGV("DownmixLib_Create() %p , size %zu", module, sizeof(downmix_module_t));
return 0;
}
@@ -254,7 +243,7 @@ int32_t DownmixLib_GetDescriptor(const effect_uuid_t *uuid, effect_descriptor_t
ALOGV("DownmixLib_GetDescriptor() i=%d", i);
if (memcmp(uuid, &gDescriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) {
memcpy(pDescriptor, gDescriptors[i], sizeof(effect_descriptor_t));
- ALOGV("EffectGetDescriptor - UUID matched downmix type %d, UUID = %x",
+ ALOGV("EffectGetDescriptor - UUID matched downmix type %d, UUID = %" PRIx32,
i, gDescriptors[i]->uuid.timeLow);
return 0;
}
@@ -328,7 +317,7 @@ static int Downmix_Process(effect_handle_t self,
// bypass the optimized downmix routines for the common formats
if (!Downmix_foldGeneric(
downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
- ALOGE("Multichannel configuration 0x%x is not supported", downmixInputChannelMask);
+ ALOGE("Multichannel configuration 0x%" PRIx32 " is not supported", downmixInputChannelMask);
return -EINVAL;
}
break;
@@ -339,20 +328,17 @@ static int Downmix_Process(effect_handle_t self,
case CHANNEL_MASK_QUAD_SIDE:
Downmix_foldFromQuad(pSrc, pDst, numFrames, accumulate);
break;
- case CHANNEL_MASK_SURROUND:
- Downmix_foldFromSurround(pSrc, pDst, numFrames, accumulate);
- break;
case CHANNEL_MASK_5POINT1_BACK:
case CHANNEL_MASK_5POINT1_SIDE:
Downmix_foldFrom5Point1(pSrc, pDst, numFrames, accumulate);
break;
- case CHANNEL_MASK_7POINT1_SIDE_BACK:
+ case CHANNEL_MASK_7POINT1:
Downmix_foldFrom7Point1(pSrc, pDst, numFrames, accumulate);
break;
default:
if (!Downmix_foldGeneric(
downmixInputChannelMask, pSrc, pDst, numFrames, accumulate)) {
- ALOGE("Multichannel configuration 0x%x is not supported", downmixInputChannelMask);
+ ALOGE("Multichannel configuration 0x%" PRIx32 " is not supported", downmixInputChannelMask);
return -EINVAL;
}
break;
@@ -380,7 +366,7 @@ static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdS
pDownmixer = (downmix_object_t*) &pDwmModule->context;
- ALOGV("Downmix_Command command %d cmdSize %d",cmdCode, cmdSize);
+ ALOGV("Downmix_Command command %" PRIu32 " cmdSize %" PRIu32, cmdCode, cmdSize);
switch (cmdCode) {
case EFFECT_CMD_INIT:
@@ -404,7 +390,7 @@ static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdS
break;
case EFFECT_CMD_GET_PARAM:
- ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM pCmdData %p, *replySize %d, pReplyData: %p",
+ ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM pCmdData %p, *replySize %" PRIu32 ", pReplyData: %p",
pCmdData, *replySize, pReplyData);
if (pCmdData == NULL || cmdSize < (int)(sizeof(effect_param_t) + sizeof(int32_t)) ||
pReplyData == NULL ||
@@ -413,7 +399,7 @@ static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdS
}
effect_param_t *rep = (effect_param_t *) pReplyData;
memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + sizeof(int32_t));
- ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM param %d, replySize %d",
+ ALOGV("Downmix_Command EFFECT_CMD_GET_PARAM param %" PRId32 ", replySize %" PRIu32,
*(int32_t *)rep->data, rep->vsize);
rep->status = Downmix_getParameter(pDownmixer, *(int32_t *)rep->data, &rep->vsize,
rep->data + sizeof(int32_t));
@@ -421,8 +407,8 @@ static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdS
break;
case EFFECT_CMD_SET_PARAM:
- ALOGV("Downmix_Command EFFECT_CMD_SET_PARAM cmdSize %d pCmdData %p, *replySize %d, " \
- "pReplyData %p", cmdSize, pCmdData, *replySize, pReplyData);
+ ALOGV("Downmix_Command EFFECT_CMD_SET_PARAM cmdSize %d pCmdData %p, *replySize %" PRIu32
+ ", pReplyData %p", cmdSize, pCmdData, *replySize, pReplyData);
if (pCmdData == NULL || (cmdSize < (int)(sizeof(effect_param_t) + sizeof(int32_t)))
|| pReplyData == NULL || *replySize != (int)sizeof(int32_t)) {
return -EINVAL;
@@ -471,7 +457,7 @@ static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdS
return -EINVAL;
}
// FIXME change type if playing on headset vs speaker
- ALOGV("Downmix_Command EFFECT_CMD_SET_DEVICE: 0x%08x", *(uint32_t *)pCmdData);
+ ALOGV("Downmix_Command EFFECT_CMD_SET_DEVICE: 0x%08" PRIx32, *(uint32_t *)pCmdData);
break;
case EFFECT_CMD_SET_VOLUME: {
@@ -491,7 +477,7 @@ static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdS
if (pCmdData == NULL || cmdSize != (int)sizeof(uint32_t)) {
return -EINVAL;
}
- ALOGV("Downmix_Command EFFECT_CMD_SET_AUDIO_MODE: %d", *(uint32_t *)pCmdData);
+ ALOGV("Downmix_Command EFFECT_CMD_SET_AUDIO_MODE: %" PRIu32, *(uint32_t *)pCmdData);
break;
case EFFECT_CMD_SET_CONFIG_REVERSE:
@@ -500,7 +486,7 @@ static int Downmix_Command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdS
break;
default:
- ALOGW("Downmix_Command invalid command %d",cmdCode);
+ ALOGW("Downmix_Command invalid command %" PRIu32, cmdCode);
return -EINVAL;
}
@@ -643,7 +629,8 @@ int Downmix_Configure(downmix_module_t *pDwmModule, effect_config_t *pConfig, bo
ALOGE("Downmix_Configure error: input channel mask can't be 0");
return -EINVAL;
}
- pDownmixer->input_channel_count = popcount(pConfig->inputCfg.channels);
+ pDownmixer->input_channel_count =
+ audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
}
Downmix_Reset(pDownmixer, init);
@@ -702,28 +689,28 @@ int Downmix_Reset(downmix_object_t *pDownmixer, bool init) {
int Downmix_setParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t size, void *pValue) {
int16_t value16;
- ALOGV("Downmix_setParameter, context %p, param %d, value16 %d, value32 %d",
+ ALOGV("Downmix_setParameter, context %p, param %" PRId32 ", value16 %" PRId16 ", value32 %" PRId32,
pDownmixer, param, *(int16_t *)pValue, *(int32_t *)pValue);
switch (param) {
case DOWNMIX_PARAM_TYPE:
if (size != sizeof(downmix_type_t)) {
- ALOGE("Downmix_setParameter(DOWNMIX_PARAM_TYPE) invalid size %u, should be %zu",
+ ALOGE("Downmix_setParameter(DOWNMIX_PARAM_TYPE) invalid size %" PRIu32 ", should be %zu",
size, sizeof(downmix_type_t));
return -EINVAL;
}
value16 = *(int16_t *)pValue;
- ALOGV("set DOWNMIX_PARAM_TYPE, type %d", value16);
+ ALOGV("set DOWNMIX_PARAM_TYPE, type %" PRId16, value16);
if (!((value16 > DOWNMIX_TYPE_INVALID) && (value16 <= DOWNMIX_TYPE_LAST))) {
- ALOGE("Downmix_setParameter invalid DOWNMIX_PARAM_TYPE value %d", value16);
+ ALOGE("Downmix_setParameter invalid DOWNMIX_PARAM_TYPE value %" PRId16, value16);
return -EINVAL;
} else {
pDownmixer->type = (downmix_type_t) value16;
break;
default:
- ALOGE("Downmix_setParameter unknown parameter %d", param);
+ ALOGE("Downmix_setParameter unknown parameter %" PRId32, param);
return -EINVAL;
}
}
@@ -762,17 +749,17 @@ int Downmix_getParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t *
case DOWNMIX_PARAM_TYPE:
if (*pSize < sizeof(int16_t)) {
- ALOGE("Downmix_getParameter invalid parameter size %zu for DOWNMIX_PARAM_TYPE", *pSize);
+ ALOGE("Downmix_getParameter invalid parameter size %" PRIu32 " for DOWNMIX_PARAM_TYPE", *pSize);
return -EINVAL;
}
pValue16 = (int16_t *)pValue;
*pValue16 = (int16_t) pDownmixer->type;
*pSize = sizeof(int16_t);
- ALOGV("Downmix_getParameter DOWNMIX_PARAM_TYPE is %d", *pValue16);
+ ALOGV("Downmix_getParameter DOWNMIX_PARAM_TYPE is %" PRId16, *pValue16);
break;
default:
- ALOGE("Downmix_getParameter unknown parameter %d", param);
+ ALOGE("Downmix_getParameter unknown parameter %" PRId16, param);
return -EINVAL;
}
@@ -827,65 +814,6 @@ void Downmix_foldFromQuad(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool ac
/*----------------------------------------------------------------------------
- * Downmix_foldFromSurround()
- *----------------------------------------------------------------------------
- * Purpose:
- * downmix a "surround sound" (mono rear) signal to stereo
- *
- * Inputs:
- * pSrc surround signal to downmix
- * numFrames the number of surround frames to downmix
- * accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
- * or overwrite pDst (when false)
- *
- * Outputs:
- * pDst downmixed stereo audio samples
- *
- *----------------------------------------------------------------------------
- */
-void Downmix_foldFromSurround(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate) {
- int32_t lt, rt, centerPlusRearContrib; // samples in Q19.12 format
- // sample at index 0 is FL
- // sample at index 1 is FR
- // sample at index 2 is FC
- // sample at index 3 is RC
- // code is mostly duplicated between the two values of accumulate to avoid repeating the test
- // for every sample
- if (accumulate) {
- while (numFrames) {
- // centerPlusRearContrib = FC(-3dB) + RC(-3dB)
- centerPlusRearContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12) + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
- // FL + centerPlusRearContrib
- lt = (pSrc[0] << 12) + centerPlusRearContrib;
- // FR + centerPlusRearContrib
- rt = (pSrc[1] << 12) + centerPlusRearContrib;
- // accumulate in destination
- pDst[0] = clamp16(pDst[0] + (lt >> 13));
- pDst[1] = clamp16(pDst[1] + (rt >> 13));
- pSrc += 4;
- pDst += 2;
- numFrames--;
- }
- } else { // same code as above but without adding and clamping pDst[i] to itself
- while (numFrames) {
- // centerPlusRearContrib = FC(-3dB) + RC(-3dB)
- centerPlusRearContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12) + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
- // FL + centerPlusRearContrib
- lt = (pSrc[0] << 12) + centerPlusRearContrib;
- // FR + centerPlusRearContrib
- rt = (pSrc[1] << 12) + centerPlusRearContrib;
- // store in destination
- pDst[0] = clamp16(lt >> 13); // differs from when accumulate is true above
- pDst[1] = clamp16(rt >> 13); // differs from when accumulate is true above
- pSrc += 4;
- pDst += 2;
- numFrames--;
- }
- }
-}
-
-
-/*----------------------------------------------------------------------------
* Downmix_foldFrom5Point1()
*----------------------------------------------------------------------------
* Purpose:
@@ -1070,7 +998,7 @@ bool Downmix_foldGeneric(
hasBacks = true;
}
- const int numChan = popcount(mask);
+ const int numChan = audio_channel_count_from_out_mask(mask);
const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
const bool hasLFE =
((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
diff --git a/media/libeffects/downmix/EffectDownmix.h b/media/libeffects/downmix/EffectDownmix.h
index fcb3c9e..2399abd 100644
--- a/media/libeffects/downmix/EffectDownmix.h
+++ b/media/libeffects/downmix/EffectDownmix.h
@@ -97,7 +97,6 @@ int Downmix_setParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t s
int Downmix_getParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t *pSize, void *pValue);
void Downmix_foldFromQuad(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
-void Downmix_foldFromSurround(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
void Downmix_foldFrom5Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
void Downmix_foldFrom7Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
bool Downmix_foldGeneric(
diff --git a/media/libeffects/loudness/Android.mk b/media/libeffects/loudness/Android.mk
index edf964e..55d0611 100644
--- a/media/libeffects/loudness/Android.mk
+++ b/media/libeffects/loudness/Android.mk
@@ -12,16 +12,11 @@ LOCAL_CFLAGS+= -O2 -fvisibility=hidden
LOCAL_SHARED_LIBRARIES := \
libcutils \
liblog \
- libstlport
LOCAL_MODULE_RELATIVE_PATH := soundfx
LOCAL_MODULE:= libldnhncr
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-effects) \
- bionic \
- bionic/libstdc++/include \
- external/stlport/stlport
-
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index db5c78f..6aeb919 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -19,11 +19,13 @@
#define ARRAY_SIZE(array) (sizeof array / sizeof array[0])
//#define LOG_NDEBUG 0
-#include <cutils/log.h>
#include <assert.h>
+#include <inttypes.h>
+#include <new>
#include <stdlib.h>
#include <string.h>
-#include <new>
+
+#include <cutils/log.h>
#include "EffectBundle.h"
@@ -161,7 +163,7 @@ int Effect_setEnabled(EffectContext *pContext, bool enabled);
extern "C" int EffectCreate(const effect_uuid_t *uuid,
int32_t sessionId,
- int32_t ioId,
+ int32_t ioId __unused,
effect_handle_t *pHandle){
int ret = 0;
int sessionNo;
@@ -221,6 +223,8 @@ extern "C" int EffectCreate(const effect_uuid_t *uuid,
pContext->pBundledContext->bBassTempDisabled = LVM_FALSE;
pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE;
pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
+ pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE;
+ pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
pContext->pBundledContext->NumberEffectsEnabled = 0;
pContext->pBundledContext->NumberEffectsCalled = 0;
pContext->pBundledContext->firstVolume = LVM_TRUE;
@@ -560,11 +564,12 @@ int LvmBundle_init(EffectContext *pContext){
MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %ld bytes "
- "for region %u\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u\n", MemTab.Region[i].Size, i );
bMallocFailure = LVM_TRUE;
}else{
- ALOGV("\tLvmBundle_init CreateInstance allocated %ld bytes for region %u at %p\n",
+ ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
+ " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
@@ -576,11 +581,11 @@ int LvmBundle_init(EffectContext *pContext){
if(bMallocFailure == LVM_TRUE){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %ld bytes "
- "for region %u Not freeing\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u Not freeing\n", MemTab.Region[i].Size, i );
}else{
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated %ld bytes "
- "for region %u at %p- free\n",
+ ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated %" PRIu32
+ " bytes for region %u at %p- free\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
}
@@ -761,6 +766,122 @@ int LvmBundle_process(LVM_INT16 *pIn,
return 0;
} /* end LvmBundle_process */
+
+//----------------------------------------------------------------------------
+// EqualizerUpdateActiveParams()
+//----------------------------------------------------------------------------
+// Purpose: Update ActiveParams for Equalizer
+//
+// Inputs:
+// pContext: effect engine context
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+void EqualizerUpdateActiveParams(EffectContext *pContext) {
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+
+ /* Get the current settings */
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerUpdateActiveParams")
+ //ALOGV("\tEqualizerUpdateActiveParams Succesfully returned from LVM_GetControlParameters\n");
+ //ALOGV("\tEqualizerUpdateActiveParams just Got -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
+ ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
+ ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
+ }
+
+ /* Activate the initial settings */
+ LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "EqualizerUpdateActiveParams")
+ //ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+}
+
+//----------------------------------------------------------------------------
+// LvmEffect_limitLevel()
+//----------------------------------------------------------------------------
+// Purpose: limit the overall level to a value less than 0 dB preserving
+// the overall EQ band gain and BassBoost relative levels.
+//
+// Inputs:
+// pContext: effect engine context
+//
+// Outputs:
+//
+//----------------------------------------------------------------------------
+void LvmEffect_limitLevel(EffectContext *pContext) {
+ LVM_ControlParams_t ActiveParams; /* Current control Parameters */
+ LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
+
+ /* Get the current settings */
+ LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "LvmEffect_limitLevel")
+ //ALOGV("\tLvmEffect_limitLevel Succesfully returned from LVM_GetControlParameters\n");
+ //ALOGV("\tLvmEffect_limitLevel just Got -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+ int gainCorrection = 0;
+ //Count the energy contribution per band for EQ and BassBoost only if they are active.
+ float energyContribution = 0;
+
+ //EQ contribution
+ if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) {
+ for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
+ float bandEnergy = (pContext->pBundledContext->bandGaindB[i] *
+ LimitLevel_bandEnergyContribution[i])/15.0;
+ if (bandEnergy > 0)
+ energyContribution += bandEnergy;
+ }
+ }
+
+ //BassBoost contribution
+ if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) {
+ float bandEnergy = (pContext->pBundledContext->BassStrengthSaved *
+ LimitLevel_bassBoostEnergyContribution)/1000.0;
+ if (bandEnergy > 0)
+ energyContribution += bandEnergy;
+ }
+
+ //Virtualizer contribution
+ if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
+ energyContribution += LimitLevel_virtualizerContribution;
+ }
+
+ //roundoff
+ int maxLevelRound = (int)(energyContribution + 0.99);
+ if (maxLevelRound + pContext->pBundledContext->volume > 0) {
+ gainCorrection = maxLevelRound + pContext->pBundledContext->volume;
+ }
+
+ ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
+ if (ActiveParams.VC_EffectLevel < -96) {
+ ActiveParams.VC_EffectLevel = -96;
+ }
+ ALOGV("\tVol:%d, GainCorrection: %d, Actual vol: %d", pContext->pBundledContext->volume,
+ gainCorrection, ActiveParams.VC_EffectLevel);
+
+ /* Activate the initial settings */
+ LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "LvmEffect_limitLevel")
+ //ALOGV("\tLvmEffect_limitLevel just Set -> %d\n",
+ // ActiveParams.pEQNB_BandDefinition[band].Gain);
+
+ //ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
+ if (pContext->pBundledContext->firstVolume == LVM_TRUE){
+ LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams);
+ LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process")
+ ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks");
+ pContext->pBundledContext->firstVolume = LVM_FALSE;
+ }
+}
+
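For illustration, the headroom logic above boils down to a small amount of arithmetic. The sketch below uses hypothetical input values; the LimitLevel_* constants are the ones added to EffectBundle.h further down, and all three effects are assumed active:

    // Hypothetical example: EQ band 0 at +3 dB, BassBoost strength 500, virtualizer on, volume 0 dB.
    static int exampleEffectLevel() {
        const float bandContribution[5] = {5.0f, 6.5f, 6.45f, 4.8f, 1.7f}; // LimitLevel_bandEnergyContribution
        const int   bandGaindB[5]       = {3, 0, 0, 0, 0};                 // assumed EQ band gains
        const int   bassStrength        = 500;                             // assumed BassBoost strength (0-1000)
        const int   volume              = 0;                               // assumed current volume in dB

        float energy = 0.0f;
        for (int i = 0; i < 5; i++) {
            float e = (bandGaindB[i] * bandContribution[i]) / 15.0f;       // band 0 contributes 1.0
            if (e > 0) energy += e;
        }
        energy += (bassStrength * 6.7f) / 1000.0f;                         // +3.35 (bass boost contribution)
        energy += 1.9f;                                                    // virtualizer contribution
        int maxLevelRound = (int)(energy + 0.99f);                         // 6.25 rounds up to 7
        int gainCorrection = (maxLevelRound + volume > 0) ? maxLevelRound + volume : 0;
        return volume - gainCorrection;                                    // VC_EffectLevel = -7 dB
    }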
//----------------------------------------------------------------------------
// LvmEffect_enable()
//----------------------------------------------------------------------------
@@ -809,6 +930,7 @@ int LvmEffect_enable(EffectContext *pContext){
//ALOGV("\tLvmEffect_enable Succesfully called LVM_SetControlParameters\n");
//ALOGV("\tLvmEffect_enable end");
+ LvmEffect_limitLevel(pContext);
return 0;
}
@@ -859,6 +981,7 @@ int LvmEffect_disable(EffectContext *pContext){
//ALOGV("\tLvmEffect_disable Succesfully called LVM_SetControlParameters\n");
//ALOGV("\tLvmEffect_disable end");
+ LvmEffect_limitLevel(pContext);
return 0;
}
@@ -889,16 +1012,16 @@ void LvmEffect_free(EffectContext *pContext){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].Size != 0){
if (MemTab.Region[i].pBaseAddress != NULL){
- ALOGV("\tLvmEffect_free - START freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tLvmEffect_free - START freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
- ALOGV("\tLvmEffect_free - END freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tLvmEffect_free - END freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}else{
- ALOGV("\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer %ld bytes "
- "for region %u at %p ERROR\n",
+ ALOGV("\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer %" PRIu32
+ " bytes for region %u at %p ERROR\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
@@ -1094,6 +1217,8 @@ void BassSetStrength(EffectContext *pContext, uint32_t strength){
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "BassSetStrength")
//ALOGV("\tBassSetStrength Succesfully called LVM_SetControlParameters\n");
+
+ LvmEffect_limitLevel(pContext);
} /* end BassSetStrength */
//----------------------------------------------------------------------------
@@ -1154,113 +1279,189 @@ void VirtualizerSetStrength(EffectContext *pContext, uint32_t strength){
/* Virtualizer parameters */
ActiveParams.CS_EffectLevel = (int)((strength*32767)/1000);
- //ALOGV("\tVirtualizerSetStrength() (0-1000) -> %d\n", strength );
- //ALOGV("\tVirtualizerSetStrength() (0- 100) -> %d\n", ActiveParams.CS_EffectLevel );
+ ALOGV("\tVirtualizerSetStrength() (0-1000) -> %d\n", strength );
+ ALOGV("\tVirtualizerSetStrength() (0- 100) -> %d\n", ActiveParams.CS_EffectLevel );
/* Activate the initial settings */
LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "VirtualizerSetStrength")
//ALOGV("\tVirtualizerSetStrength Succesfully called LVM_SetControlParameters\n\n");
+ LvmEffect_limitLevel(pContext);
} /* end setStrength */
+//----------------------------------------------------------------------------
+// VirtualizerIsDeviceSupported()
+//----------------------------------------------------------------------------
+// Purpose:
+// Check if an audio device type is supported by this implementation
+//
+// Inputs:
+// deviceType the type of device that affects the processing (e.g. for binaural vs transaural)
+// Output:
+// -EINVAL if the configuration is not supported or it is unknown
+// 0 if the configuration is supported
+//----------------------------------------------------------------------------
+int VirtualizerIsDeviceSupported(audio_devices_t deviceType) {
+ switch (deviceType) {
+ case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+ case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+ case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+ return 0;
+ default :
+ return -EINVAL;
+ }
+}
//----------------------------------------------------------------------------
-// EqualizerLimitBandLevels()
+// VirtualizerIsConfigurationSupported()
//----------------------------------------------------------------------------
-// Purpose: limit all EQ band gains to a value less than 0 dB while
-// preserving the relative band levels.
+// Purpose:
+// Check if a channel mask + audio device type is supported by this implementation
//
// Inputs:
-// pContext: effect engine context
+// channelMask the channel mask of the input to virtualize
+// deviceType the type of device that affects the processing (e.g. for binaural vs transaural)
+// Output:
+// -EINVAL if the configuration is not supported or it is unknown
+// 0 if the configuration is supported
+//----------------------------------------------------------------------------
+int VirtualizerIsConfigurationSupported(audio_channel_mask_t channelMask,
+ audio_devices_t deviceType) {
+ uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+ if ((channelCount == 0) || (channelCount > 2)) {
+ return -EINVAL;
+ }
+
+ return VirtualizerIsDeviceSupported(deviceType);
+}
+
+//----------------------------------------------------------------------------
+// VirtualizerForceVirtualizationMode()
+//----------------------------------------------------------------------------
+// Purpose:
+// Force the virtualization mode to that of the given audio device
//
-// Outputs:
+// Inputs:
+// pContext effect engine context
+// forcedDevice the type of device whose virtualization mode we'll always use
+// Output:
+// -EINVAL if the device is not supported or is unknown
+// 0 if the device is supported and the virtualization mode forced
//
//----------------------------------------------------------------------------
-void EqualizerLimitBandLevels(EffectContext *pContext) {
- LVM_ControlParams_t ActiveParams; /* Current control Parameters */
- LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */
-
- /* Get the current settings */
- LvmStatus = LVM_GetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
- LVM_ERROR_CHECK(LvmStatus, "LVM_GetControlParameters", "EqualizerLimitBandLevels")
- //ALOGV("\tEqualizerLimitBandLevels Succesfully returned from LVM_GetControlParameters\n");
- //ALOGV("\tEqualizerLimitBandLevels just Got -> %d\n",
- // ActiveParams.pEQNB_BandDefinition[band].Gain);
-
- // Apply a volume correction to avoid clipping in the EQ based on 2 factors:
- // - the maximum EQ band gain: the volume correction is such that the total of volume + max
- // band gain is <= 0 dB
- // - the average gain in all bands weighted by their proximity to max gain band.
- int maxGain = 0;
- int avgGain = 0;
- int avgCount = 0;
- for (int i = 0; i < FIVEBAND_NUMBANDS; i++) {
- if (pContext->pBundledContext->bandGaindB[i] >= maxGain) {
- int tmpMaxGain = pContext->pBundledContext->bandGaindB[i];
- int tmpAvgGain = 0;
- int tmpAvgCount = 0;
- for (int j = 0; j < FIVEBAND_NUMBANDS; j++) {
- int gain = pContext->pBundledContext->bandGaindB[j];
- // skip current band and gains < 0 dB
- if (j == i || gain < 0)
- continue;
- // no need to continue if one band not processed yet has a higher gain than current
- // max
- if (gain > tmpMaxGain) {
- // force skipping "if (tmpAvgGain >= avgGain)" below as tmpAvgGain is not
- // meaningful in this case
- tmpAvgGain = -1;
- break;
- }
+int VirtualizerForceVirtualizationMode(EffectContext *pContext, audio_devices_t forcedDevice) {
+ ALOGV("VirtualizerForceVirtualizationMode: forcedDev=0x%x enabled=%d tmpDisabled=%d",
+ forcedDevice, pContext->pBundledContext->bVirtualizerEnabled,
+ pContext->pBundledContext->bVirtualizerTempDisabled);
+ int status = 0;
+ bool useVirtualizer = false;
- int weight = 1;
- if (j < (i + 2) && j > (i - 2))
- weight = 4;
- tmpAvgGain += weight * gain;
- tmpAvgCount += weight;
- }
- if (tmpAvgGain >= avgGain) {
- maxGain = tmpMaxGain;
- avgGain = tmpAvgGain;
- avgCount = tmpAvgCount;
- }
+ if (VirtualizerIsDeviceSupported(forcedDevice) != 0) {
+ if (forcedDevice != AUDIO_DEVICE_NONE) {
+ //forced device is not supported, make it behave as a reset of forced mode
+ forcedDevice = AUDIO_DEVICE_NONE;
+ // but return an error
+ status = -EINVAL;
}
- ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i];
- ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i];
- ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i];
}
- int gainCorrection = 0;
- if (maxGain + pContext->pBundledContext->volume > 0) {
- gainCorrection = maxGain + pContext->pBundledContext->volume;
- }
- if (avgCount) {
- gainCorrection += avgGain/avgCount;
+ if (forcedDevice == AUDIO_DEVICE_NONE) {
+ // disabling forced virtualization mode:
+ // verify whether the virtualization should be enabled or disabled
+ if (VirtualizerIsDeviceSupported(pContext->pBundledContext->nOutputDevice) == 0) {
+ useVirtualizer = (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE);
+ }
+ pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE;
+ } else {
+ // forcing virtualization mode: here we already know the device is supported
+ pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
+ // only enable for a supported mode, when the effect is enabled
+ useVirtualizer = (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE);
}
- ALOGV("EqualizerLimitBandLevels() gainCorrection %d maxGain %d avgGain %d avgCount %d",
- gainCorrection, maxGain, avgGain, avgCount);
-
- ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection;
- if (ActiveParams.VC_EffectLevel < -96) {
- ActiveParams.VC_EffectLevel = -96;
+ if (useVirtualizer) {
+ if (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_TRUE) {
+ ALOGV("\tVirtualizerForceVirtualizationMode re-enable LVM_VIRTUALIZER");
+ android::LvmEffect_enable(pContext);
+ pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
+ } else {
+ ALOGV("\tVirtualizerForceVirtualizationMode leaving LVM_VIRTUALIZER enabled");
+ }
+ } else {
+ if (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE) {
+ ALOGV("\tVirtualizerForceVirtualizationMode disable LVM_VIRTUALIZER");
+ android::LvmEffect_disable(pContext);
+ pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
+ } else {
+ ALOGV("\tVirtualizerForceVirtualizationMode leaving LVM_VIRTUALIZER disabled");
+ }
}
- /* Activate the initial settings */
- LvmStatus = LVM_SetControlParameters(pContext->pBundledContext->hInstance, &ActiveParams);
- LVM_ERROR_CHECK(LvmStatus, "LVM_SetControlParameters", "EqualizerLimitBandLevels")
- //ALOGV("\tEqualizerLimitBandLevels just Set -> %d\n",
- // ActiveParams.pEQNB_BandDefinition[band].Gain);
+ ALOGV("\tafter VirtualizerForceVirtualizationMode: enabled=%d tmpDisabled=%d",
+ pContext->pBundledContext->bVirtualizerEnabled,
+ pContext->pBundledContext->bVirtualizerTempDisabled);
- //ALOGV("\tEqualizerLimitBandLevels just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel );
- if(pContext->pBundledContext->firstVolume == LVM_TRUE){
- LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams);
- LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process")
- ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks");
- pContext->pBundledContext->firstVolume = LVM_FALSE;
- }
+ return status;
+}
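A minimal caller-side sketch (illustrative only, not part of this change) of how the three cases above behave; pContext is assumed to be an initialized virtualizer context:

    void exampleForceMode(EffectContext *pContext) {
        // Supported device: force headphone-style processing regardless of the routed output.
        int status = VirtualizerForceVirtualizationMode(pContext, AUDIO_DEVICE_OUT_WIRED_HEADPHONE); // 0
        // Unsupported device: treated as a reset of the forced mode, but reported as an error.
        status = VirtualizerForceVirtualizationMode(pContext, AUDIO_DEVICE_OUT_SPEAKER);             // -EINVAL
        // AUDIO_DEVICE_NONE: stop forcing; virtualization again follows nOutputDevice support.
        status = VirtualizerForceVirtualizationMode(pContext, AUDIO_DEVICE_NONE);                    // 0
        (void) status;
    }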
+//----------------------------------------------------------------------------
+// VirtualizerGetSpeakerAngles()
+//----------------------------------------------------------------------------
+// Purpose:
+// Get the virtual speaker angles for a channel mask + audio device type
+// configuration which is guaranteed to be supported by this implementation
+//
+// Inputs:
+// channelMask: the channel mask of the input to virtualize
+// deviceType the type of device that affects the processing (e.g. for binaural vs transaural)
+// Input/Output:
+//  pSpeakerAngles the array of integers where each speaker angle is written as a triplet in the
+// following format:
+// int32_t a bit mask with a single value selected for each speaker, following
+// the convention of the audio_channel_mask_t type
+// int32_t a value in degrees expressing the speaker azimuth, where 0 is in front
+// of the user, 180 behind, -90 to the left, 90 to the right of the user
+// int32_t a value in degrees expressing the speaker elevation, where 0 is the
+// horizontal plane, +90 is directly above the user, -90 below
+//
+//----------------------------------------------------------------------------
+void VirtualizerGetSpeakerAngles(audio_channel_mask_t channelMask __unused,
+ audio_devices_t deviceType __unused, int32_t *pSpeakerAngles) {
+ // the channel count is guaranteed to be 1 or 2
+ // the device is guaranteed to be of type headphone
+    // this virtualizer is always 2-in with speakers at -90 and 90 deg of azimuth, 0 deg of elevation
+ *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_LEFT;
+ *pSpeakerAngles++ = -90; // azimuth
+ *pSpeakerAngles++ = 0; // elevation
+ *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_RIGHT;
+ *pSpeakerAngles++ = 90; // azimuth
+ *pSpeakerAngles = 0; // elevation
}
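To illustrate the triplet layout documented above, a caller passing a stereo mask could decode the result as follows (sketch only; assumes the output buffer size was validated as in Virtualizer_getParameter() below):

    void exampleReadSpeakerAngles() {
        int32_t angles[3 * 2];
        VirtualizerGetSpeakerAngles(AUDIO_CHANNEL_OUT_STEREO, AUDIO_DEVICE_OUT_WIRED_HEADPHONE, angles);
        for (int i = 0; i < 2; i++) {
            audio_channel_mask_t speaker = (audio_channel_mask_t) angles[3 * i];
            int32_t azimuth   = angles[3 * i + 1];   // -90 for FRONT_LEFT, 90 for FRONT_RIGHT
            int32_t elevation = angles[3 * i + 2];   // always 0 for this implementation
            ALOGV("speaker 0x%x azimuth %d elevation %d", speaker, azimuth, elevation);
        }
    }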
+//----------------------------------------------------------------------------
+// VirtualizerGetVirtualizationMode()
+//----------------------------------------------------------------------------
+// Purpose:
+// Retrieve the current device whose processing mode is used by this effect
+//
+// Output:
+// AUDIO_DEVICE_NONE if the effect is not virtualizing
+// or the device type if the effect is virtualizing
+//----------------------------------------------------------------------------
+audio_devices_t VirtualizerGetVirtualizationMode(EffectContext *pContext) {
+ audio_devices_t virtDevice = AUDIO_DEVICE_NONE;
+ if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE)
+ && (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) {
+ if (pContext->pBundledContext->nVirtualizerForcedDevice != AUDIO_DEVICE_NONE) {
+ // virtualization mode is forced, return that device
+ virtDevice = pContext->pBundledContext->nVirtualizerForcedDevice;
+ } else {
+ // no forced mode, return the current device
+ virtDevice = pContext->pBundledContext->nOutputDevice;
+ }
+ }
+ ALOGV("VirtualizerGetVirtualizationMode() returning 0x%x", virtDevice);
+ return virtDevice;
+}
//----------------------------------------------------------------------------
// EqualizerGetBandLevel()
@@ -1304,7 +1505,8 @@ void EqualizerSetBandLevel(EffectContext *pContext, int band, short Gain){
pContext->pBundledContext->bandGaindB[band] = gainRounded;
pContext->pBundledContext->CurPreset = PRESET_CUSTOM;
- EqualizerLimitBandLevels(pContext);
+ EqualizerUpdateActiveParams(pContext);
+ LvmEffect_limitLevel(pContext);
}
//----------------------------------------------------------------------------
@@ -1357,7 +1559,7 @@ int32_t EqualizerGetCentreFrequency(EffectContext *pContext, int32_t band){
// pLow: lower band range
// pLow: upper band range
//----------------------------------------------------------------------------
-int32_t EqualizerGetBandFreqRange(EffectContext *pContext, int32_t band, uint32_t *pLow,
+int32_t EqualizerGetBandFreqRange(EffectContext *pContext __unused, int32_t band, uint32_t *pLow,
uint32_t *pHi){
*pLow = bandFreqRange[band][0];
*pHi = bandFreqRange[band][1];
@@ -1381,7 +1583,7 @@ int32_t EqualizerGetBandFreqRange(EffectContext *pContext, int32_t band, uint32_
// pLow: lower band range
// pLow: upper band range
//----------------------------------------------------------------------------
-int32_t EqualizerGetBand(EffectContext *pContext, uint32_t targetFreq){
+int32_t EqualizerGetBand(EffectContext *pContext __unused, uint32_t targetFreq){
int band = 0;
if(targetFreq < bandFreqRange[0][0]){
@@ -1439,7 +1641,8 @@ void EqualizerSetPreset(EffectContext *pContext, int preset){
EQNB_5BandSoftPresets[i + preset * FIVEBAND_NUMBANDS];
}
- EqualizerLimitBandLevels(pContext);
+ EqualizerUpdateActiveParams(pContext);
+ LvmEffect_limitLevel(pContext);
//ALOGV("\tEqualizerSetPreset Succesfully called LVM_SetControlParameters\n");
return;
@@ -1494,7 +1697,7 @@ int VolumeSetVolumeLevel(EffectContext *pContext, int16_t level){
pContext->pBundledContext->volume = level / 100;
}
- EqualizerLimitBandLevels(pContext);
+ LvmEffect_limitLevel(pContext);
return 0;
} /* end VolumeSetVolumeLevel */
@@ -1543,7 +1746,7 @@ int32_t VolumeSetMute(EffectContext *pContext, uint32_t mute){
pContext->pBundledContext->volume = pContext->pBundledContext->levelSaved;
}
- EqualizerLimitBandLevels(pContext);
+ LvmEffect_limitLevel(pContext);
return 0;
} /* end setMute */
@@ -1881,7 +2084,6 @@ int Virtualizer_getParameter(EffectContext *pContext,
int status = 0;
int32_t *pParamTemp = (int32_t *)pParam;
int32_t param = *pParamTemp++;
- int32_t param2;
char *name;
//ALOGV("\tVirtualizer_getParameter start");
@@ -1901,7 +2103,17 @@ int Virtualizer_getParameter(EffectContext *pContext,
}
*pValueSize = sizeof(int16_t);
break;
-
+ case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES:
+ // return value size can only be interpreted as relative to input value,
+ // deferring validity check to below
+ break;
+ case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE:
+ if (*pValueSize != sizeof(uint32_t)){
+ ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid pValueSize %d",*pValueSize);
+ return -EINVAL;
+ }
+ *pValueSize = sizeof(uint32_t);
+ break;
default:
ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid param %d", param);
return -EINVAL;
@@ -1922,13 +2134,36 @@ int Virtualizer_getParameter(EffectContext *pContext,
// *(int16_t *)pValue);
break;
+ case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: {
+ const audio_channel_mask_t channelMask = (audio_channel_mask_t) *pParamTemp++;
+ const audio_devices_t deviceType = (audio_devices_t) *pParamTemp;
+ uint32_t nbChannels = audio_channel_count_from_out_mask(channelMask);
+ if (*pValueSize < 3 * nbChannels * sizeof(int32_t)){
+ ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid pValueSize %d",*pValueSize);
+ return -EINVAL;
+ }
+ // verify the configuration is supported
+ status = VirtualizerIsConfigurationSupported(channelMask, deviceType);
+ if (status == 0) {
+ ALOGV("VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES supports mask=0x%x device=0x%x",
+ channelMask, deviceType);
+ // configuration is supported, get the angles
+ VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t *)pValue);
+ }
+ }
+ break;
+
+ case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE:
+ *(uint32_t *)pValue = (uint32_t) VirtualizerGetVirtualizationMode(pContext);
+ break;
+
default:
ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid param %d", param);
status = -EINVAL;
break;
}
- //ALOGV("\tVirtualizer_getParameter end");
+ ALOGV("\tVirtualizer_getParameter end returning status=%d", status);
return status;
} /* end Virtualizer_getParameter */
@@ -1963,6 +2198,15 @@ int Virtualizer_setParameter (EffectContext *pContext, void *pParam, void *pValu
VirtualizerSetStrength(pContext, (int32_t)strength);
//ALOGV("\tVirtualizer_setParameter() Called pVirtualizer->setStrength");
break;
+
+ case VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE: {
+ const audio_devices_t deviceType = *(audio_devices_t *) pValue;
+ status = VirtualizerForceVirtualizationMode(pContext, deviceType);
+ //ALOGV("VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=0x%x result=%d",
+ // deviceType, status);
+ }
+ break;
+
default:
ALOGV("\tLVM_ERROR : Virtualizer_setParameter() invalid param %d", param);
break;
@@ -2863,7 +3107,6 @@ int Effect_command(effect_handle_t self,
(void *)p->data,
&p->vsize,
p->data + voffset);
-
*replySize = sizeof(effect_param_t) + voffset + p->vsize;
//ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM "
@@ -2974,14 +3217,17 @@ int Effect_command(effect_handle_t self,
p->data + p->psize);
}
if(pContext->EffectType == LVM_VIRTUALIZER){
+            // Warning: this log will fail to properly read an int32_t value; it assumes int16_t
//ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d",
// *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)),
// *replySize,
// *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t)));
- if (pCmdData == NULL||
- cmdSize != (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t))||
- pReplyData == NULL||
+ if (pCmdData == NULL ||
+ // legal parameters are int16_t or int32_t
+ cmdSize > (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int32_t)) ||
+ cmdSize < (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) ||
+ pReplyData == NULL ||
*replySize != sizeof(int32_t)){
ALOGV("\tLVM_ERROR : Virtualizer_command cmdCode Case: "
"EFFECT_CMD_SET_PARAM: ERROR");
@@ -3073,6 +3319,7 @@ int Effect_command(effect_handle_t self,
{
ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE start");
uint32_t device = *(uint32_t *)pCmdData;
+ pContext->pBundledContext->nOutputDevice = (audio_devices_t) device;
if (pContext->EffectType == LVM_BASS_BOOST) {
if((device == AUDIO_DEVICE_OUT_SPEAKER) ||
@@ -3108,37 +3355,38 @@ int Effect_command(effect_handle_t self,
}
}
if (pContext->EffectType == LVM_VIRTUALIZER) {
- if((device == AUDIO_DEVICE_OUT_SPEAKER)||
- (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT)||
- (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){
- ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
- ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER");
-
- //If a device doesnt support virtualizer the effect must be temporarily disabled
- // the effect must still report its original state as this can only be changed
- // by the ENABLE/DISABLE command
-
- if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
- ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
- android::LvmEffect_disable(pContext);
+ if (pContext->pBundledContext->nVirtualizerForcedDevice == AUDIO_DEVICE_NONE) {
+ // default case unless configuration is forced
+ if (android::VirtualizerIsDeviceSupported(device) != 0) {
+ ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d",
+ *(int32_t *)pCmdData);
+ ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER");
+
+                    // If a device doesn't support the virtualizer, the effect must be
+                    // temporarily disabled. The effect must still report its original state,
+                    // as this can only be changed by the ENABLE/DISABLE command.
+
+ if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) {
+ ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d",
+ *(int32_t *)pCmdData);
+ android::LvmEffect_disable(pContext);
+ }
+ pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
+ } else {
+ ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d",
+ *(int32_t *)pCmdData);
+
+ // If a device supports virtualizer and the effect has been temporarily
+ // disabled previously then re-enable it
+
+ if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
+ ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d",
+ *(int32_t *)pCmdData);
+ android::LvmEffect_enable(pContext);
+ }
+ pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
}
- pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE;
- } else {
- ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
-
- // If a device supports virtualizer and the effect has been temporarily disabled
- // previously then re-enable it
-
- if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){
- ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d",
- *(int32_t *)pCmdData);
- android::LvmEffect_enable(pContext);
- }
- pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE;
- }
+ } // else virtualization mode is forced to a certain device, nothing to do
}
ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE end");
break;
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
index 330bb32..b3071f4 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h
@@ -73,6 +73,8 @@ struct BundledEffectContext{
bool bBassTempDisabled; /* Flag for Bass to be re-enabled */
bool bVirtualizerEnabled; /* Flag for Virtualizer */
bool bVirtualizerTempDisabled; /* Flag for effect to be re-enabled */
+ audio_devices_t nOutputDevice; /* Output device for the effect */
+ audio_devices_t nVirtualizerForcedDevice; /* Forced device virtualization mode*/
int NumberEffectsEnabled; /* Effects in this session */
int NumberEffectsCalled; /* Effects called so far */
bool firstVolume; /* No smoothing on first Vol change */
@@ -140,6 +142,7 @@ static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = {
{1800001, 7000000},
{7000001, 1}};
+//Note: If these frequencies change, please update LimitLevel values accordingly.
static const LVM_UINT16 EQNB_5BandPresetsFrequencies[] = {
60, /* Frequencies in Hz */
230,
@@ -190,6 +193,20 @@ static const PresetConfig gEqualizerPresets[] = {
{"Pop"},
{"Rock"}};
+/* The following tables have been computed using the actual levels measured by the output of
+ * white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of
+ * the actual energy that 'could' be present in the given band.
+ * If the frequency values in EQNB_5BandPresetsFrequencies change, these values might need to be
+ * updated.
+ */
+
+static const float LimitLevel_bandEnergyContribution[FIVEBAND_NUMBANDS] = {
+ 5.0, 6.5, 6.45, 4.8, 1.7 };
+
+static const float LimitLevel_bassBoostEnergyContribution = 6.7;
+
+static const float LimitLevel_virtualizerContribution = 1.9;
+
#if __cplusplus
} // extern "C"
#endif
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index c6d3759..13f1a0d 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -19,11 +19,13 @@
#define ARRAY_SIZE(array) (sizeof array / sizeof array[0])
//#define LOG_NDEBUG 0
-#include <cutils/log.h>
#include <assert.h>
+#include <inttypes.h>
+#include <new>
#include <stdlib.h>
#include <string.h>
-#include <new>
+
+#include <cutils/log.h>
#include "EffectReverb.h"
// from Reverb/lib
#include "LVREV.h"
@@ -269,7 +271,7 @@ extern "C" int EffectCreate(const effect_uuid_t *uuid,
pContext->InFrames32 = (LVM_INT32 *)malloc(LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2);
pContext->OutFrames32 = (LVM_INT32 *)malloc(LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2);
- ALOGV("\tEffectCreate %p, size %d", pContext, sizeof(ReverbContext));
+ ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext));
ALOGV("\tEffectCreate end\n");
return 0;
} /* end EffectCreate */
@@ -570,15 +572,15 @@ void Reverb_free(ReverbContext *pContext){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].Size != 0){
if (MemTab.Region[i].pBaseAddress != NULL){
- ALOGV("\tfree() - START freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tfree() - START freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
- ALOGV("\tfree() - END freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tfree() - END freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}else{
- ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %ld bytes "
+ ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32 " bytes "
"for region %u at %p ERROR\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
@@ -771,11 +773,12 @@ int Reverb_init(ReverbContext *pContext){
MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %ld "
- "bytes for region %u\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u\n", MemTab.Region[i].Size, i );
bMallocFailure = LVM_TRUE;
}else{
- ALOGV("\tReverb_init CreateInstance allocate %ld bytes for region %u at %p\n",
+ ALOGV("\tReverb_init CreateInstance allocate %" PRIu32
+ " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
@@ -787,11 +790,11 @@ int Reverb_init(ReverbContext *pContext){
if(bMallocFailure == LVM_TRUE){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %ld bytes "
- "for region %u - Not freeing\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u - Not freeing\n", MemTab.Region[i].Size, i );
}else{
- ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %ld bytes "
- "for region %u at %p- free\n",
+ ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %" PRIu32
+ " bytes for region %u at %p- free\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
}
diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk
index 9e8cb83..ea3c59d 100644
--- a/media/libeffects/preprocessing/Android.mk
+++ b/media/libeffects/preprocessing/Android.mk
@@ -24,12 +24,7 @@ LOCAL_SHARED_LIBRARIES := \
libutils \
liblog
-ifeq ($(TARGET_SIMULATOR),true)
-LOCAL_LDLIBS += -ldl
-else
LOCAL_SHARED_LIBRARIES += libdl
-endif
-
LOCAL_CFLAGS += -fvisibility=hidden
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index a96a703..cf98f56 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -879,8 +879,8 @@ int Session_ReleaseEffect(preproc_session_t *session,
int Session_SetConfig(preproc_session_t *session, effect_config_t *config)
{
uint32_t sr;
- uint32_t inCnl = popcount(config->inputCfg.channels);
- uint32_t outCnl = popcount(config->outputCfg.channels);
+ uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
+ uint32_t outCnl = audio_channel_count_from_out_mask(config->outputCfg.channels);
if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
config->inputCfg.format != config->outputCfg.format ||
@@ -1035,7 +1035,7 @@ int Session_SetReverseConfig(preproc_session_t *session, effect_config_t *config
config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
return -EINVAL;
}
- uint32_t inCnl = popcount(config->inputCfg.channels);
+ uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
int status = session->apm->set_num_reverse_channels(inCnl);
if (status < 0) {
return -EINVAL;
diff --git a/media/libeffects/proxy/Android.mk b/media/libeffects/proxy/Android.mk
index b438796..2ba452e 100644
--- a/media/libeffects/proxy/Android.mk
+++ b/media/libeffects/proxy/Android.mk
@@ -28,7 +28,6 @@ LOCAL_SHARED_LIBRARIES := liblog libcutils libutils libdl libeffects
LOCAL_C_INCLUDES := \
system/media/audio_effects/include \
- bionic/libc/include \
frameworks/av/media/libeffects/factory
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/testlibs/Android.mk_ b/media/libeffects/testlibs/Android.mk_
index 672ebba..14c373f 100644
--- a/media/libeffects/testlibs/Android.mk_
+++ b/media/libeffects/testlibs/Android.mk_
@@ -3,24 +3,18 @@ LOCAL_PATH:= $(call my-dir)
# Test Reverb library
include $(CLEAR_VARS)
-LOCAL_SRC_FILES:= \
+LOCAL_SRC_FILES := \
EffectReverb.c.arm \
EffectsMath.c.arm
-LOCAL_CFLAGS+= -O2
+
+LOCAL_CFLAGS := -O2
LOCAL_SHARED_LIBRARIES := \
- libcutils
+ libcutils \
+ libdl
LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libreverbtest
-
-ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
-endif
-
-ifneq ($(TARGET_SIMULATOR),true)
-LOCAL_SHARED_LIBRARIES += libdl
-endif
+LOCAL_MODULE := libreverbtest
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-effects) \
@@ -33,7 +27,7 @@ include $(BUILD_SHARED_LIBRARY)
# Test Equalizer library
include $(CLEAR_VARS)
-LOCAL_SRC_FILES:= \
+LOCAL_SRC_FILES := \
EffectsMath.c.arm \
EffectEqualizer.cpp \
AudioBiquadFilter.cpp.arm \
@@ -42,21 +36,14 @@ LOCAL_SRC_FILES:= \
AudioShelvingFilter.cpp.arm \
AudioEqualizer.cpp.arm
-LOCAL_CFLAGS+= -O2
+LOCAL_CFLAGS := -O2
LOCAL_SHARED_LIBRARIES := \
- libcutils
+ libcutils \
+ libdl
LOCAL_MODULE_RELATIVE_PATH := soundfx
-LOCAL_MODULE:= libequalizertest
-
-ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
-endif
-
-ifneq ($(TARGET_SIMULATOR),true)
-LOCAL_SHARED_LIBRARIES += libdl
-endif
+LOCAL_MODULE := libequalizertest
LOCAL_C_INCLUDES := \
$(call include-path-for, graphics corecg) \
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index dd2d306..c92c543 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -17,7 +17,6 @@ LOCAL_MODULE_RELATIVE_PATH := soundfx
LOCAL_MODULE:= libvisualizer
LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg) \
$(call include-path-for, audio-effects)
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index 5bdaa03..e5089da 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -16,8 +16,9 @@
#define LOG_TAG "EffectVisualizer"
//#define LOG_NDEBUG 0
-#include <cutils/log.h>
+#include <log/log.h>
#include <assert.h>
+#include <inttypes.h>
#include <stdlib.h>
#include <string.h>
#include <new>
@@ -206,7 +207,8 @@ int Visualizer_init(VisualizerContext *pContext)
pContext->mScalingMode = VISUALIZER_SCALING_MODE_NORMALIZED;
// measurement initialization
- pContext->mChannelCount = popcount(pContext->mConfig.inputCfg.channels);
+ pContext->mChannelCount =
+ audio_channel_count_from_out_mask(pContext->mConfig.inputCfg.channels);
pContext->mMeasurementMode = MEASUREMENT_MODE_NONE;
pContext->mMeasurementWindowSizeInBuffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS;
pContext->mMeasurementBufferIdx = 0;
@@ -226,8 +228,8 @@ int Visualizer_init(VisualizerContext *pContext)
//
int VisualizerLib_Create(const effect_uuid_t *uuid,
- int32_t sessionId,
- int32_t ioId,
+ int32_t /*sessionId*/,
+ int32_t /*ioId*/,
effect_handle_t *pHandle) {
int ret;
int i;
@@ -418,7 +420,7 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
return -EINVAL;
}
-// ALOGV("Visualizer_command command %d cmdSize %d",cmdCode, cmdSize);
+// ALOGV("Visualizer_command command %" PRIu32 " cmdSize %" PRIu32, cmdCode, cmdSize);
switch (cmdCode) {
case EFFECT_CMD_INIT:
@@ -484,19 +486,19 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
}
switch (*(uint32_t *)p->data) {
case VISUALIZER_PARAM_CAPTURE_SIZE:
- ALOGV("get mCaptureSize = %d", pContext->mCaptureSize);
+ ALOGV("get mCaptureSize = %" PRIu32, pContext->mCaptureSize);
*((uint32_t *)p->data + 1) = pContext->mCaptureSize;
p->vsize = sizeof(uint32_t);
*replySize += sizeof(uint32_t);
break;
case VISUALIZER_PARAM_SCALING_MODE:
- ALOGV("get mScalingMode = %d", pContext->mScalingMode);
+ ALOGV("get mScalingMode = %" PRIu32, pContext->mScalingMode);
*((uint32_t *)p->data + 1) = pContext->mScalingMode;
p->vsize = sizeof(uint32_t);
*replySize += sizeof(uint32_t);
break;
case VISUALIZER_PARAM_MEASUREMENT_MODE:
- ALOGV("get mMeasurementMode = %d", pContext->mMeasurementMode);
+ ALOGV("get mMeasurementMode = %" PRIu32, pContext->mMeasurementMode);
*((uint32_t *)p->data + 1) = pContext->mMeasurementMode;
p->vsize = sizeof(uint32_t);
*replySize += sizeof(uint32_t);
@@ -520,19 +522,19 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
switch (*(uint32_t *)p->data) {
case VISUALIZER_PARAM_CAPTURE_SIZE:
pContext->mCaptureSize = *((uint32_t *)p->data + 1);
- ALOGV("set mCaptureSize = %d", pContext->mCaptureSize);
+ ALOGV("set mCaptureSize = %" PRIu32, pContext->mCaptureSize);
break;
case VISUALIZER_PARAM_SCALING_MODE:
pContext->mScalingMode = *((uint32_t *)p->data + 1);
- ALOGV("set mScalingMode = %d", pContext->mScalingMode);
+ ALOGV("set mScalingMode = %" PRIu32, pContext->mScalingMode);
break;
case VISUALIZER_PARAM_LATENCY:
pContext->mLatency = *((uint32_t *)p->data + 1);
- ALOGV("set mLatency = %d", pContext->mLatency);
+ ALOGV("set mLatency = %" PRIu32, pContext->mLatency);
break;
case VISUALIZER_PARAM_MEASUREMENT_MODE:
pContext->mMeasurementMode = *((uint32_t *)p->data + 1);
- ALOGV("set mMeasurementMode = %d", pContext->mMeasurementMode);
+ ALOGV("set mMeasurementMode = %" PRIu32, pContext->mMeasurementMode);
break;
default:
*(int32_t *)pReplyData = -EINVAL;
@@ -545,9 +547,9 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
case VISUALIZER_CMD_CAPTURE: {
- int32_t captureSize = pContext->mCaptureSize;
+ uint32_t captureSize = pContext->mCaptureSize;
if (pReplyData == NULL || *replySize != captureSize) {
- ALOGV("VISUALIZER_CMD_CAPTURE() error *replySize %d captureSize %d",
+ ALOGV("VISUALIZER_CMD_CAPTURE() error *replySize %" PRIu32 " captureSize %" PRIu32,
*replySize, captureSize);
return -EINVAL;
}
@@ -573,7 +575,7 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
int32_t capturePoint = pContext->mCaptureIdx - captureSize - deltaSmpl;
if (capturePoint < 0) {
- int32_t size = -capturePoint;
+ uint32_t size = -capturePoint;
if (size > captureSize) {
size = captureSize;
}
@@ -604,7 +606,7 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
// measurements aren't relevant anymore and shouldn't bias the new one)
const int32_t delayMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext);
if (delayMs > DISCARD_MEASUREMENTS_TIME_MS) {
- ALOGV("Discarding measurements, last measurement is %dms old", delayMs);
+ ALOGV("Discarding measurements, last measurement is %" PRId32 "ms old", delayMs);
for (uint32_t i=0 ; i<pContext->mMeasurementWindowSizeInBuffers ; i++) {
pContext->mPastMeasurements[i].mIsValid = false;
pContext->mPastMeasurements[i].mPeakU16 = 0;
@@ -638,14 +640,14 @@ int Visualizer_command(effect_handle_t self, uint32_t cmdCode, uint32_t cmdSize,
} else {
pIntReplyData[MEASUREMENT_IDX_PEAK] = (int32_t) (2000 * log10(peakU16 / 32767.0f));
}
- ALOGV("VISUALIZER_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)",
+ ALOGV("VISUALIZER_CMD_MEASURE peak=%" PRIu16 " (%" PRId32 "mB), rms=%.1f (%" PRId32 "mB)",
peakU16, pIntReplyData[MEASUREMENT_IDX_PEAK],
rms, pIntReplyData[MEASUREMENT_IDX_RMS]);
}
break;
default:
- ALOGW("Visualizer_command invalid command %d",cmdCode);
+ ALOGW("Visualizer_command invalid command %" PRIu32, cmdCode);
return -EINVAL;
}
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 56e7787..6c585fb 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -25,6 +25,9 @@ LOCAL_SRC_FILES:= \
AudioRecord.cpp \
AudioSystem.cpp \
mediaplayer.cpp \
+ IMediaCodecList.cpp \
+ IMediaHTTPConnection.cpp \
+ IMediaHTTPService.cpp \
IMediaLogService.cpp \
IMediaPlayerService.cpp \
IMediaPlayerClient.cpp \
@@ -34,17 +37,20 @@ LOCAL_SRC_FILES:= \
IRemoteDisplay.cpp \
IRemoteDisplayClient.cpp \
IStreamSource.cpp \
+ MediaCodecInfo.cpp \
Metadata.cpp \
mediarecorder.cpp \
IMediaMetadataRetriever.cpp \
mediametadataretriever.cpp \
+ MidiIoWrapper.cpp \
ToneGenerator.cpp \
JetPlayer.cpp \
IOMX.cpp \
IAudioPolicyService.cpp \
+ IAudioPolicyServiceClient.cpp \
MediaScanner.cpp \
MediaScannerClient.cpp \
- autodetect.cpp \
+ CharacterEncodingDetector.cpp \
IMediaDeathNotifier.cpp \
MediaProfiles.cpp \
IEffect.cpp \
@@ -52,32 +58,39 @@ LOCAL_SRC_FILES:= \
AudioEffect.cpp \
Visualizer.cpp \
MemoryLeakTrackUtil.cpp \
- SoundPool.cpp \
- SoundPoolThread.cpp \
- StringArray.cpp
+ StringArray.cpp \
+ AudioPolicy.cpp
LOCAL_SRC_FILES += ../libnbaio/roundup.c
-# for <cutils/atomic-inline.h>
-LOCAL_CFLAGS += -DANDROID_SMP=$(if $(findstring true,$(TARGET_CPU_SMP)),1,0)
-LOCAL_SRC_FILES += SingleStateQueue.cpp
-LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"'
-# Consider a separate a library for SingleStateQueueInstantiations.
-
LOCAL_SHARED_LIBRARIES := \
- libui liblog libcutils libutils libbinder libsonivox libicuuc libexpat \
+ libui liblog libcutils libutils libbinder libsonivox libicuuc libicui18n libexpat \
libcamera_client libstagefright_foundation \
- libgui libdl libaudioutils
+ libgui libdl libaudioutils libnbaio
+
+LOCAL_STATIC_LIBRARIES += libinstantssq
-LOCAL_WHOLE_STATIC_LIBRARY := libmedia_helper
+LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
LOCAL_MODULE:= libmedia
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
+
LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg) \
$(TOP)/frameworks/native/include/media/openmax \
- external/icu4c/common \
+ $(TOP)/frameworks/av/include/media/ \
+ $(TOP)/frameworks/av/media/libstagefright \
$(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES += SingleStateQueue.cpp
+LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"'
+
+LOCAL_MODULE := libinstantssq
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index 8dfffb3..af103c1 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -145,15 +145,19 @@ status_t AudioEffect::set(const effect_uuid_t *type,
return mStatus;
}
- mIEffect = iEffect;
mCblkMemory = cblk;
mCblk = static_cast<effect_param_cblk_t*>(cblk->pointer());
int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int);
mCblk->buffer = (uint8_t *)mCblk + bufOffset;
- iEffect->asBinder()->linkToDeath(mIEffectClient);
- ALOGV("set() %p OK effect: %s id: %d status %d enabled %d", this, mDescriptor.name, mId,
- mStatus, mEnabled);
+ IInterface::asBinder(iEffect)->linkToDeath(mIEffectClient);
+ mClientPid = IPCThreadState::self()->getCallingPid();
+ ALOGV("set() %p OK effect: %s id: %d status %d enabled %d pid %d", this, mDescriptor.name, mId,
+ mStatus, mEnabled, mClientPid);
+
+ if (mSessionId > AUDIO_SESSION_OUTPUT_MIX) {
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
+ }
return mStatus;
}
@@ -164,9 +168,12 @@ AudioEffect::~AudioEffect()
ALOGV("Destructor %p", this);
if (mStatus == NO_ERROR || mStatus == ALREADY_EXISTS) {
+ if (mSessionId > AUDIO_SESSION_OUTPUT_MIX) {
+ AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
+ }
if (mIEffect != NULL) {
mIEffect->disconnect();
- mIEffect->asBinder()->unlinkToDeath(mIEffectClient);
+ IInterface::asBinder(mIEffect)->unlinkToDeath(mIEffectClient);
}
IPCThreadState::self()->flushCommands();
}
@@ -380,9 +387,9 @@ void AudioEffect::enableStatusChanged(bool enabled)
}
void AudioEffect::commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
+ uint32_t cmdSize __unused,
void *cmdData,
- uint32_t replySize,
+ uint32_t replySize __unused,
void *replyData)
{
if (cmdData == NULL || replyData == NULL) {
diff --git a/media/libmedia/AudioPolicy.cpp b/media/libmedia/AudioPolicy.cpp
new file mode 100644
index 0000000..d2d0971
--- /dev/null
+++ b/media/libmedia/AudioPolicy.cpp
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioPolicy"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+#include <media/AudioPolicy.h>
+
+namespace android {
+
+//
+// AttributeMatchCriterion implementation
+//
+AttributeMatchCriterion::AttributeMatchCriterion(audio_usage_t usage,
+ audio_source_t source,
+ uint32_t rule)
+: mRule(rule)
+{
+ if (mRule == RULE_MATCH_ATTRIBUTE_USAGE ||
+ mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE) {
+ mAttr.mUsage = usage;
+ } else {
+ mAttr.mSource = source;
+ }
+}
+
+status_t AttributeMatchCriterion::readFromParcel(Parcel *parcel)
+{
+ mRule = parcel->readInt32();
+ if (mRule == RULE_MATCH_ATTRIBUTE_USAGE ||
+ mRule == RULE_EXCLUDE_ATTRIBUTE_USAGE) {
+ mAttr.mUsage = (audio_usage_t)parcel->readInt32();
+ } else {
+ mAttr.mSource = (audio_source_t)parcel->readInt32();
+ }
+ return NO_ERROR;
+}
+
+status_t AttributeMatchCriterion::writeToParcel(Parcel *parcel) const
+{
+ parcel->writeInt32(mRule);
+ parcel->writeInt32(mAttr.mUsage);
+ return NO_ERROR;
+}
+
+//
+// AudioMix implementation
+//
+
+status_t AudioMix::readFromParcel(Parcel *parcel)
+{
+ mMixType = parcel->readInt32();
+ mFormat.sample_rate = (uint32_t)parcel->readInt32();
+ mFormat.channel_mask = (audio_channel_mask_t)parcel->readInt32();
+ mFormat.format = (audio_format_t)parcel->readInt32();
+ mRouteFlags = parcel->readInt32();
+ mRegistrationId = parcel->readString8();
+ size_t size = (size_t)parcel->readInt32();
+ if (size > MAX_CRITERIA_PER_MIX) {
+ size = MAX_CRITERIA_PER_MIX;
+ }
+ for (size_t i = 0; i < size; i++) {
+ AttributeMatchCriterion criterion;
+ if (criterion.readFromParcel(parcel) == NO_ERROR) {
+ mCriteria.add(criterion);
+ }
+ }
+ return NO_ERROR;
+}
+
+status_t AudioMix::writeToParcel(Parcel *parcel) const
+{
+ parcel->writeInt32(mMixType);
+ parcel->writeInt32(mFormat.sample_rate);
+ parcel->writeInt32(mFormat.channel_mask);
+ parcel->writeInt32(mFormat.format);
+ parcel->writeInt32(mRouteFlags);
+ parcel->writeString8(mRegistrationId);
+ size_t size = mCriteria.size();
+ if (size > MAX_CRITERIA_PER_MIX) {
+ size = MAX_CRITERIA_PER_MIX;
+ }
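+    // Remember where the criteria count is written so it can be patched below if any
+    // criterion fails to flatten and has to be dropped.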
+ size_t sizePosition = parcel->dataPosition();
+ parcel->writeInt32(size);
+ size_t finalSize = size;
+ for (size_t i = 0; i < size; i++) {
+ size_t position = parcel->dataPosition();
+ if (mCriteria[i].writeToParcel(parcel) != NO_ERROR) {
+ parcel->setDataPosition(position);
+ finalSize--;
+ }
+ }
+ if (size != finalSize) {
+ size_t position = parcel->dataPosition();
+ parcel->setDataPosition(sizePosition);
+ parcel->writeInt32(finalSize);
+ parcel->setDataPosition(position);
+ }
+ return NO_ERROR;
+}
+
+}; // namespace android
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 666fafa..07ca14f 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -18,7 +18,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioRecord"
+#include <inttypes.h>
#include <sys/resource.h>
+
#include <binder/IPCThreadState.h>
#include <media/AudioRecord.h>
#include <utils/Log.h>
@@ -41,37 +43,30 @@ status_t AudioRecord::getMinFrameCount(
return BAD_VALUE;
}
- // default to 0 in case of error
- *frameCount = 0;
-
- size_t size = 0;
+ size_t size;
status_t status = AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &size);
if (status != NO_ERROR) {
- ALOGE("AudioSystem could not query the input buffer size; status %d", status);
- return NO_INIT;
+ ALOGE("AudioSystem could not query the input buffer size for sampleRate %u, format %#x, "
+ "channelMask %#x; status %d", sampleRate, format, channelMask, status);
+ return status;
}
- if (size == 0) {
- ALOGE("Unsupported configuration: sampleRate %u, format %d, channelMask %#x",
+    // We double the size of the input buffer for ping-pong use of the record buffer.
+ // Assumes audio_is_linear_pcm(format)
+ if ((*frameCount = (size * 2) / (audio_channel_count_from_in_mask(channelMask) *
+ audio_bytes_per_sample(format))) == 0) {
+ ALOGE("Unsupported configuration: sampleRate %u, format %#x, channelMask %#x",
sampleRate, format, channelMask);
return BAD_VALUE;
}
- // We double the size of input buffer for ping pong use of record buffer.
- size <<= 1;
-
- // Assumes audio_is_linear_pcm(format)
- uint32_t channelCount = popcount(channelMask);
- size /= channelCount * audio_bytes_per_sample(format);
-
- *frameCount = size;
return NO_ERROR;
}
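As a worked example (the buffer size is a hypothetical HAL value; real sizes come from AudioSystem::getInputBufferSize()), a mono 16-bit PCM input reporting 1280 bytes yields:

    static size_t exampleMinFrameCount() {
        size_t size = 1280;                                                           // assumed HAL buffer size in bytes
        uint32_t channels = audio_channel_count_from_in_mask(AUDIO_CHANNEL_IN_MONO);  // 1
        size_t bytesPerSample = audio_bytes_per_sample(AUDIO_FORMAT_PCM_16_BIT);      // 2
        return (size * 2) / (channels * bytesPerSample);                              // doubled for ping-pong: 1280 frames
    }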
// ---------------------------------------------------------------------------
AudioRecord::AudioRecord()
- : mStatus(NO_INIT), mSessionId(0),
+ : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE),
mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT)
{
}
@@ -81,20 +76,22 @@ AudioRecord::AudioRecord(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCount,
+ size_t frameCount,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
int sessionId,
transfer_type transferType,
- audio_input_flags_t flags)
- : mStatus(NO_INIT), mSessionId(0),
+ audio_input_flags_t flags,
+ const audio_attributes_t* pAttributes)
+ : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
mPreviousSchedulingGroup(SP_DEFAULT),
mProxy(NULL)
{
mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
- notificationFrames, false /*threadCanCallJava*/, sessionId, transferType);
+ notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags,
+ pAttributes);
}
AudioRecord::~AudioRecord()
@@ -110,12 +107,12 @@ AudioRecord::~AudioRecord()
mAudioRecordThread->requestExitAndWait();
mAudioRecordThread.clear();
}
- if (mAudioRecord != 0) {
- mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
- mAudioRecord.clear();
- }
+ IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
+ mAudioRecord.clear();
+ mCblkMemory.clear();
+ mBufferMemory.clear();
IPCThreadState::self()->flushCommands();
- AudioSystem::releaseAudioSessionId(mSessionId);
+ AudioSystem::releaseAudioSessionId(mSessionId, -1);
}
}
@@ -124,15 +121,21 @@ status_t AudioRecord::set(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCountInt,
+ size_t frameCount,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
bool threadCanCallJava,
int sessionId,
transfer_type transferType,
- audio_input_flags_t flags)
+ audio_input_flags_t flags,
+ const audio_attributes_t* pAttributes)
{
+ ALOGV("set(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
+ "notificationFrames %u, sessionId %d, transferType %d, flags %#x",
+ inputSource, sampleRate, format, channelMask, frameCount, notificationFrames,
+ sessionId, transferType, flags);
+
switch (transferType) {
case TRANSFER_DEFAULT:
if (cbf == NULL || threadCanCallJava) {
@@ -156,27 +159,23 @@ status_t AudioRecord::set(
}
mTransfer = transferType;
- // FIXME "int" here is legacy and will be replaced by size_t later
- if (frameCountInt < 0) {
- ALOGE("Invalid frame count %d", frameCountInt);
- return BAD_VALUE;
- }
- size_t frameCount = frameCountInt;
-
- ALOGV("set(): sampleRate %u, channelMask %#x, frameCount %u", sampleRate, channelMask,
- frameCount);
-
AutoMutex lock(mLock);
+ // invariant that mAudioRecord != 0 is true only after set() returns successfully
if (mAudioRecord != 0) {
ALOGE("Track already in use");
return INVALID_OPERATION;
}
- if (inputSource == AUDIO_SOURCE_DEFAULT) {
- inputSource = AUDIO_SOURCE_MIC;
+ if (pAttributes == NULL) {
+ memset(&mAttributes, 0, sizeof(audio_attributes_t));
+ mAttributes.source = inputSource;
+ } else {
+ // stream type shouldn't be looked at, this track has audio attributes
+ memcpy(&mAttributes, pAttributes, sizeof(audio_attributes_t));
+ ALOGV("Building AudioRecord with attributes: source=%d flags=0x%x tags=[%s]",
+ mAttributes.source, mAttributes.flags, mAttributes.tags);
}
- mInputSource = inputSource;
if (sampleRate == 0) {
ALOGE("Invalid sample rate %u", sampleRate);
@@ -191,12 +190,12 @@ status_t AudioRecord::set(
// validate parameters
if (!audio_is_valid_format(format)) {
- ALOGE("Invalid format %d", format);
+ ALOGE("Invalid format %#x", format);
return BAD_VALUE;
}
// Temporary restriction: AudioFlinger currently supports 16-bit PCM only
if (format != AUDIO_FORMAT_PCM_16_BIT) {
- ALOGE("Format %d is not supported", format);
+ ALOGE("Format %#x is not supported", format);
return BAD_VALUE;
}
mFormat = format;
@@ -206,61 +205,50 @@ status_t AudioRecord::set(
return BAD_VALUE;
}
mChannelMask = channelMask;
- uint32_t channelCount = popcount(channelMask);
+ uint32_t channelCount = audio_channel_count_from_in_mask(channelMask);
mChannelCount = channelCount;
- // Assumes audio_is_linear_pcm(format), else sizeof(uint8_t)
- mFrameSize = channelCount * audio_bytes_per_sample(format);
-
- // validate framecount
- size_t minFrameCount = 0;
- status_t status = AudioRecord::getMinFrameCount(&minFrameCount,
- sampleRate, format, channelMask);
- if (status != NO_ERROR) {
- ALOGE("getMinFrameCount() failed; status %d", status);
- return status;
+ if (audio_is_linear_pcm(format)) {
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
+ } else {
+ mFrameSize = sizeof(uint8_t);
}
- ALOGV("AudioRecord::set() minFrameCount = %d", minFrameCount);
- if (frameCount == 0) {
- frameCount = minFrameCount;
- } else if (frameCount < minFrameCount) {
- ALOGE("frameCount %u < minFrameCount %u", frameCount, minFrameCount);
- return BAD_VALUE;
- }
- mFrameCount = frameCount;
+ // mFrameCount is initialized in openRecord_l
+ mReqFrameCount = frameCount;
mNotificationFramesReq = notificationFrames;
- mNotificationFramesAct = 0;
+ // mNotificationFramesAct is initialized in openRecord_l
- if (sessionId == 0 ) {
- mSessionId = AudioSystem::newAudioSessionId();
+ if (sessionId == AUDIO_SESSION_ALLOCATE) {
+ mSessionId = AudioSystem::newAudioUniqueId();
} else {
mSessionId = sessionId;
}
ALOGV("set(): mSessionId %d", mSessionId);
mFlags = flags;
-
- // create the IAudioRecord
- status = openRecord_l(0 /*epoch*/);
- if (status) {
- return status;
- }
+ mCbf = cbf;
if (cbf != NULL) {
mAudioRecordThread = new AudioRecordThread(*this, threadCanCallJava);
mAudioRecordThread->run("AudioRecord", ANDROID_PRIORITY_AUDIO);
}
- mStatus = NO_ERROR;
+ // create the IAudioRecord
+ status_t status = openRecord_l(0 /*epoch*/);
- // Update buffer size in case it has been limited by AudioFlinger during track creation
- mFrameCount = mCblk->frameCount_;
+ if (status != NO_ERROR) {
+ if (mAudioRecordThread != 0) {
+ mAudioRecordThread->requestExit(); // see comment in AudioRecord.h
+ mAudioRecordThread->requestExitAndWait();
+ mAudioRecordThread.clear();
+ }
+ return status;
+ }
+ mStatus = NO_ERROR;
mActive = false;
- mCbf = cbf;
- mRefreshRemaining = true;
mUserData = user;
// TODO: add audio hardware input latency here
mLatency = (1000*mFrameCount) / sampleRate;
@@ -268,7 +256,7 @@ status_t AudioRecord::set(
mMarkerReached = false;
mNewPosition = 0;
mUpdatePeriod = 0;
- AudioSystem::acquireAudioSessionId(mSessionId);
+ AudioSystem::acquireAudioSessionId(mSessionId, -1);
mSequence = 1;
mObservedSequence = mSequence;
mInOverrun = false;
@@ -289,6 +277,9 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession)
// reset current position as seen by client to 0
mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition());
+ // force refresh of remaining frames by processAudioBuffer() as last
+ // read before stop could be partial.
+ mRefreshRemaining = true;
mNewPosition = mProxy->getPosition() + mUpdatePeriod;
int32_t flags = android_atomic_acquire_load(&mCblk->mFlags);
@@ -352,6 +343,7 @@ bool AudioRecord::stopped() const
status_t AudioRecord::setMarkerPosition(uint32_t marker)
{
+ // The only purpose of setting marker position is to get a callback
if (mCbf == NULL) {
return INVALID_OPERATION;
}
@@ -377,6 +369,7 @@ status_t AudioRecord::getMarkerPosition(uint32_t *marker) const
status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod)
{
+ // The only purpose of setting position update period is to get a callback
if (mCbf == NULL) {
return INVALID_OPERATION;
}
@@ -412,7 +405,7 @@ status_t AudioRecord::getPosition(uint32_t *position) const
return NO_ERROR;
}
-unsigned int AudioRecord::getInputFramesLost() const
+uint32_t AudioRecord::getInputFramesLost() const
{
// no need to check mActive, because if inactive this will return 0, which is what we want
return AudioSystem::getInputFramesLost(getInput());
@@ -430,56 +423,81 @@ status_t AudioRecord::openRecord_l(size_t epoch)
return NO_INIT;
}
- IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT;
- pid_t tid = -1;
+ // Fast tracks must be at the primary _output_ [sic] sampling rate,
+ // because there is currently no concept of a primary input sampling rate
+ uint32_t afSampleRate = AudioSystem::getPrimaryOutputSamplingRate();
+ if (afSampleRate == 0) {
+ ALOGW("getPrimaryOutputSamplingRate failed");
+ }
// Client can only express a preference for FAST. Server will perform additional tests.
- // The only supported use case for FAST is callback transfer mode.
- if (mFlags & AUDIO_INPUT_FLAG_FAST) {
- if ((mTransfer != TRANSFER_CALLBACK) || (mAudioRecordThread == 0)) {
- ALOGW("AUDIO_INPUT_FLAG_FAST denied by client");
- // once denied, do not request again if IAudioRecord is re-created
- mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
- } else {
- trackFlags |= IAudioFlinger::TRACK_FAST;
- tid = mAudioRecordThread->getTid();
- }
+ if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !(
+ // use case: callback transfer mode
+ (mTransfer == TRANSFER_CALLBACK) &&
+ // matching sample rate
+ (mSampleRate == afSampleRate))) {
+ ALOGW("AUDIO_INPUT_FLAG_FAST denied by client");
+ // once denied, do not request again if IAudioRecord is re-created
+ mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
}
- mNotificationFramesAct = mNotificationFramesReq;
+ IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT;
- if (!(mFlags & AUDIO_INPUT_FLAG_FAST)) {
- // Make sure that application is notified with sufficient margin before overrun
- if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount/2) {
- mNotificationFramesAct = mFrameCount/2;
+ pid_t tid = -1;
+ if (mFlags & AUDIO_INPUT_FLAG_FAST) {
+ trackFlags |= IAudioFlinger::TRACK_FAST;
+ if (mAudioRecordThread != 0) {
+ tid = mAudioRecordThread->getTid();
}
}
- audio_io_handle_t input = AudioSystem::getInput(mInputSource, mSampleRate, mFormat,
- mChannelMask, mSessionId);
- if (input == 0) {
- ALOGE("Could not get audio input for record source %d", mInputSource);
+ audio_io_handle_t input;
+ status = AudioSystem::getInputForAttr(&mAttributes, &input, (audio_session_t)mSessionId,
+ mSampleRate, mFormat, mChannelMask, mFlags);
+
+ if (status != NO_ERROR) {
+ ALOGE("Could not get audio input for record source %d, sample rate %u, format %#x, "
+ "channel mask %#x, session %d, flags %#x",
+ mAttributes.source, mSampleRate, mFormat, mChannelMask, mSessionId, mFlags);
return BAD_VALUE;
}
+ {
+ // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
+ // we must release it ourselves if anything goes wrong.
+ size_t frameCount = mReqFrameCount;
+ size_t temp = frameCount; // temp may be replaced by a revised value of frameCount,
+ // but we will still need the original value also
int originalSessionId = mSessionId;
+
+ // The notification frame count is the period between callbacks, as suggested by the server.
+ size_t notificationFrames = mNotificationFramesReq;
+
+ sp<IMemory> iMem; // for cblk
+ sp<IMemory> bufferMem;
sp<IAudioRecord> record = audioFlinger->openRecord(input,
mSampleRate, mFormat,
mChannelMask,
- mFrameCount,
+ &temp,
&trackFlags,
tid,
&mSessionId,
+ &notificationFrames,
+ iMem,
+ bufferMem,
&status);
- ALOGE_IF(originalSessionId != 0 && mSessionId != originalSessionId,
+ ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
"session ID changed from %d to %d", originalSessionId, mSessionId);
- if (record == 0 || status != NO_ERROR) {
+ if (status != NO_ERROR) {
ALOGE("AudioFlinger could not create record track, status: %d", status);
- AudioSystem::releaseInput(input);
- return status;
+ goto release;
}
- sp<IMemory> iMem = record->getCblk();
+ ALOG_ASSERT(record != 0);
+
+ // AudioFlinger now owns the reference to the I/O handle,
+ // so we are no longer responsible for releasing it.
+
if (iMem == 0) {
ALOGE("Could not get control block");
return NO_INIT;
@@ -489,37 +507,67 @@ status_t AudioRecord::openRecord_l(size_t epoch)
ALOGE("Could not get control block pointer");
return NO_INIT;
}
+ audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
+
+ // Starting address of buffers in shared memory.
+ // The buffers are either immediately after the control block,
+ // or in a separate area at discretion of server.
+ void *buffers;
+ if (bufferMem == 0) {
+ buffers = cblk + 1;
+ } else {
+ buffers = bufferMem->pointer();
+ if (buffers == NULL) {
+ ALOGE("Could not get buffer pointer");
+ return NO_INIT;
+ }
+ }
+
+ // invariant that mAudioRecord != 0 is true only after set() returns successfully
if (mAudioRecord != 0) {
- mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
+ IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);
mDeathNotifier.clear();
}
- mInput = input;
mAudioRecord = record;
mCblkMemory = iMem;
- audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
+ mBufferMemory = bufferMem;
+ IPCThreadState::self()->flushCommands();
+
mCblk = cblk;
- // FIXME missing fast track frameCount logic
+ // note that temp is the (possibly revised) value of frameCount
+ if (temp < frameCount || (frameCount == 0 && temp == 0)) {
+ ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
+ }
+ frameCount = temp;
+
mAwaitBoost = false;
if (mFlags & AUDIO_INPUT_FLAG_FAST) {
if (trackFlags & IAudioFlinger::TRACK_FAST) {
- ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", mFrameCount);
+ ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %zu", frameCount);
mAwaitBoost = true;
- // double-buffering is not required for fast tracks, due to tighter scheduling
- if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount) {
- mNotificationFramesAct = mFrameCount;
- }
} else {
- ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %u", mFrameCount);
+ ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
// once denied, do not request again if IAudioRecord is re-created
mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
- if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount/2) {
- mNotificationFramesAct = mFrameCount/2;
- }
}
}
- // starting address of buffers in shared memory
- void *buffers = (char*)cblk + sizeof(audio_track_cblk_t);
+ // Make sure that application is notified with sufficient margin before overrun
+ if (notificationFrames == 0 || notificationFrames > frameCount) {
+ ALOGW("Received notificationFrames %zu for frameCount %zu", notificationFrames, frameCount);
+ }
+ mNotificationFramesAct = notificationFrames;
+
+ // We retain a copy of the I/O handle, but don't own the reference
+ mInput = input;
+ mRefreshRemaining = true;
+
+ mFrameCount = frameCount;
+ // If IAudioRecord is re-created, don't let the requested frameCount
+ // decrease. This can confuse clients that cache frameCount().
+ if (frameCount > mReqFrameCount) {
+ mReqFrameCount = frameCount;
+ }
// update proxy
mProxy = new AudioRecordClientProxy(cblk, buffers, mFrameCount, mFrameSize);
@@ -527,9 +575,17 @@ status_t AudioRecord::openRecord_l(size_t epoch)
mProxy->setMinimum(mNotificationFramesAct);
mDeathNotifier = new DeathNotifier(this);
- mAudioRecord->asBinder()->linkToDeath(mDeathNotifier, this);
+ IInterface::asBinder(mAudioRecord)->linkToDeath(mDeathNotifier, this);
return NO_ERROR;
+ }
+
+release:
+ AudioSystem::releaseInput(input, (audio_session_t)mSessionId);
+ if (status == NO_ERROR) {
+ status = NO_INIT;
+ }
+ return status;
}
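For orientation, the two shared-memory layouts that the reworked openRecord_l() accepts can be pictured as follows; this is only an illustration of the logic above, not additional code from the patch:

    // Combined region:  iMem = [ audio_track_cblk_t | data buffers ... ]     -> buffers = cblk + 1
    // Split regions:    iMem = [ audio_track_cblk_t ], bufferMem = [ data ]  -> buffers = bufferMem->pointer()
    // In both cases the client proxy is then built as
    //   mProxy = new AudioRecordClientProxy(cblk, buffers, mFrameCount, mFrameSize);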
status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
@@ -545,13 +601,13 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
}
const struct timespec *requested;
+ struct timespec timeout;
if (waitCount == -1) {
requested = &ClientProxy::kForever;
} else if (waitCount == 0) {
requested = &ClientProxy::kNonBlocking;
} else if (waitCount > 0) {
long long ms = WAIT_PERIOD_MS * (long long) waitCount;
- struct timespec timeout;
timeout.tv_sec = ms / 1000;
timeout.tv_nsec = (int) (ms % 1000) * 1000000;
requested = &timeout;
@@ -580,6 +636,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r
// keep them from going away if another thread re-creates the track during obtainBuffer()
sp<AudioRecordClientProxy> proxy;
sp<IMemory> iMem;
+ sp<IMemory> bufferMem;
{
// start of lock scope
AutoMutex lock(mLock);
@@ -591,6 +648,9 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r
if (newSequence == oldSequence) {
status = restoreRecord_l("obtainBuffer");
if (status != NO_ERROR) {
+ buffer.mFrameCount = 0;
+ buffer.mRaw = NULL;
+ buffer.mNonContig = 0;
break;
}
}
@@ -600,6 +660,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r
// Keep the extra references
proxy = mProxy;
iMem = mCblkMemory;
+ bufferMem = mBufferMemory;
// Non-blocking if track is stopped
if (!mActive) {
@@ -660,7 +721,7 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)
if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
// sanity-check. user is most-likely passing an error code, and it would
// make the return value ambiguous (actualSize vs error).
- ALOGE("AudioRecord::read(buffer=%p, size=%u (%d)", buffer, userSize, userSize);
+ ALOGE("AudioRecord::read(buffer=%p, size=%zu (%zu)", buffer, userSize, userSize);
return BAD_VALUE;
}
@@ -692,7 +753,7 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)
// -------------------------------------------------------------------------
-nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
+nsecs_t AudioRecord::processAudioBuffer()
{
mLock.lock();
if (mAwaitBoost) {
@@ -760,17 +821,17 @@ nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
}
// Cache other fields that will be needed soon
- size_t notificationFrames = mNotificationFramesAct;
+ uint32_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
mRefreshRemaining = false;
mRemainingFrames = notificationFrames;
mRetryOnPartialBuffer = false;
}
size_t misalignment = mProxy->getMisalignment();
- int32_t sequence = mSequence;
+ uint32_t sequence = mSequence;
// These fields don't need to be cached, because they are assigned only by set():
- // mTransfer, mCbf, mUserData, mSampleRate
+ // mTransfer, mCbf, mUserData, mSampleRate, mFrameSize
mLock.unlock();
@@ -841,11 +902,11 @@ nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
size_t nonContig;
status_t err = obtainBuffer(&audioBuffer, requested, NULL, &nonContig);
LOG_ALWAYS_FATAL_IF((err != NO_ERROR) != (audioBuffer.frameCount == 0),
- "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount);
+ "obtainBuffer() err=%d frameCount=%zu", err, audioBuffer.frameCount);
requested = &ClientProxy::kNonBlocking;
size_t avail = audioBuffer.frameCount + nonContig;
- ALOGV("obtainBuffer(%u) returned %u = %u + %u",
- mRemainingFrames, avail, audioBuffer.frameCount, nonContig);
+ ALOGV("obtainBuffer(%u) returned %zu = %zu + %zu err %d",
+ mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
if (err != NO_ERROR) {
if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR) {
break;
@@ -872,8 +933,8 @@ nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
// Sanity check on returned size
if (ssize_t(readSize) < 0 || readSize > reqSize) {
- ALOGE("EVENT_MORE_DATA requested %u bytes but callback returned %d bytes",
- reqSize, (int) readSize);
+ ALOGE("EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
+ reqSize, ssize_t(readSize));
return NS_NEVER;
}
@@ -932,7 +993,7 @@ status_t AudioRecord::restoreRecord_l(const char *from)
status_t result;
// if the new IAudioRecord is created, openRecord_l() will modify the
- // following member variables: mAudioRecord, mCblkMemory and mCblk.
+ // following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory.
// It will also delete the strong references on previous IAudioRecord and IMemory
size_t position = mProxy->getPosition();
mNewPosition = position + mUpdatePeriod;
@@ -954,7 +1015,7 @@ status_t AudioRecord::restoreRecord_l(const char *from)
// =========================================================================
-void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who)
+void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
{
sp<AudioRecord> audioRecord = mAudioRecord.promote();
if (audioRecord != 0) {
@@ -966,7 +1027,8 @@ void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who)
// =========================================================================
AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava)
- : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
+ : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL),
+ mIgnoreNextPausedInt(false)
{
}
@@ -983,6 +1045,10 @@ bool AudioRecord::AudioRecordThread::threadLoop()
// caller will check for exitPending()
return true;
}
+ if (mIgnoreNextPausedInt) {
+ mIgnoreNextPausedInt = false;
+ mPausedInt = false;
+ }
if (mPausedInt) {
if (mPausedNs > 0) {
(void) mMyCond.waitRelative(mMyLock, mPausedNs);
@@ -993,7 +1059,7 @@ bool AudioRecord::AudioRecordThread::threadLoop()
return true;
}
}
- nsecs_t ns = mReceiver.processAudioBuffer(this);
+ nsecs_t ns = mReceiver.processAudioBuffer();
switch (ns) {
case 0:
return true;
@@ -1007,7 +1073,7 @@ bool AudioRecord::AudioRecordThread::threadLoop()
ns = 1000000000LL;
// fall through
default:
- LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
+ LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
pauseInternal(ns);
return true;
}
@@ -1017,12 +1083,7 @@ void AudioRecord::AudioRecordThread::requestExit()
{
// must be in this order to avoid a race condition
Thread::requestExit();
- AutoMutex _l(mMyLock);
- if (mPaused || mPausedInt) {
- mPaused = false;
- mPausedInt = false;
- mMyCond.signal();
- }
+ resume();
}
void AudioRecord::AudioRecordThread::pause()
@@ -1034,6 +1095,7 @@ void AudioRecord::AudioRecordThread::pause()
void AudioRecord::AudioRecordThread::resume()
{
AutoMutex _l(mMyLock);
+ mIgnoreNextPausedInt = true;
if (mPaused || mPausedInt) {
mPaused = false;
mPausedInt = false;
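Taken together, the AudioRecord changes let a capture client describe itself with audio_attributes_t; when none are supplied, set() falls back to a zeroed structure whose source field carries the legacy inputSource. A minimal caller-side sketch of building such attributes follows; the helper is hypothetical and only mirrors what set() does internally, it is not part of this patch:

    #include <string.h>
    #include <system/audio.h>

    // Hypothetical helper: build capture attributes the same way AudioRecord::set()
    // does when pAttributes == NULL, then pass the result as the trailing
    // pAttributes argument of set().
    static void fillRecordAttributes(audio_attributes_t *attr, audio_source_t source)
    {
        memset(attr, 0, sizeof(audio_attributes_t));  // zero flags and tags
        attr->source = source;                        // e.g. AUDIO_SOURCE_MIC
    }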
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index cc5b810..9cae21c 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -32,48 +32,59 @@ namespace android {
// client singleton for AudioFlinger binder interface
Mutex AudioSystem::gLock;
+Mutex AudioSystem::gLockCache;
+Mutex AudioSystem::gLockAPS;
+Mutex AudioSystem::gLockAPC;
sp<IAudioFlinger> AudioSystem::gAudioFlinger;
sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
-// Cached values
-DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(0);
+// Cached values for output handles
+DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(NULL);
// Cached values for recording queries, all protected by gLock
-uint32_t AudioSystem::gPrevInSamplingRate = 16000;
-audio_format_t AudioSystem::gPrevInFormat = AUDIO_FORMAT_PCM_16_BIT;
-audio_channel_mask_t AudioSystem::gPrevInChannelMask = AUDIO_CHANNEL_IN_MONO;
-size_t AudioSystem::gInBuffSize = 0;
+uint32_t AudioSystem::gPrevInSamplingRate;
+audio_format_t AudioSystem::gPrevInFormat;
+audio_channel_mask_t AudioSystem::gPrevInChannelMask;
+size_t AudioSystem::gInBuffSize = 0; // zero indicates cache is invalid
+sp<AudioSystem::AudioPortCallback> AudioSystem::gAudioPortCallback;
// establish binder interface to AudioFlinger service
-const sp<IAudioFlinger>& AudioSystem::get_audio_flinger()
-{
- Mutex::Autolock _l(gLock);
- if (gAudioFlinger == 0) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- do {
- binder = sm->getService(String16("media.audio_flinger"));
- if (binder != 0)
- break;
- ALOGW("AudioFlinger not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
- if (gAudioFlingerClient == NULL) {
- gAudioFlingerClient = new AudioFlingerClient();
- } else {
- if (gAudioErrorCallback) {
- gAudioErrorCallback(NO_ERROR);
+const sp<IAudioFlinger> AudioSystem::get_audio_flinger()
+{
+ sp<IAudioFlinger> af;
+ sp<AudioFlingerClient> afc;
+ {
+ Mutex::Autolock _l(gLock);
+ if (gAudioFlinger == 0) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder;
+ do {
+ binder = sm->getService(String16("media.audio_flinger"));
+ if (binder != 0)
+ break;
+ ALOGW("AudioFlinger not published, waiting...");
+ usleep(500000); // 0.5 s
+ } while (true);
+ if (gAudioFlingerClient == NULL) {
+ gAudioFlingerClient = new AudioFlingerClient();
+ } else {
+ if (gAudioErrorCallback) {
+ gAudioErrorCallback(NO_ERROR);
+ }
}
+ binder->linkToDeath(gAudioFlingerClient);
+ gAudioFlinger = interface_cast<IAudioFlinger>(binder);
+ LOG_ALWAYS_FATAL_IF(gAudioFlinger == 0);
+ afc = gAudioFlingerClient;
}
- binder->linkToDeath(gAudioFlingerClient);
- gAudioFlinger = interface_cast<IAudioFlinger>(binder);
- gAudioFlinger->registerClient(gAudioFlingerClient);
+ af = gAudioFlinger;
}
- ALOGE_IF(gAudioFlinger==0, "no AudioFlinger!?");
-
- return gAudioFlinger;
+ if (afc != 0) {
+ af->registerClient(afc);
+ }
+ return af;
}
/* static */ status_t AudioSystem::checkAudioFlinger()
@@ -84,13 +95,15 @@ const sp<IAudioFlinger>& AudioSystem::get_audio_flinger()
return DEAD_OBJECT;
}
-status_t AudioSystem::muteMicrophone(bool state) {
+status_t AudioSystem::muteMicrophone(bool state)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setMicMute(state);
}
-status_t AudioSystem::isMicrophoneMuted(bool* state) {
+status_t AudioSystem::isMicrophoneMuted(bool* state)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*state = af->getMicMute();
@@ -175,13 +188,15 @@ status_t AudioSystem::setMode(audio_mode_t mode)
return af->setMode(mode);
}
-status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
+status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setParameters(ioHandle, keyValuePairs);
}
-String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
+String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
String8 result = String8("");
if (af == 0) return result;
@@ -190,6 +205,16 @@ String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& ke
return result;
}
+status_t AudioSystem::setParameters(const String8& keyValuePairs)
+{
+ return setParameters(AUDIO_IO_HANDLE_NONE, keyValuePairs);
+}
+
+String8 AudioSystem::getParameters(const String8& keys)
+{
+ return getParameters(AUDIO_IO_HANDLE_NONE, keys);
+}
+
// convert volume steps to natural log scale
// change this value to change volume scaling
@@ -227,31 +252,33 @@ status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream
return PERMISSION_DENIED;
}
- return getSamplingRate(output, streamType, samplingRate);
+ return getSamplingRate(output, samplingRate);
}
status_t AudioSystem::getSamplingRate(audio_io_handle_t output,
- audio_stream_type_t streamType,
uint32_t* samplingRate)
{
- OutputDescriptor *outputDesc;
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+
+ Mutex::Autolock _l(gLockCache);
- gLock.lock();
- outputDesc = AudioSystem::gOutputs.valueFor(output);
+ OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == NULL) {
ALOGV("getOutputSamplingRate() no output descriptor for output %d in gOutputs", output);
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ gLockCache.unlock();
*samplingRate = af->sampleRate(output);
+ gLockCache.lock();
} else {
ALOGV("getOutputSamplingRate() reading from output desc");
*samplingRate = outputDesc->samplingRate;
- gLock.unlock();
+ }
+ if (*samplingRate == 0) {
+ ALOGE("AudioSystem::getSamplingRate failed for output %d", output);
+ return BAD_VALUE;
}
- ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %u", streamType, output,
- *samplingRate);
+ ALOGV("getSamplingRate() output %d, sampling rate %u", output, *samplingRate);
return NO_ERROR;
}
@@ -265,33 +292,35 @@ status_t AudioSystem::getOutputFrameCount(size_t* frameCount, audio_stream_type_
}
output = getOutput(streamType);
- if (output == 0) {
+ if (output == AUDIO_IO_HANDLE_NONE) {
return PERMISSION_DENIED;
}
- return getFrameCount(output, streamType, frameCount);
+ return getFrameCount(output, frameCount);
}
status_t AudioSystem::getFrameCount(audio_io_handle_t output,
- audio_stream_type_t streamType,
size_t* frameCount)
{
- OutputDescriptor *outputDesc;
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
+
+ Mutex::Autolock _l(gLockCache);
- gLock.lock();
- outputDesc = AudioSystem::gOutputs.valueFor(output);
+ OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == NULL) {
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ gLockCache.unlock();
*frameCount = af->frameCount(output);
+ gLockCache.lock();
} else {
*frameCount = outputDesc->frameCount;
- gLock.unlock();
+ }
+ if (*frameCount == 0) {
+ ALOGE("AudioSystem::getFrameCount failed for output %d", output);
+ return BAD_VALUE;
}
- ALOGV("getFrameCount() streamType %d, output %d, frameCount %d", streamType, output,
- *frameCount);
+ ALOGV("getFrameCount() output %d, frameCount %zu", output, *frameCount);
return NO_ERROR;
}
@@ -305,32 +334,31 @@ status_t AudioSystem::getOutputLatency(uint32_t* latency, audio_stream_type_t st
}
output = getOutput(streamType);
- if (output == 0) {
+ if (output == AUDIO_IO_HANDLE_NONE) {
return PERMISSION_DENIED;
}
- return getLatency(output, streamType, latency);
+ return getLatency(output, latency);
}
status_t AudioSystem::getLatency(audio_io_handle_t output,
- audio_stream_type_t streamType,
uint32_t* latency)
{
- OutputDescriptor *outputDesc;
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return PERMISSION_DENIED;
- gLock.lock();
- outputDesc = AudioSystem::gOutputs.valueFor(output);
+ Mutex::Autolock _l(gLockCache);
+
+ OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output);
if (outputDesc == NULL) {
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
+ gLockCache.unlock();
*latency = af->latency(output);
+ gLockCache.lock();
} else {
*latency = outputDesc->latency;
- gLock.unlock();
}
- ALOGV("getLatency() streamType %d, output %d, latency %d", streamType, output, *latency);
+ ALOGV("getLatency() output %d, latency %d", output, *latency);
return NO_ERROR;
}
@@ -338,18 +366,24 @@ status_t AudioSystem::getLatency(audio_io_handle_t output,
status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t format,
audio_channel_mask_t channelMask, size_t* buffSize)
{
- gLock.lock();
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) {
+ return PERMISSION_DENIED;
+ }
+ Mutex::Autolock _l(gLockCache);
// Do we have a stale gInBufferSize or are we requesting the input buffer size for new values
size_t inBuffSize = gInBuffSize;
if ((inBuffSize == 0) || (sampleRate != gPrevInSamplingRate) || (format != gPrevInFormat)
|| (channelMask != gPrevInChannelMask)) {
- gLock.unlock();
- const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) {
- return PERMISSION_DENIED;
- }
+ gLockCache.unlock();
inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask);
- gLock.lock();
+ gLockCache.lock();
+ if (inBuffSize == 0) {
+ ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x",
+ sampleRate, format, channelMask);
+ return BAD_VALUE;
+ }
+ // A benign race is possible here: we could overwrite a fresher cache entry
// save the request params
gPrevInSamplingRate = sampleRate;
gPrevInFormat = format;
@@ -357,7 +391,6 @@ status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t for
gInBuffSize = inBuffSize;
}
- gLock.unlock();
*buffSize = inBuffSize;
return NO_ERROR;
@@ -371,63 +404,74 @@ status_t AudioSystem::setVoiceVolume(float value)
}
status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t *halFrames,
- uint32_t *dspFrames, audio_stream_type_t stream)
+ uint32_t *dspFrames)
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
- if (stream == AUDIO_STREAM_DEFAULT) {
- stream = AUDIO_STREAM_MUSIC;
- }
-
- if (output == 0) {
- output = getOutput(stream);
- }
-
return af->getRenderPosition(halFrames, dspFrames, output);
}
-size_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
+uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- unsigned int result = 0;
+ uint32_t result = 0;
if (af == 0) return result;
- if (ioHandle == 0) return result;
+ if (ioHandle == AUDIO_IO_HANDLE_NONE) return result;
result = af->getInputFramesLost(ioHandle);
return result;
}
-int AudioSystem::newAudioSessionId() {
+audio_unique_id_t AudioSystem::newAudioUniqueId()
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return 0;
- return af->newAudioSessionId();
+ if (af == 0) return AUDIO_UNIQUE_ID_ALLOCATE;
+ return af->newAudioUniqueId();
}
-void AudioSystem::acquireAudioSessionId(int audioSession) {
+void AudioSystem::acquireAudioSessionId(int audioSession, pid_t pid)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af != 0) {
- af->acquireAudioSessionId(audioSession);
+ af->acquireAudioSessionId(audioSession, pid);
}
}
-void AudioSystem::releaseAudioSessionId(int audioSession) {
+void AudioSystem::releaseAudioSessionId(int audioSession, pid_t pid)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af != 0) {
- af->releaseAudioSessionId(audioSession);
+ af->releaseAudioSessionId(audioSession, pid);
}
}
+audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId)
+{
+ const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
+ if (af == 0) return AUDIO_HW_SYNC_INVALID;
+ return af->getAudioHwSyncForSession(sessionId);
+}
+
// ---------------------------------------------------------------------------
-void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) {
- Mutex::Autolock _l(AudioSystem::gLock);
+void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
+{
+ audio_error_callback cb = NULL;
+ {
+ Mutex::Autolock _l(AudioSystem::gLock);
+ AudioSystem::gAudioFlinger.clear();
+ cb = gAudioErrorCallback;
+ }
- AudioSystem::gAudioFlinger.clear();
- // clear output handles and stream to output map caches
- AudioSystem::gOutputs.clear();
+ {
+ // clear output handles and stream to output map caches
+ Mutex::Autolock _l(gLockCache);
+ AudioSystem::gOutputs.clear();
+ }
- if (gAudioErrorCallback) {
- gAudioErrorCallback(DEAD_OBJECT);
+ if (cb) {
+ cb(DEAD_OBJECT);
}
ALOGW("AudioFlinger server died!");
}
@@ -438,9 +482,9 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
const OutputDescriptor *desc;
audio_stream_type_t stream;
- if (ioHandle == 0) return;
+ if (ioHandle == AUDIO_IO_HANDLE_NONE) return;
- Mutex::Autolock _l(AudioSystem::gLock);
+ Mutex::Autolock _l(AudioSystem::gLockCache);
switch (event) {
case STREAM_CONFIG_CHANGED:
@@ -455,7 +499,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
OutputDescriptor *outputDesc = new OutputDescriptor(*desc);
gOutputs.add(ioHandle, outputDesc);
- ALOGV("ioConfigChanged() new output samplingRate %u, format %d channel mask %#x frameCount %u "
+ ALOGV("ioConfigChanged() new output samplingRate %u, format %#x channel mask %#x frameCount %zu "
"latency %d",
outputDesc->samplingRate, outputDesc->format, outputDesc->channelMask,
outputDesc->frameCount, outputDesc->latency);
@@ -479,8 +523,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
if (param2 == NULL) break;
desc = (const OutputDescriptor *)param2;
- ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %d channel mask %#x "
- "frameCount %d latency %d",
+ ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x channel mask %#x "
+ "frameCount %zu latency %d",
ioHandle, desc->samplingRate, desc->format,
desc->channelMask, desc->frameCount, desc->latency);
OutputDescriptor *outputDesc = gOutputs.valueAt(index);
@@ -496,53 +540,50 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
}
}
-void AudioSystem::setErrorCallback(audio_error_callback cb) {
+void AudioSystem::setErrorCallback(audio_error_callback cb)
+{
Mutex::Autolock _l(gLock);
gAudioErrorCallback = cb;
}
-bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType) {
- switch (streamType) {
- case AUDIO_STREAM_MUSIC:
- case AUDIO_STREAM_VOICE_CALL:
- case AUDIO_STREAM_BLUETOOTH_SCO:
- case AUDIO_STREAM_SYSTEM:
- return true;
- default:
- return false;
- }
-}
-
-
// client singleton for AudioPolicyService binder interface
+// protected by gLockAPS
sp<IAudioPolicyService> AudioSystem::gAudioPolicyService;
sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient;
// establish binder interface to AudioPolicy service
-const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service()
-{
- gLock.lock();
- if (gAudioPolicyService == 0) {
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- do {
- binder = sm->getService(String16("media.audio_policy"));
- if (binder != 0)
- break;
- ALOGW("AudioPolicyService not published, waiting...");
- usleep(500000); // 0.5 s
- } while (true);
- if (gAudioPolicyServiceClient == NULL) {
- gAudioPolicyServiceClient = new AudioPolicyServiceClient();
+const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service()
+{
+ sp<IAudioPolicyService> ap;
+ sp<AudioPolicyServiceClient> apc;
+ {
+ Mutex::Autolock _l(gLockAPS);
+ if (gAudioPolicyService == 0) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder;
+ do {
+ binder = sm->getService(String16("media.audio_policy"));
+ if (binder != 0)
+ break;
+ ALOGW("AudioPolicyService not published, waiting...");
+ usleep(500000); // 0.5 s
+ } while (true);
+ if (gAudioPolicyServiceClient == NULL) {
+ gAudioPolicyServiceClient = new AudioPolicyServiceClient();
+ }
+ binder->linkToDeath(gAudioPolicyServiceClient);
+ gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
+ LOG_ALWAYS_FATAL_IF(gAudioPolicyService == 0);
+ apc = gAudioPolicyServiceClient;
}
- binder->linkToDeath(gAudioPolicyServiceClient);
- gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
- gLock.unlock();
- } else {
- gLock.unlock();
+ ap = gAudioPolicyService;
}
- return gAudioPolicyService;
+ if (apc != 0) {
+ ap->registerClient(apc);
+ }
+
+ return ap;
}
// ---------------------------------------------------------------------------
@@ -608,9 +649,26 @@ audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream,
return aps->getOutput(stream, samplingRate, format, channelMask, flags, offloadInfo);
}
+status_t AudioSystem::getOutputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *output,
+ audio_session_t session,
+ audio_stream_type_t *stream,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ const audio_offload_info_t *offloadInfo)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return NO_INIT;
+ return aps->getOutputForAttr(attr, output, session, stream,
+ samplingRate, format, channelMask,
+ flags, offloadInfo);
+}
+
status_t AudioSystem::startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
@@ -619,50 +677,57 @@ status_t AudioSystem::startOutput(audio_io_handle_t output,
status_t AudioSystem::stopOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
return aps->stopOutput(output, stream, session);
}
-void AudioSystem::releaseOutput(audio_io_handle_t output)
+void AudioSystem::releaseOutput(audio_io_handle_t output,
+ audio_stream_type_t stream,
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return;
- aps->releaseOutput(output);
+ aps->releaseOutput(output, stream, session);
}
-audio_io_handle_t AudioSystem::getInput(audio_source_t inputSource,
- uint32_t samplingRate,
- audio_format_t format,
- audio_channel_mask_t channelMask,
- int sessionId)
+status_t AudioSystem::getInputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *input,
+ audio_session_t session,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_input_flags_t flags)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return 0;
- return aps->getInput(inputSource, samplingRate, format, channelMask, sessionId);
+ if (aps == 0) return NO_INIT;
+ return aps->getInputForAttr(attr, input, session, samplingRate, format, channelMask, flags);
}
-status_t AudioSystem::startInput(audio_io_handle_t input)
+status_t AudioSystem::startInput(audio_io_handle_t input,
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- return aps->startInput(input);
+ return aps->startInput(input, session);
}
-status_t AudioSystem::stopInput(audio_io_handle_t input)
+status_t AudioSystem::stopInput(audio_io_handle_t input,
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return PERMISSION_DENIED;
- return aps->stopInput(input);
+ return aps->stopInput(input, session);
}
-void AudioSystem::releaseInput(audio_io_handle_t input)
+void AudioSystem::releaseInput(audio_io_handle_t input,
+ audio_session_t session)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
if (aps == 0) return;
- aps->releaseInput(input);
+ aps->releaseInput(input, session);
}
status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
@@ -702,14 +767,15 @@ uint32_t AudioSystem::getStrategyForStream(audio_stream_type_t stream)
audio_devices_t AudioSystem::getDevicesForStream(audio_stream_type_t stream)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return (audio_devices_t)0;
+ if (aps == 0) return AUDIO_DEVICE_NONE;
return aps->getDevicesForStream(stream);
}
audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t *desc)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ // FIXME change return type to status_t, and return PERMISSION_DENIED here
+ if (aps == 0) return AUDIO_IO_HANDLE_NONE;
return aps->getOutputForEffect(desc);
}
@@ -789,9 +855,21 @@ status_t AudioSystem::setLowRamDevice(bool isLowRamDevice)
void AudioSystem::clearAudioConfigCache()
{
- Mutex::Autolock _l(gLock);
+ // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances
ALOGV("clearAudioConfigCache()");
- gOutputs.clear();
+ {
+ Mutex::Autolock _l(gLockCache);
+ gOutputs.clear();
+ }
+ {
+ Mutex::Autolock _l(gLock);
+ gAudioFlinger.clear();
+ }
+ {
+ Mutex::Autolock _l(gLockAPS);
+ gAudioPolicyService.clear();
+ }
+ // Do not clear gAudioPortCallback
}
bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info)
@@ -802,13 +880,123 @@ bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info)
return aps->isOffloadSupported(info);
}
+status_t AudioSystem::listAudioPorts(audio_port_role_t role,
+ audio_port_type_t type,
+ unsigned int *num_ports,
+ struct audio_port *ports,
+ unsigned int *generation)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->listAudioPorts(role, type, num_ports, ports, generation);
+}
+
+status_t AudioSystem::getAudioPort(struct audio_port *port)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->getAudioPort(port);
+}
+
+status_t AudioSystem::createAudioPatch(const struct audio_patch *patch,
+ audio_patch_handle_t *handle)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->createAudioPatch(patch, handle);
+}
+
+status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->releaseAudioPatch(handle);
+}
+
+status_t AudioSystem::listAudioPatches(unsigned int *num_patches,
+ struct audio_patch *patches,
+ unsigned int *generation)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->listAudioPatches(num_patches, patches, generation);
+}
+
+status_t AudioSystem::setAudioPortConfig(const struct audio_port_config *config)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->setAudioPortConfig(config);
+}
+
+void AudioSystem::setAudioPortCallback(sp<AudioPortCallback> callBack)
+{
+ Mutex::Autolock _l(gLockAPC);
+ gAudioPortCallback = callBack;
+}
+
+status_t AudioSystem::acquireSoundTriggerSession(audio_session_t *session,
+ audio_io_handle_t *ioHandle,
+ audio_devices_t *device)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->acquireSoundTriggerSession(session, ioHandle, device);
+}
+
+status_t AudioSystem::releaseSoundTriggerSession(audio_session_t session)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->releaseSoundTriggerSession(session);
+}
+
+audio_mode_t AudioSystem::getPhoneState()
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return AUDIO_MODE_INVALID;
+ return aps->getPhoneState();
+}
+
+status_t AudioSystem::registerPolicyMixes(Vector<AudioMix> mixes, bool registration)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->registerPolicyMixes(mixes, registration);
+}
+
// ---------------------------------------------------------------------------
-void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) {
- Mutex::Autolock _l(AudioSystem::gLock);
- AudioSystem::gAudioPolicyService.clear();
+void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused)
+{
+ {
+ Mutex::Autolock _l(gLockAPC);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onServiceDied();
+ }
+ }
+ {
+ Mutex::Autolock _l(gLockAPS);
+ AudioSystem::gAudioPolicyService.clear();
+ }
ALOGW("AudioPolicyService server died!");
}
+void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
+{
+ Mutex::Autolock _l(gLockAPC);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onAudioPortListUpdate();
+ }
+}
+
+void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
+{
+ Mutex::Autolock _l(gLockAPC);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onAudioPatchListUpdate();
+ }
+}
+
}; // namespace android
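To see how the reworked policy entry points fit together, here is a hedged caller-side sketch of the attribute-based input path; the parameter values are illustrative, and only the signatures shown in the hunks above are relied on:

    audio_attributes_t attr;
    memset(&attr, 0, sizeof(attr));
    attr.source = AUDIO_SOURCE_MIC;

    audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
    audio_session_t session = (audio_session_t) AudioSystem::newAudioUniqueId();
    status_t status = AudioSystem::getInputForAttr(&attr, &input, session,
            48000 /*samplingRate*/, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_MONO,
            AUDIO_INPUT_FLAG_NONE);
    if (status == NO_ERROR) {
        // startInput(input, session) / stopInput(input, session) bracket the capture;
        // release with the same session when done:
        AudioSystem::releaseInput(input, session);
    }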
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index a9d6993..735db5c 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -15,17 +15,21 @@
** limitations under the License.
*/
-
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioTrack"
+#include <inttypes.h>
+#include <math.h>
#include <sys/resource.h>
+
#include <audio_utils/primitives.h>
#include <binder/IPCThreadState.h>
#include <media/AudioTrack.h>
#include <utils/Log.h>
#include <private/media/AudioTrackShared.h>
#include <media/IAudioFlinger.h>
+#include <media/AudioPolicyHelper.h>
+#include <media/AudioResamplerPublic.h>
#define WAIT_PERIOD_MS 10
#define WAIT_STREAM_END_TIMEOUT_SEC 120
@@ -34,6 +38,19 @@
namespace android {
// ---------------------------------------------------------------------------
+static int64_t convertTimespecToUs(const struct timespec &tv)
+{
+ return tv.tv_sec * 1000000ll + tv.tv_nsec / 1000;
+}
+
+// current monotonic time in microseconds.
+static int64_t getNowUs()
+{
+ struct timespec tv;
+ (void) clock_gettime(CLOCK_MONOTONIC, &tv);
+ return convertTimespecToUs(tv);
+}
+
// static
status_t AudioTrack::getMinFrameCount(
size_t* frameCount,
@@ -44,9 +61,6 @@ status_t AudioTrack::getMinFrameCount(
return BAD_VALUE;
}
- // default to 0 in case of error
- *frameCount = 0;
-
// FIXME merge with similar code in createTrack_l(), except we're missing
// some information here that is available in createTrack_l():
// audio_io_handle_t output
@@ -54,16 +68,26 @@ status_t AudioTrack::getMinFrameCount(
// audio_channel_mask_t channelMask
// audio_output_flags_t flags
uint32_t afSampleRate;
- if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
- return NO_INIT;
+ status_t status;
+ status = AudioSystem::getOutputSamplingRate(&afSampleRate, streamType);
+ if (status != NO_ERROR) {
+ ALOGE("Unable to query output sample rate for stream type %d; status %d",
+ streamType, status);
+ return status;
}
size_t afFrameCount;
- if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) {
- return NO_INIT;
+ status = AudioSystem::getOutputFrameCount(&afFrameCount, streamType);
+ if (status != NO_ERROR) {
+ ALOGE("Unable to query output frame count for stream type %d; status %d",
+ streamType, status);
+ return status;
}
uint32_t afLatency;
- if (AudioSystem::getOutputLatency(&afLatency, streamType) != NO_ERROR) {
- return NO_INIT;
+ status = AudioSystem::getOutputLatency(&afLatency, streamType);
+ if (status != NO_ERROR) {
+ ALOGE("Unable to query output latency for stream type %d; status %d",
+ streamType, status);
+ return status;
}
// Ensure that buffer depth covers at least audio hardware latency
@@ -73,8 +97,15 @@ status_t AudioTrack::getMinFrameCount(
}
*frameCount = (sampleRate == 0) ? afFrameCount * minBufCount :
- afFrameCount * minBufCount * sampleRate / afSampleRate;
- ALOGV("getMinFrameCount=%d: afFrameCount=%d, minBufCount=%d, afSampleRate=%d, afLatency=%d",
+ afFrameCount * minBufCount * uint64_t(sampleRate) / afSampleRate;
+ // The formula above should always produce a non-zero value, but return an error
+ // in the unlikely event that it does not, as that's part of the API contract.
+ if (*frameCount == 0) {
+ ALOGE("AudioTrack::getMinFrameCount failed for streamType %d, sampleRate %d",
+ streamType, sampleRate);
+ return BAD_VALUE;
+ }
+ ALOGV("getMinFrameCount=%zu: afFrameCount=%zu, minBufCount=%d, afSampleRate=%d, afLatency=%d",
*frameCount, afFrameCount, minBufCount, afSampleRate, afLatency);
return NO_ERROR;
}
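A worked example of the frame-count formula above, using assumed hardware values (a 960-frame, 48 kHz primary output and minBufCount of 2; none of these numbers come from the patch):

    // sampleRate == 0     -> frameCount = 960 * 2                  = 1920 frames
    // sampleRate == 44100 -> frameCount = 960 * 2 * 44100 / 48000  = 1764 frames
    // The uint64_t(sampleRate) promotion widens the intermediate product
    // (here 1920 * 44100 = 84,672,000) so that larger buffer and rate
    // combinations cannot overflow 32-bit arithmetic.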
@@ -85,8 +116,13 @@ AudioTrack::AudioTrack()
: mStatus(NO_INIT),
mIsTimed(false),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
- mPreviousSchedulingGroup(SP_DEFAULT)
+ mPreviousSchedulingGroup(SP_DEFAULT),
+ mPausedPosition(0)
{
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
+ mAttributes.usage = AUDIO_USAGE_UNKNOWN;
+ mAttributes.flags = 0x0;
+ strcpy(mAttributes.tags, "");
}
AudioTrack::AudioTrack(
@@ -94,24 +130,27 @@ AudioTrack::AudioTrack(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCount,
+ size_t frameCount,
audio_output_flags_t flags,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
int sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid)
+ int uid,
+ pid_t pid,
+ const audio_attributes_t* pAttributes)
: mStatus(NO_INIT),
mIsTimed(false),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
- mPreviousSchedulingGroup(SP_DEFAULT)
+ mPreviousSchedulingGroup(SP_DEFAULT),
+ mPausedPosition(0)
{
mStatus = set(streamType, sampleRate, format, channelMask,
frameCount, flags, cbf, user, notificationFrames,
0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
- offloadInfo, uid);
+ offloadInfo, uid, pid, pAttributes);
}
AudioTrack::AudioTrack(
@@ -123,19 +162,23 @@ AudioTrack::AudioTrack(
audio_output_flags_t flags,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
int sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid)
+ int uid,
+ pid_t pid,
+ const audio_attributes_t* pAttributes)
: mStatus(NO_INIT),
mIsTimed(false),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
- mPreviousSchedulingGroup(SP_DEFAULT)
+ mPreviousSchedulingGroup(SP_DEFAULT),
+ mPausedPosition(0)
{
mStatus = set(streamType, sampleRate, format, channelMask,
0 /*frameCount*/, flags, cbf, user, notificationFrames,
- sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo, uid);
+ sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
+ uid, pid, pAttributes);
}
AudioTrack::~AudioTrack()
@@ -151,10 +194,14 @@ AudioTrack::~AudioTrack()
mAudioTrackThread->requestExitAndWait();
mAudioTrackThread.clear();
}
- mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
+ IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
mAudioTrack.clear();
+ mCblkMemory.clear();
+ mSharedBuffer.clear();
IPCThreadState::self()->flushCommands();
- AudioSystem::releaseAudioSessionId(mSessionId);
+ ALOGV("~AudioTrack, releasing session id from %d on behalf of %d",
+ IPCThreadState::self()->getCallingPid(), mClientPid);
+ AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
}
}
@@ -163,18 +210,25 @@ status_t AudioTrack::set(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCountInt,
+ size_t frameCount,
audio_output_flags_t flags,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
const sp<IMemory>& sharedBuffer,
bool threadCanCallJava,
int sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid)
+ int uid,
+ pid_t pid,
+ const audio_attributes_t* pAttributes)
{
+ ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
+ "flags #%x, notificationFrames %u, sessionId %d, transferType %d",
+ streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
+ sessionId, transferType);
+
switch (transferType) {
case TRANSFER_DEFAULT:
if (sharedBuffer != 0) {
@@ -208,19 +262,13 @@ status_t AudioTrack::set(
ALOGE("Invalid transfer type %d", transferType);
return BAD_VALUE;
}
+ mSharedBuffer = sharedBuffer;
mTransfer = transferType;
- // FIXME "int" here is legacy and will be replaced by size_t later
- if (frameCountInt < 0) {
- ALOGE("Invalid frame count %d", frameCountInt);
- return BAD_VALUE;
- }
- size_t frameCount = frameCountInt;
-
ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(),
sharedBuffer->size());
- ALOGV("set() streamType %d frameCount %u flags %04x", streamType, frameCount, flags);
+ ALOGV("set() streamType %d frameCount %zu flags %04x", streamType, frameCount, flags);
AutoMutex lock(mLock);
@@ -230,35 +278,44 @@ status_t AudioTrack::set(
return INVALID_OPERATION;
}
- mOutput = 0;
-
// handle default values first.
if (streamType == AUDIO_STREAM_DEFAULT) {
streamType = AUDIO_STREAM_MUSIC;
}
-
- if (sampleRate == 0) {
- uint32_t afSampleRate;
- if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
- return NO_INIT;
+ if (pAttributes == NULL) {
+ if (uint32_t(streamType) >= AUDIO_STREAM_PUBLIC_CNT) {
+ ALOGE("Invalid stream type %d", streamType);
+ return BAD_VALUE;
}
- sampleRate = afSampleRate;
+ mStreamType = streamType;
+
+ } else {
+ // stream type shouldn't be looked at, this track has audio attributes
+ memcpy(&mAttributes, pAttributes, sizeof(audio_attributes_t));
+ ALOGV("Building AudioTrack with attributes: usage=%d content=%d flags=0x%x tags=[%s]",
+ mAttributes.usage, mAttributes.content_type, mAttributes.flags, mAttributes.tags);
+ mStreamType = AUDIO_STREAM_DEFAULT;
}
- mSampleRate = sampleRate;
// these below should probably come from the audioFlinger too...
if (format == AUDIO_FORMAT_DEFAULT) {
format = AUDIO_FORMAT_PCM_16_BIT;
}
- if (channelMask == 0) {
- channelMask = AUDIO_CHANNEL_OUT_STEREO;
- }
// validate parameters
if (!audio_is_valid_format(format)) {
- ALOGE("Invalid format %d", format);
+ ALOGE("Invalid format %#x", format);
return BAD_VALUE;
}
+ mFormat = format;
+
+ if (!audio_is_output_channel(channelMask)) {
+ ALOGE("Invalid channel mask %#x", channelMask);
+ return BAD_VALUE;
+ }
+ mChannelMask = channelMask;
+ uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+ mChannelCount = channelCount;
// AudioFlinger does not currently support 8-bit data in shared memory
if (format == AUDIO_FORMAT_PCM_8_BIT && sharedBuffer != 0) {
@@ -277,51 +334,68 @@ status_t AudioTrack::set(
// FIXME why can't we allow direct AND fast?
((flags | AUDIO_OUTPUT_FLAG_DIRECT) & ~AUDIO_OUTPUT_FLAG_FAST);
}
- // only allow deep buffering for music stream type
- if (streamType != AUDIO_STREAM_MUSIC) {
- flags = (audio_output_flags_t)(flags &~AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
- }
- if (!audio_is_output_channel(channelMask)) {
- ALOGE("Invalid channel mask %#x", channelMask);
- return BAD_VALUE;
+ // force direct flag if HW A/V sync requested
+ if ((flags & AUDIO_OUTPUT_FLAG_HW_AV_SYNC) != 0) {
+ flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_DIRECT);
}
- mChannelMask = channelMask;
- uint32_t channelCount = popcount(channelMask);
- mChannelCount = channelCount;
- if (audio_is_linear_pcm(format)) {
- mFrameSize = channelCount * audio_bytes_per_sample(format);
- mFrameSizeAF = channelCount * sizeof(int16_t);
+ if (flags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ if (audio_is_linear_pcm(format)) {
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
+ } else {
+ mFrameSize = sizeof(uint8_t);
+ }
+ mFrameSizeAF = mFrameSize;
} else {
- mFrameSize = sizeof(uint8_t);
- mFrameSizeAF = sizeof(uint8_t);
+ ALOG_ASSERT(audio_is_linear_pcm(format));
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
+ mFrameSizeAF = channelCount * audio_bytes_per_sample(
+ format == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : format);
+ // createTrack will return an error if PCM format is not supported by server,
+ // so no need to check for specific PCM formats here
}
- audio_io_handle_t output = AudioSystem::getOutput(
- streamType,
- sampleRate, format, channelMask,
- flags,
- offloadInfo);
-
- if (output == 0) {
- ALOGE("Could not get audio output for stream type %d", streamType);
+ // sampling rate must be specified for direct outputs
+ if (sampleRate == 0 && (flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
return BAD_VALUE;
}
+ mSampleRate = sampleRate;
+
+ // Make copy of input parameter offloadInfo so that in the future:
+ // (a) createTrack_l doesn't need it as an input parameter
+ // (b) we can support re-creation of offloaded tracks
+ if (offloadInfo != NULL) {
+ mOffloadInfoCopy = *offloadInfo;
+ mOffloadInfo = &mOffloadInfoCopy;
+ } else {
+ mOffloadInfo = NULL;
+ }
- mVolume[LEFT] = 1.0f;
- mVolume[RIGHT] = 1.0f;
+ mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
+ mVolume[AUDIO_INTERLEAVE_RIGHT] = 1.0f;
mSendLevel = 0.0f;
- mFrameCount = frameCount;
+ // mFrameCount is initialized in createTrack_l
mReqFrameCount = frameCount;
mNotificationFramesReq = notificationFrames;
mNotificationFramesAct = 0;
- mSessionId = sessionId;
- if (uid == -1 || (IPCThreadState::self()->getCallingPid() != getpid())) {
+ if (sessionId == AUDIO_SESSION_ALLOCATE) {
+ mSessionId = AudioSystem::newAudioUniqueId();
+ } else {
+ mSessionId = sessionId;
+ }
+ int callingpid = IPCThreadState::self()->getCallingPid();
+ int mypid = getpid();
+ if (uid == -1 || (callingpid != mypid)) {
mClientUid = IPCThreadState::self()->getCallingUid();
} else {
mClientUid = uid;
}
+ if (pid == -1 || (callingpid != mypid)) {
+ mClientPid = callingpid;
+ } else {
+ mClientPid = pid;
+ }
mAuxEffectId = 0;
mFlags = flags;
mCbf = cbf;
@@ -332,14 +406,7 @@ status_t AudioTrack::set(
}
// create the IAudioTrack
- status_t status = createTrack_l(streamType,
- sampleRate,
- format,
- frameCount,
- flags,
- sharedBuffer,
- output,
- 0 /*epoch*/);
+ status_t status = createTrack_l();
if (status != NO_ERROR) {
if (mAudioTrackThread != 0) {
@@ -347,17 +414,10 @@ status_t AudioTrack::set(
mAudioTrackThread->requestExitAndWait();
mAudioTrackThread.clear();
}
- //Use of direct and offloaded output streams is ref counted by audio policy manager.
- // As getOutput was called above and resulted in an output stream to be opened,
- // we need to release it.
- AudioSystem::releaseOutput(output);
return status;
}
mStatus = NO_ERROR;
- mStreamType = streamType;
- mFormat = format;
- mSharedBuffer = sharedBuffer;
mState = STATE_STOPPED;
mUserData = user;
mLoopPeriod = 0;
@@ -365,11 +425,14 @@ status_t AudioTrack::set(
mMarkerReached = false;
mNewPosition = 0;
mUpdatePeriod = 0;
- AudioSystem::acquireAudioSessionId(mSessionId);
+ mServer = 0;
+ mPosition = 0;
+ mReleased = 0;
+ mStartUs = 0;
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
mSequence = 1;
mObservedSequence = mSequence;
mInUnderrun = false;
- mOutput = output;
return NO_ERROR;
}
@@ -392,14 +455,21 @@ status_t AudioTrack::start()
} else {
mState = STATE_ACTIVE;
}
+ (void) updateAndGetPosition_l();
if (previousState == STATE_STOPPED || previousState == STATE_FLUSHED) {
// reset current position as seen by client to 0
- mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition());
+ mPosition = 0;
+ // For offloaded tracks, we don't know if the hardware counters are really zero here,
+ // since the flush is asynchronous and stop may not fully drain.
+ // We save the time when the track is started to later verify whether
+ // the counters are realistic (i.e. start from zero after this time).
+ mStartUs = getNowUs();
+
// force refresh of remaining frames by processAudioBuffer() as last
// write before stop could be partial.
mRefreshRemaining = true;
}
- mNewPosition = mProxy->getPosition() + mUpdatePeriod;
+ mNewPosition = mPosition + mUpdatePeriod;
int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->mFlags);
sp<AudioTrackThread> t = mAudioTrackThread;
@@ -445,15 +515,15 @@ status_t AudioTrack::start()
void AudioTrack::stop()
{
AutoMutex lock(mLock);
- // FIXME pause then stop should not be a nop
- if (mState != STATE_ACTIVE) {
+ if (mState != STATE_ACTIVE && mState != STATE_PAUSED) {
return;
}
- if (isOffloaded()) {
+ if (isOffloaded_l()) {
mState = STATE_STOPPING;
} else {
mState = STATE_STOPPED;
+ mReleased = 0;
}
mProxy->interrupt();
@@ -472,7 +542,7 @@ void AudioTrack::stop()
sp<AudioTrackThread> t = mAudioTrackThread;
if (t != 0) {
- if (!isOffloaded()) {
+ if (!isOffloaded_l()) {
t->pause();
}
} else {
@@ -510,7 +580,8 @@ void AudioTrack::flush_l()
mRefreshRemaining = true;
mState = STATE_FLUSHED;
- if (isOffloaded()) {
+ mReleased = 0;
+ if (isOffloaded_l()) {
mProxy->interrupt();
}
mProxy->flush();
@@ -529,21 +600,42 @@ void AudioTrack::pause()
}
mProxy->interrupt();
mAudioTrack->pause();
+
+ if (isOffloaded_l()) {
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
+ // An offload output can be re-used between two audio tracks having
+ // the same configuration. A timestamp query for a paused track
+ // while the other is running would return an incorrect time.
+ // To fix this, cache the playback position on a pause() and return
+ // this time when requested until the track is resumed.
+
+ // OffloadThread sends HAL pause in its threadLoop. Time saved
+ // here can be slightly off.
+
+ // TODO: check return code for getRenderPosition.
+
+ uint32_t halFrames;
+ AudioSystem::getRenderPosition(mOutput, &halFrames, &mPausedPosition);
+ ALOGV("AudioTrack::pause for offload, cache current position %u", mPausedPosition);
+ }
+ }
}
status_t AudioTrack::setVolume(float left, float right)
{
- if (left < 0.0f || left > 1.0f || right < 0.0f || right > 1.0f) {
+ // This duplicates a test by AudioTrack JNI, but that is not the only caller
+ if (isnanf(left) || left < GAIN_FLOAT_ZERO || left > GAIN_FLOAT_UNITY ||
+ isnanf(right) || right < GAIN_FLOAT_ZERO || right > GAIN_FLOAT_UNITY) {
return BAD_VALUE;
}
AutoMutex lock(mLock);
- mVolume[LEFT] = left;
- mVolume[RIGHT] = right;
+ mVolume[AUDIO_INTERLEAVE_LEFT] = left;
+ mVolume[AUDIO_INTERLEAVE_RIGHT] = right;
- mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000));
+ mProxy->setVolumeLR(gain_minifloat_pack(gain_from_float(left), gain_from_float(right)));
- if (isOffloaded()) {
+ if (isOffloaded_l()) {
mAudioTrack->signal();
}
return NO_ERROR;
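Editorial note: the removed setVolumeLR() call shows the legacy packing — each channel gain was a U4.12 fixed-point value (unity == 0x1000) packed into one 32-bit word, right channel in the high half-word — which explains the old 0x10001000 default seen in the control-block constructor below. gain_from_float()/gain_minifloat_pack() replace that with the minifloat gain encoding; the sketch below only reproduces the legacy fixed-point arithmetic for reference and is not the minifloat implementation:

#include <cstdint>

// Legacy U4.12 packing, as in the removed setVolumeLR() call:
// unity gain == 0x1000, left in the low 16 bits, right in the high 16 bits.
static uint32_t packVolumeLR_u4_12(float left, float right) {
    const uint16_t l = static_cast<uint16_t>(left  * 0x1000);
    const uint16_t r = static_cast<uint16_t>(right * 0x1000);
    return (static_cast<uint32_t>(r) << 16) | l;
}
// packVolumeLR_u4_12(1.0f, 1.0f) == 0x10001000, the old cblk default.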
@@ -556,7 +648,8 @@ status_t AudioTrack::setVolume(float volume)
status_t AudioTrack::setAuxEffectSendLevel(float level)
{
- if (level < 0.0f || level > 1.0f) {
+ // This duplicates a test by AudioTrack JNI, but that is not the only caller
+ if (isnanf(level) || level < GAIN_FLOAT_ZERO || level > GAIN_FLOAT_UNITY) {
return BAD_VALUE;
}
@@ -576,20 +669,22 @@ void AudioTrack::getAuxEffectSendLevel(float* level) const
status_t AudioTrack::setSampleRate(uint32_t rate)
{
- if (mIsTimed || isOffloaded()) {
+ if (mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
+ AutoMutex lock(mLock);
+ if (mOutput == AUDIO_IO_HANDLE_NONE) {
+ return NO_INIT;
+ }
uint32_t afSamplingRate;
- if (AudioSystem::getOutputSamplingRate(&afSamplingRate, mStreamType) != NO_ERROR) {
+ if (AudioSystem::getSamplingRate(mOutput, &afSamplingRate) != NO_ERROR) {
return NO_INIT;
}
- // Resampler implementation limits input sampling rate to 2 x output sampling rate.
- if (rate == 0 || rate > afSamplingRate*2 ) {
+ if (rate == 0 || rate > afSamplingRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) {
return BAD_VALUE;
}
- AutoMutex lock(mLock);
mSampleRate = rate;
mProxy->setSampleRate(rate);
@@ -607,10 +702,10 @@ uint32_t AudioTrack::getSampleRate() const
// sample rate can be updated during playback by the offloaded decoder so we need to
// query the HAL and update if needed.
// FIXME use Proxy return channel to update the rate from server and avoid polling here
- if (isOffloaded()) {
- if (mOutput != 0) {
+ if (isOffloadedOrDirect_l()) {
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
uint32_t sampleRate = 0;
- status_t status = AudioSystem::getSamplingRate(mOutput, mStreamType, &sampleRate);
+ status_t status = AudioSystem::getSamplingRate(mOutput, &sampleRate);
if (status == NO_ERROR) {
mSampleRate = sampleRate;
}
@@ -621,7 +716,7 @@ uint32_t AudioTrack::getSampleRate() const
status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
{
- if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) {
+ if (mSharedBuffer == 0 || mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
@@ -645,9 +740,8 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount
void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount)
{
- // FIXME If setting a loop also sets position to start of loop, then
- // this is correct. Otherwise it should be removed.
- mNewPosition = mProxy->getPosition() + mUpdatePeriod;
+ // Setting the loop will reset next notification update period (like setPosition).
+ mNewPosition = updateAndGetPosition_l() + mUpdatePeriod;
mLoopPeriod = loopCount != 0 ? loopEnd - loopStart : 0;
mStaticProxy->setLoop(loopStart, loopEnd, loopCount);
}
@@ -655,7 +749,7 @@ void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount)
status_t AudioTrack::setMarkerPosition(uint32_t marker)
{
// The only purpose of setting marker position is to get a callback
- if (mCbf == NULL || isOffloaded()) {
+ if (mCbf == NULL || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
@@ -668,7 +762,7 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker)
status_t AudioTrack::getMarkerPosition(uint32_t *marker) const
{
- if (isOffloaded()) {
+ if (isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
if (marker == NULL) {
@@ -684,19 +778,20 @@ status_t AudioTrack::getMarkerPosition(uint32_t *marker) const
status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod)
{
// The only purpose of setting position update period is to get a callback
- if (mCbf == NULL || isOffloaded()) {
+ if (mCbf == NULL || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
AutoMutex lock(mLock);
- mNewPosition = mProxy->getPosition() + updatePeriod;
+ mNewPosition = updateAndGetPosition_l() + updatePeriod;
mUpdatePeriod = updatePeriod;
+
return NO_ERROR;
}
status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const
{
- if (isOffloaded()) {
+ if (isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
if (updatePeriod == NULL) {
@@ -711,7 +806,7 @@ status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const
status_t AudioTrack::setPosition(uint32_t position)
{
- if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) {
+ if (mSharedBuffer == 0 || mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
if (position > mFrameCount) {
@@ -728,7 +823,7 @@ status_t AudioTrack::setPosition(uint32_t position)
if (mState == STATE_ACTIVE) {
return INVALID_OPERATION;
}
- mNewPosition = mProxy->getPosition() + mUpdatePeriod;
+ mNewPosition = updateAndGetPosition_l() + mUpdatePeriod;
mLoopPeriod = 0;
// FIXME Check whether loops and setting position are incompatible in old code.
// If we use setLoop for both purposes we lose the capability to set the position while looping.
@@ -737,25 +832,37 @@ status_t AudioTrack::setPosition(uint32_t position)
return NO_ERROR;
}
-status_t AudioTrack::getPosition(uint32_t *position) const
+status_t AudioTrack::getPosition(uint32_t *position)
{
if (position == NULL) {
return BAD_VALUE;
}
AutoMutex lock(mLock);
- if (isOffloaded()) {
+ if (isOffloadedOrDirect_l()) {
uint32_t dspFrames = 0;
- if (mOutput != 0) {
+ if (isOffloaded_l() && ((mState == STATE_PAUSED) || (mState == STATE_PAUSED_STOPPING))) {
+ ALOGV("getPosition called in paused state, return cached position %u", mPausedPosition);
+ *position = mPausedPosition;
+ return NO_ERROR;
+ }
+
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
uint32_t halFrames;
AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames);
}
+ // FIXME: dspFrames may not be zero in (mState == STATE_STOPPED || mState == STATE_FLUSHED)
+ // due to hardware latency. We leave this behavior for now.
*position = dspFrames;
} else {
+ if (mCblk->mFlags & CBLK_INVALID) {
+ restoreTrack_l("getPosition");
+ }
+
// IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
- *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ? 0 :
- mProxy->getPosition();
+ *position = (mState == STATE_STOPPED || mState == STATE_FLUSHED) ?
+ 0 : updateAndGetPosition_l();
}
return NO_ERROR;
}
@@ -776,7 +883,7 @@ status_t AudioTrack::getBufferPosition(uint32_t *position)
status_t AudioTrack::reload()
{
- if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) {
+ if (mSharedBuffer == 0 || mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
@@ -793,23 +900,12 @@ status_t AudioTrack::reload()
return NO_ERROR;
}
-audio_io_handle_t AudioTrack::getOutput()
+audio_io_handle_t AudioTrack::getOutput() const
{
AutoMutex lock(mLock);
return mOutput;
}
-// must be called with mLock held
-audio_io_handle_t AudioTrack::getOutput_l()
-{
- if (mOutput) {
- return mOutput;
- } else {
- return AudioSystem::getOutput(mStreamType,
- mSampleRate, mFormat, mChannelMask, mFlags);
- }
-}
-
status_t AudioTrack::attachAuxEffect(int effectId)
{
AutoMutex lock(mLock);
@@ -820,70 +916,85 @@ status_t AudioTrack::attachAuxEffect(int effectId)
return status;
}
+audio_stream_type_t AudioTrack::streamType() const
+{
+ if (mStreamType == AUDIO_STREAM_DEFAULT) {
+ return audio_attributes_to_stream_type(&mAttributes);
+ }
+ return mStreamType;
+}
+
// -------------------------------------------------------------------------
// must be called with mLock held
-status_t AudioTrack::createTrack_l(
- audio_stream_type_t streamType,
- uint32_t sampleRate,
- audio_format_t format,
- size_t frameCount,
- audio_output_flags_t flags,
- const sp<IMemory>& sharedBuffer,
- audio_io_handle_t output,
- size_t epoch)
+status_t AudioTrack::createTrack_l()
{
- status_t status;
const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
if (audioFlinger == 0) {
ALOGE("Could not get audioflinger");
return NO_INIT;
}
+ audio_io_handle_t output;
+ audio_stream_type_t streamType = mStreamType;
+ audio_attributes_t *attr = (mStreamType == AUDIO_STREAM_DEFAULT) ? &mAttributes : NULL;
+ status_t status = AudioSystem::getOutputForAttr(attr, &output,
+ (audio_session_t)mSessionId, &streamType,
+ mSampleRate, mFormat, mChannelMask,
+ mFlags, mOffloadInfo);
+
+
+ if (status != NO_ERROR || output == AUDIO_IO_HANDLE_NONE) {
+ ALOGE("Could not get audio output for stream type %d, usage %d, sample rate %u, format %#x,"
+ " channel mask %#x, flags %#x",
+ streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags);
+ return BAD_VALUE;
+ }
+ {
+ // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
+ // we must release it ourselves if anything goes wrong.
+
// Not all of these values are needed under all conditions, but it is easier to get them all
uint32_t afLatency;
- status = AudioSystem::getLatency(output, streamType, &afLatency);
+ status = AudioSystem::getLatency(output, &afLatency);
if (status != NO_ERROR) {
ALOGE("getLatency(%d) failed status %d", output, status);
- return NO_INIT;
+ goto release;
}
size_t afFrameCount;
- status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
+ status = AudioSystem::getFrameCount(output, &afFrameCount);
if (status != NO_ERROR) {
- ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status);
- return NO_INIT;
+ ALOGE("getFrameCount(output=%d) status %d", output, status);
+ goto release;
}
uint32_t afSampleRate;
- status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate);
+ status = AudioSystem::getSamplingRate(output, &afSampleRate);
if (status != NO_ERROR) {
- ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, status);
- return NO_INIT;
+ ALOGE("getSamplingRate(output=%d) status %d", output, status);
+ goto release;
+ }
+ if (mSampleRate == 0) {
+ mSampleRate = afSampleRate;
}
-
// Client decides whether the track is TIMED (see below), but can only express a preference
// for FAST. Server will perform additional tests.
- if ((flags & AUDIO_OUTPUT_FLAG_FAST) && !(
+ if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) && !((
// either of these use cases:
// use case 1: shared buffer
- (sharedBuffer != 0) ||
- // use case 2: callback handler
- (mCbf != NULL))) {
+ (mSharedBuffer != 0) ||
+ // use case 2: callback transfer mode
+ (mTransfer == TRANSFER_CALLBACK)) &&
+ // matching sample rate
+ (mSampleRate == afSampleRate))) {
ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client");
// once denied, do not request again if IAudioTrack is re-created
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
- mFlags = flags;
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
}
ALOGV("createTrack_l() output %d afLatency %d", output, afLatency);
- if ((flags & AUDIO_OUTPUT_FLAG_FAST) && sampleRate != afSampleRate) {
- ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client due to mismatching sample rate (%d vs %d)",
- sampleRate, afSampleRate);
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
- }
-
// The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
// n = 1 fast track with single buffering; nBuffering is ignored
// n = 2 fast track with double buffering
@@ -891,64 +1002,70 @@ status_t AudioTrack::createTrack_l(
// n = 3 normal track, with sample rate conversion
// (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering)
// n > 3 very high latency or very small notification interval; nBuffering is ignored
- const uint32_t nBuffering = (sampleRate == afSampleRate) ? 2 : 3;
+ const uint32_t nBuffering = (mSampleRate == afSampleRate) ? 2 : 3;
mNotificationFramesAct = mNotificationFramesReq;
- if (!audio_is_linear_pcm(format)) {
+ size_t frameCount = mReqFrameCount;
+ if (!audio_is_linear_pcm(mFormat)) {
- if (sharedBuffer != 0) {
+ if (mSharedBuffer != 0) {
// Same comment as below about ignoring frameCount parameter for set()
- frameCount = sharedBuffer->size();
+ frameCount = mSharedBuffer->size();
} else if (frameCount == 0) {
frameCount = afFrameCount;
}
if (mNotificationFramesAct != frameCount) {
mNotificationFramesAct = frameCount;
}
- } else if (sharedBuffer != 0) {
+ } else if (mSharedBuffer != 0) {
// Ensure that buffer alignment matches channel count
// 8-bit data in shared memory is not currently supported by AudioFlinger
- size_t alignment = /* format == AUDIO_FORMAT_PCM_8_BIT ? 1 : */ 2;
+ size_t alignment = audio_bytes_per_sample(
+ mFormat == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : mFormat);
+ if (alignment & 1) {
+ alignment = 1;
+ }
if (mChannelCount > 1) {
// More than 2 channels does not require stronger alignment than stereo
alignment <<= 1;
}
- if (((uintptr_t)sharedBuffer->pointer() & (alignment - 1)) != 0) {
+ if (((uintptr_t)mSharedBuffer->pointer() & (alignment - 1)) != 0) {
ALOGE("Invalid buffer alignment: address %p, channel count %u",
- sharedBuffer->pointer(), mChannelCount);
- return BAD_VALUE;
+ mSharedBuffer->pointer(), mChannelCount);
+ status = BAD_VALUE;
+ goto release;
}
// When initializing a shared buffer AudioTrack via constructors,
// there's no frameCount parameter.
// But when initializing a shared buffer AudioTrack via set(),
// there _is_ a frameCount parameter. We silently ignore it.
- frameCount = sharedBuffer->size()/mChannelCount/sizeof(int16_t);
+ frameCount = mSharedBuffer->size() / mFrameSizeAF;
- } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
+ } else if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
// FIXME move these calculations and associated checks to server
// Ensure that buffer depth covers at least audio hardware latency
uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate);
- ALOGV("afFrameCount=%d, minBufCount=%d, afSampleRate=%u, afLatency=%d",
+ ALOGV("afFrameCount=%zu, minBufCount=%d, afSampleRate=%u, afLatency=%d",
afFrameCount, minBufCount, afSampleRate, afLatency);
if (minBufCount <= nBuffering) {
minBufCount = nBuffering;
}
- size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate;
- ALOGV("minFrameCount: %u, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u"
+ size_t minFrameCount = afFrameCount * minBufCount * uint64_t(mSampleRate) / afSampleRate;
+ ALOGV("minFrameCount: %zu, afFrameCount=%zu, minBufCount=%d, sampleRate=%u, afSampleRate=%u"
", afLatency=%d",
- minFrameCount, afFrameCount, minBufCount, sampleRate, afSampleRate, afLatency);
+ minFrameCount, afFrameCount, minBufCount, mSampleRate, afSampleRate, afLatency);
if (frameCount == 0) {
frameCount = minFrameCount;
} else if (frameCount < minFrameCount) {
// not ALOGW because it happens all the time when playing key clicks over A2DP
- ALOGV("Minimum buffer size corrected from %d to %d",
+ ALOGV("Minimum buffer size corrected from %zu to %zu",
frameCount, minFrameCount);
frameCount = minFrameCount;
}
@@ -967,65 +1084,84 @@ status_t AudioTrack::createTrack_l(
}
pid_t tid = -1;
- if (flags & AUDIO_OUTPUT_FLAG_FAST) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
trackFlags |= IAudioFlinger::TRACK_FAST;
if (mAudioTrackThread != 0) {
tid = mAudioTrackThread->getTid();
}
}
- if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
trackFlags |= IAudioFlinger::TRACK_OFFLOAD;
}
+ if (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ trackFlags |= IAudioFlinger::TRACK_DIRECT;
+ }
+
+ size_t temp = frameCount; // temp may be replaced by a revised value of frameCount,
+ // but we will still need the original value also
sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
- sampleRate,
+ mSampleRate,
// AudioFlinger only sees 16-bit PCM
- format == AUDIO_FORMAT_PCM_8_BIT ?
- AUDIO_FORMAT_PCM_16_BIT : format,
+ mFormat == AUDIO_FORMAT_PCM_8_BIT &&
+ !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT) ?
+ AUDIO_FORMAT_PCM_16_BIT : mFormat,
mChannelMask,
- frameCount,
+ &temp,
&trackFlags,
- sharedBuffer,
+ mSharedBuffer,
output,
tid,
&mSessionId,
- mName,
mClientUid,
&status);
- if (track == 0) {
+ if (status != NO_ERROR) {
ALOGE("AudioFlinger could not create track, status: %d", status);
- return status;
+ goto release;
}
+ ALOG_ASSERT(track != 0);
+
+ // AudioFlinger now owns the reference to the I/O handle,
+ // so we are no longer responsible for releasing it.
+
sp<IMemory> iMem = track->getCblk();
if (iMem == 0) {
ALOGE("Could not get control block");
return NO_INIT;
}
+ void *iMemPointer = iMem->pointer();
+ if (iMemPointer == NULL) {
+ ALOGE("Could not get control block pointer");
+ return NO_INIT;
+ }
// invariant that mAudioTrack != 0 is true only after set() returns successfully
if (mAudioTrack != 0) {
- mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
+ IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);
mDeathNotifier.clear();
}
mAudioTrack = track;
mCblkMemory = iMem;
- audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer());
+ IPCThreadState::self()->flushCommands();
+
+ audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
mCblk = cblk;
- size_t temp = cblk->frameCount_;
+ // note that temp is the (possibly revised) value of frameCount
if (temp < frameCount || (frameCount == 0 && temp == 0)) {
// In current design, AudioTrack client checks and ensures frame count validity before
// passing it to AudioFlinger so AudioFlinger should not return a different value except
// for fast track as it uses a special method of assigning frame count.
- ALOGW("Requested frameCount %u but received frameCount %u", frameCount, temp);
+ ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
}
frameCount = temp;
+
mAwaitBoost = false;
- if (flags & AUDIO_OUTPUT_FLAG_FAST) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
if (trackFlags & IAudioFlinger::TRACK_FAST) {
- ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", frameCount);
+ ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu", frameCount);
mAwaitBoost = true;
- if (sharedBuffer == 0) {
+ if (mSharedBuffer == 0) {
// Theoretically double-buffering is not required for fast tracks,
// due to tighter scheduling. But in practice, to accommodate kernels with
// scheduling jitter, and apps with computation jitter, we use double-buffering.
@@ -1034,28 +1170,39 @@ status_t AudioTrack::createTrack_l(
}
}
} else {
- ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", frameCount);
+ ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
// once denied, do not request again if IAudioTrack is re-created
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
- mFlags = flags;
- if (sharedBuffer == 0) {
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
+ if (mSharedBuffer == 0) {
if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
mNotificationFramesAct = frameCount/nBuffering;
}
}
}
}
- if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
if (trackFlags & IAudioFlinger::TRACK_OFFLOAD) {
ALOGV("AUDIO_OUTPUT_FLAG_OFFLOAD successful");
} else {
ALOGW("AUDIO_OUTPUT_FLAG_OFFLOAD denied by server");
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
- mFlags = flags;
- return NO_INIT;
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+ // FIXME This is a warning, not an error, so don't return error status
+ //return NO_INIT;
+ }
+ }
+ if (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ if (trackFlags & IAudioFlinger::TRACK_DIRECT) {
+ ALOGV("AUDIO_OUTPUT_FLAG_DIRECT successful");
+ } else {
+ ALOGW("AUDIO_OUTPUT_FLAG_DIRECT denied by server");
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+ // FIXME This is a warning, not an error, so don't return error status
+ //return NO_INIT;
}
}
+ // We retain a copy of the I/O handle, but don't own the reference
+ mOutput = output;
mRefreshRemaining = true;
// Starting address of buffers in shared memory. If there is a shared buffer, buffers
@@ -1063,15 +1210,16 @@ status_t AudioTrack::createTrack_l(
// immediately after the control block. This address is for the mapping within client
// address space. AudioFlinger::TrackBase::mBuffer is for the server address space.
void* buffers;
- if (sharedBuffer == 0) {
+ if (mSharedBuffer == 0) {
buffers = (char*)cblk + sizeof(audio_track_cblk_t);
} else {
- buffers = sharedBuffer->pointer();
+ buffers = mSharedBuffer->pointer();
}
mAudioTrack->attachAuxEffect(mAuxEffectId);
// FIXME don't believe this lie
- mLatency = afLatency + (1000*frameCount) / sampleRate;
+ mLatency = afLatency + (1000*frameCount) / mSampleRate;
+
mFrameCount = frameCount;
// If IAudioTrack is re-created, don't let the requested frameCount
// decrease. This can confuse clients that cache frameCount().
@@ -1080,24 +1228,34 @@ status_t AudioTrack::createTrack_l(
}
// update proxy
- if (sharedBuffer == 0) {
+ if (mSharedBuffer == 0) {
mStaticProxy.clear();
mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
} else {
mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
mProxy = mStaticProxy;
}
- mProxy->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) |
- uint16_t(mVolume[LEFT] * 0x1000));
+
+ mProxy->setVolumeLR(gain_minifloat_pack(
+ gain_from_float(mVolume[AUDIO_INTERLEAVE_LEFT]),
+ gain_from_float(mVolume[AUDIO_INTERLEAVE_RIGHT])));
+
mProxy->setSendLevel(mSendLevel);
mProxy->setSampleRate(mSampleRate);
- mProxy->setEpoch(epoch);
mProxy->setMinimum(mNotificationFramesAct);
mDeathNotifier = new DeathNotifier(this);
- mAudioTrack->asBinder()->linkToDeath(mDeathNotifier, this);
+ IInterface::asBinder(mAudioTrack)->linkToDeath(mDeathNotifier, this);
return NO_ERROR;
+ }
+
+release:
+ AudioSystem::releaseOutput(output, streamType, (audio_session_t)mSessionId);
+ if (status == NO_ERROR) {
+ status = NO_INIT;
+ }
+ return status;
}
status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
@@ -1113,13 +1271,13 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
}
const struct timespec *requested;
+ struct timespec timeout;
if (waitCount == -1) {
requested = &ClientProxy::kForever;
} else if (waitCount == 0) {
requested = &ClientProxy::kNonBlocking;
} else if (waitCount > 0) {
long long ms = WAIT_PERIOD_MS * (long long) waitCount;
- struct timespec timeout;
timeout.tv_sec = ms / 1000;
timeout.tv_nsec = (int) (ms % 1000) * 1000000;
requested = &timeout;
@@ -1218,6 +1376,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer)
buffer.mRaw = audioBuffer->raw;
AutoMutex lock(mLock);
+ mReleased += stepCount;
mInUnderrun = false;
mProxy->releaseBuffer(&buffer);
@@ -1225,8 +1384,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer)
if (mState == STATE_ACTIVE) {
audio_track_cblk_t* cblk = mCblk;
if (android_atomic_and(~CBLK_DISABLED, &cblk->mFlags) & CBLK_DISABLED) {
- ALOGW("releaseBuffer() track %p name=%s disabled due to previous underrun, restarting",
- this, mName.string());
+ ALOGW("releaseBuffer() track %p disabled due to previous underrun, restarting", this);
// FIXME ignoring status
mAudioTrack->start();
}
@@ -1235,12 +1393,22 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer)
// -------------------------------------------------------------------------
-ssize_t AudioTrack::write(const void* buffer, size_t userSize)
+ssize_t AudioTrack::write(const void* buffer, size_t userSize, bool blocking)
{
if (mTransfer != TRANSFER_SYNC || mIsTimed) {
return INVALID_OPERATION;
}
+ if (isDirect()) {
+ AutoMutex lock(mLock);
+ int32_t flags = android_atomic_and(
+ ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END),
+ &mCblk->mFlags);
+ if (flags & CBLK_INVALID) {
+ return DEAD_OBJECT;
+ }
+ }
+
if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
// Sanity-check: user is most-likely passing an error code, and it would
// make the return value ambiguous (actualSize vs error).
@@ -1254,7 +1422,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize)
while (userSize >= mFrameSize) {
audioBuffer.frameCount = userSize / mFrameSize;
- status_t err = obtainBuffer(&audioBuffer, &ClientProxy::kForever);
+ status_t err = obtainBuffer(&audioBuffer,
+ blocking ? &ClientProxy::kForever : &ClientProxy::kNonBlocking);
if (err < 0) {
if (written > 0) {
break;
@@ -1350,7 +1519,7 @@ status_t TimedAudioTrack::setMediaTimeTransform(const LinearTransform& xform,
// -------------------------------------------------------------------------
-nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
+nsecs_t AudioTrack::processAudioBuffer()
{
// Currently the AudioTrack thread is not created if there are no callbacks.
// Would it ever make sense to run the thread, even without callbacks?
@@ -1388,7 +1557,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
// for offloaded tracks restoreTrack_l() will just update the sequence and clear
// AudioSystem cache. We should not exit here but after calling the callback so
// that the upper layers can recreate the track
- if (!isOffloaded() || (mSequence == mObservedSequence)) {
+ if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) {
status_t status = restoreTrack_l("processAudioBuffer");
mLock.unlock();
// Run again immediately, but with a new IAudioTrack
@@ -1420,7 +1589,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
}
// Get current position of server
- size_t position = mProxy->getPosition();
+ size_t position = updateAndGetPosition_l();
// Manage marker callback
bool markerReached = false;
@@ -1443,7 +1612,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
// Cache other fields that will be needed soon
uint32_t loopPeriod = mLoopPeriod;
uint32_t sampleRate = mSampleRate;
- size_t notificationFrames = mNotificationFramesAct;
+ uint32_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
mRefreshRemaining = false;
mRemainingFrames = notificationFrames;
@@ -1451,6 +1620,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
}
size_t misalignment = mProxy->getMisalignment();
uint32_t sequence = mSequence;
+ sp<AudioTrackClientProxy> proxy = mProxy;
// These fields don't need to be cached, because they are assigned only by set():
// mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFrameSizeAF, mFlags
@@ -1459,35 +1629,33 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
mLock.unlock();
if (waitStreamEnd) {
- AutoMutex lock(mLock);
-
- sp<AudioTrackClientProxy> proxy = mProxy;
- sp<IMemory> iMem = mCblkMemory;
-
struct timespec timeout;
timeout.tv_sec = WAIT_STREAM_END_TIMEOUT_SEC;
timeout.tv_nsec = 0;
- mLock.unlock();
- status_t status = mProxy->waitStreamEndDone(&timeout);
- mLock.lock();
+ status_t status = proxy->waitStreamEndDone(&timeout);
switch (status) {
case NO_ERROR:
case DEAD_OBJECT:
case TIMED_OUT:
- mLock.unlock();
mCbf(EVENT_STREAM_END, mUserData, NULL);
- mLock.lock();
- if (mState == STATE_STOPPING) {
- mState = STATE_STOPPED;
- if (status != DEAD_OBJECT) {
- return NS_INACTIVE;
+ {
+ AutoMutex lock(mLock);
+ // The previously assigned value of waitStreamEnd is no longer valid,
+ // since the mutex has been unlocked and either the callback handler
+ // or another thread could have re-started the AudioTrack during that time.
+ waitStreamEnd = mState == STATE_STOPPING;
+ if (waitStreamEnd) {
+ mState = STATE_STOPPED;
+ mReleased = 0;
}
}
- return 0;
- default:
- return 0;
+ if (waitStreamEnd && status != DEAD_OBJECT) {
+ return NS_INACTIVE;
+ }
+ break;
}
+ return 0;
}
// perform callbacks while unlocked
@@ -1516,7 +1684,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
mObservedSequence = sequence;
mCbf(EVENT_NEW_IAUDIOTRACK, mUserData, NULL);
// for offloaded tracks, just wait for the upper layers to recreate the track
- if (isOffloaded()) {
+ if (isOffloadedOrDirect()) {
return NS_INACTIVE;
}
}
@@ -1574,10 +1742,10 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
size_t nonContig;
status_t err = obtainBuffer(&audioBuffer, requested, NULL, &nonContig);
LOG_ALWAYS_FATAL_IF((err != NO_ERROR) != (audioBuffer.frameCount == 0),
- "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount);
+ "obtainBuffer() err=%d frameCount=%zu", err, audioBuffer.frameCount);
requested = &ClientProxy::kNonBlocking;
size_t avail = audioBuffer.frameCount + nonContig;
- ALOGV("obtainBuffer(%u) returned %u = %u + %u err %d",
+ ALOGV("obtainBuffer(%u) returned %zu = %zu + %zu err %d",
mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
if (err != NO_ERROR) {
if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR ||
@@ -1609,12 +1777,11 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
size_t reqSize = audioBuffer.size;
mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
size_t writtenSize = audioBuffer.size;
- size_t writtenFrames = writtenSize / mFrameSize;
// Sanity check on returned size
if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) {
- ALOGE("EVENT_MORE_DATA requested %u bytes but callback returned %d bytes",
- reqSize, (int) writtenSize);
+ ALOGE("EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
+ reqSize, ssize_t(writtenSize));
return NS_NEVER;
}
@@ -1675,37 +1842,31 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
status_t AudioTrack::restoreTrack_l(const char *from)
{
ALOGW("dead IAudioTrack, %s, creating a new one from %s()",
- isOffloaded() ? "Offloaded" : "PCM", from);
+ isOffloadedOrDirect_l() ? "Offloaded or Direct" : "PCM", from);
++mSequence;
status_t result;
// refresh the audio configuration cache in this process to make sure we get new
- // output parameters in getOutput_l() and createTrack_l()
+ // output parameters and new IAudioFlinger in createTrack_l()
AudioSystem::clearAudioConfigCache();
- if (isOffloaded()) {
+ if (isOffloadedOrDirect_l()) {
+ // FIXME re-creation of offloaded tracks is not yet implemented
return DEAD_OBJECT;
}
- // force new output query from audio policy manager;
- mOutput = 0;
- audio_io_handle_t output = getOutput_l();
+ // save the old static buffer position
+ size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0;
- // if the new IAudioTrack is created, createTrack_l() will modify the
+ // If a new IAudioTrack is successfully created, createTrack_l() will modify the
// following member variables: mAudioTrack, mCblkMemory and mCblk.
- // It will also delete the strong references on previous IAudioTrack and IMemory
+ // It will also delete the strong references on previous IAudioTrack and IMemory.
+ // If a new IAudioTrack cannot be created, the previous (dead) instance will be left intact.
+ result = createTrack_l();
// take the frames that will be lost by track recreation into account in saved position
- size_t position = mProxy->getPosition() + mProxy->getFramesFilled();
- size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0;
- result = createTrack_l(mStreamType,
- mSampleRate,
- mFormat,
- mReqFrameCount, // so that frame count never goes down
- mFlags,
- mSharedBuffer,
- output,
- position /*epoch*/);
+ (void) updateAndGetPosition_l();
+ mPosition = mReleased;
if (result == NO_ERROR) {
// continue playback from last known position, but
@@ -1733,17 +1894,35 @@ status_t AudioTrack::restoreTrack_l(const char *from)
}
}
if (result != NO_ERROR) {
- //Use of direct and offloaded output streams is ref counted by audio policy manager.
- // As getOutput was called above and resulted in an output stream to be opened,
- // we need to release it.
- AudioSystem::releaseOutput(output);
ALOGW("restoreTrack_l() failed status %d", result);
mState = STATE_STOPPED;
+ mReleased = 0;
}
return result;
}
+uint32_t AudioTrack::updateAndGetPosition_l()
+{
+ // This is the sole place to read server consumed frames
+ uint32_t newServer = mProxy->getPosition();
+ int32_t delta = newServer - mServer;
+ mServer = newServer;
+ // TODO There is controversy about whether there can be "negative jitter" in server position.
+ // This should be investigated further, and if possible, it should be addressed.
+ // A more definite failure mode is infrequent polling by client.
+ // One could call (void)getPosition_l() in releaseBuffer(),
+ // so that mReleased and mPosition stay in lock-step as closely as possible.
+ // That should ensure delta never goes negative for infrequent polling
+ // unless the server has more than 2^31 frames in its buffer,
+ // in which case the use of uint32_t for these counters has bigger issues.
+ if (delta < 0) {
+ ALOGE("detected illegal retrograde motion by the server: mServer advanced by %d", delta);
+ delta = 0;
+ }
+ return mPosition += (uint32_t) delta;
+}
+
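Editorial note: updateAndGetPosition_l() relies on unsigned 32-bit wraparound — the difference between two successive server counters is computed in uint32_t and then reinterpreted as a signed delta, so the accumulated client position keeps advancing correctly across the 2^32 boundary, and a (supposedly impossible) negative delta is clamped to zero. A self-contained illustration of that arithmetic; names are hypothetical:

#include <cassert>
#include <cstdint>

struct PositionTracker {
    uint32_t server = 0;    // last raw server counter seen
    uint32_t position = 0;  // accumulated client-side position

    uint32_t update(uint32_t newServer) {
        int32_t delta = static_cast<int32_t>(newServer - server);  // wrap-safe subtraction
        server = newServer;
        if (delta < 0) {
            delta = 0;  // ignore retrograde motion, as the patch does
        }
        return position += static_cast<uint32_t>(delta);
    }
};

int main() {
    PositionTracker t;
    t.server = 0xFFFFFF00u;                   // just below the 32-bit wrap point
    assert(t.update(0x00000100u) == 0x200);   // 0x200 frames consumed across the wrap
    return 0;
}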
status_t AudioTrack::setParameters(const String8& keyValuePairs)
{
AutoMutex lock(mLock);
@@ -1757,26 +1936,132 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)
if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
return INVALID_OPERATION;
}
- if (mState != STATE_ACTIVE && mState != STATE_PAUSED) {
- return INVALID_OPERATION;
+
+ switch (mState) {
+ case STATE_ACTIVE:
+ case STATE_PAUSED:
+ break; // handle below
+ case STATE_FLUSHED:
+ case STATE_STOPPED:
+ return WOULD_BLOCK;
+ case STATE_STOPPING:
+ case STATE_PAUSED_STOPPING:
+ if (!isOffloaded_l()) {
+ return INVALID_OPERATION;
+ }
+ break; // offloaded tracks handled below
+ default:
+ LOG_ALWAYS_FATAL("Invalid mState in getTimestamp(): %d", mState);
+ break;
+ }
+
+ if (mCblk->mFlags & CBLK_INVALID) {
+ restoreTrack_l("getTimestamp");
}
+
+ // The presented frame count must always lag behind the consumed frame count.
+ // To avoid a race, read the presented frames first. This ensures that presented <= consumed.
status_t status = mAudioTrack->getTimestamp(timestamp);
- if (status == NO_ERROR) {
- timestamp.mPosition += mProxy->getEpoch();
+ if (status != NO_ERROR) {
+ ALOGV_IF(status != WOULD_BLOCK, "getTimestamp error:%#x", status);
+ return status;
+ }
+ if (isOffloadedOrDirect_l()) {
+ if (isOffloaded_l() && (mState == STATE_PAUSED || mState == STATE_PAUSED_STOPPING)) {
+ // use cached paused position in case another offloaded track is running.
+ timestamp.mPosition = mPausedPosition;
+ clock_gettime(CLOCK_MONOTONIC, &timestamp.mTime);
+ return NO_ERROR;
+ }
+
+ // Check whether a pending flush or stop has completed, as those commands may
+ // be asynchronous or may return before they have fully taken effect.
+ if (mStartUs != 0 && mSampleRate != 0) {
+ static const int kTimeJitterUs = 100000; // 100 ms
+ static const int k1SecUs = 1000000;
+
+ const int64_t timeNow = getNowUs();
+
+ if (timeNow < mStartUs + k1SecUs) { // within first second of starting
+ const int64_t timestampTimeUs = convertTimespecToUs(timestamp.mTime);
+ if (timestampTimeUs < mStartUs) {
+ return WOULD_BLOCK; // stale timestamp time, occurs before start.
+ }
+ const int64_t deltaTimeUs = timestampTimeUs - mStartUs;
+ const int64_t deltaPositionByUs = timestamp.mPosition * 1000000LL / mSampleRate;
+
+ if (deltaPositionByUs > deltaTimeUs + kTimeJitterUs) {
+ // Verify that the counter can't count faster than the sample rate
+ // since the start time. If it does, that means we have failed
+ // to completely flush or stop the previous playing track.
+ ALOGW("incomplete flush or stop:"
+ " deltaTimeUs(%lld) deltaPositionUs(%lld) tsmPosition(%u)",
+ (long long)deltaTimeUs, (long long)deltaPositionByUs,
+ timestamp.mPosition);
+ return WOULD_BLOCK;
+ }
+ }
+ mStartUs = 0; // no need to check again; the start timestamp has either expired or is no longer needed.
+ }
+ } else {
+ // Update the mapping between local consumed (mPosition) and server consumed (mServer)
+ (void) updateAndGetPosition_l();
+ // Server consumed (mServer) and presented both use the same server time base,
+ // and server consumed is always >= presented.
+ // The delta between these represents the number of frames in the buffer pipeline.
+ // If this delta is greater than the client position, it means that
+ // actually presented is still stuck at the starting line (figuratively speaking),
+ // waiting for the first frame to go by. So we can't report a valid timestamp yet.
+ if ((uint32_t) (mServer - timestamp.mPosition) > mPosition) {
+ return INVALID_OPERATION;
+ }
+ // Convert timestamp position from server time base to client time base.
+ // TODO The following code should work OK now because timestamp.mPosition is 32-bit.
+ // But if we change it to 64-bit then this could fail.
+ // If (mPosition - mServer) can be negative, then we should use:
+ // (int32_t)(mPosition - mServer)
+ timestamp.mPosition += mPosition - mServer;
+ // Immediately after a call to getPosition_l(), mPosition and
+ // mServer both represent the same frame position. mPosition is
+ // in client's point of view, and mServer is in server's point of
+ // view. So the difference between them is the "fudge factor"
+ // between client and server views due to stop() and/or new
+ // IAudioTrack. And timestamp.mPosition is initially in server's
+ // point of view, so we need to apply the same fudge factor to it.
}
return status;
}
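Editorial note: two pieces of arithmetic in getTimestamp() are worth spelling out. For offloaded/direct tracks started less than a second ago, the reported position is rejected if it implies the counter ran faster than the nominal sample rate since start() (allowing 100 ms of jitter), which indicates the previous track was not fully flushed or stopped. For mixed tracks, the server-side position is translated into the client's frame of reference by adding (mPosition - mServer). A small sketch of the first check, with hypothetical values:

#include <cstdint>

// Returns true if a reported position is plausible given the start time.
// The real code reads these values from the AudioTimestamp and member fields.
static bool timestampPlausible(int64_t startUs, int64_t timestampTimeUs,
                               uint32_t positionFrames, uint32_t sampleRate) {
    static const int64_t kTimeJitterUs = 100000;  // 100 ms, as in the patch
    if (timestampTimeUs < startUs) {
        return false;  // stale timestamp, predates start()
    }
    const int64_t deltaTimeUs = timestampTimeUs - startUs;
    const int64_t deltaPositionByUs = int64_t(positionFrames) * 1000000LL / sampleRate;
    // The counter cannot legitimately run faster than the sample rate.
    return deltaPositionByUs <= deltaTimeUs + kTimeJitterUs;
}
// Example: 24000 frames at 48 kHz is 500 ms of audio; if only 300 ms of wall time
// has elapsed since start(), the position is rejected as an incomplete flush.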
String8 AudioTrack::getParameters(const String8& keys)
{
- if (mOutput) {
- return AudioSystem::getParameters(mOutput, keys);
+ audio_io_handle_t output = getOutput();
+ if (output != AUDIO_IO_HANDLE_NONE) {
+ return AudioSystem::getParameters(output, keys);
} else {
return String8::empty();
}
}
-status_t AudioTrack::dump(int fd, const Vector<String16>& args) const
+bool AudioTrack::isOffloaded() const
+{
+ AutoMutex lock(mLock);
+ return isOffloaded_l();
+}
+
+bool AudioTrack::isDirect() const
+{
+ AutoMutex lock(mLock);
+ return isDirect_l();
+}
+
+bool AudioTrack::isOffloadedOrDirect() const
+{
+ AutoMutex lock(mLock);
+ return isOffloadedOrDirect_l();
+}
+
+
+status_t AudioTrack::dump(int fd, const Vector<String16>& args __unused) const
{
const size_t SIZE = 256;
@@ -1785,7 +2070,7 @@ status_t AudioTrack::dump(int fd, const Vector<String16>& args) const
result.append(" AudioTrack::dump\n");
snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType,
- mVolume[0], mVolume[1]);
+ mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
result.append(buffer);
snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%zu)\n", mFormat,
mChannelCount, mFrameCount);
@@ -1806,7 +2091,7 @@ uint32_t AudioTrack::getUnderrunFrames() const
// =========================================================================
-void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who)
+void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
{
sp<AudioTrack> audioTrack = mAudioTrack.promote();
if (audioTrack != 0) {
@@ -1850,7 +2135,10 @@ bool AudioTrack::AudioTrackThread::threadLoop()
return true;
}
}
- nsecs_t ns = mReceiver.processAudioBuffer(this);
+ if (exitPending()) {
+ return false;
+ }
+ nsecs_t ns = mReceiver.processAudioBuffer();
switch (ns) {
case 0:
return true;
@@ -1864,7 +2152,7 @@ bool AudioTrack::AudioTrackThread::threadLoop()
ns = 1000000000LL;
// fall through
default:
- LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
+ LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
pauseInternal(ns);
return true;
}
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index e898109..ff24475 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -19,15 +19,21 @@
#include <private/media/AudioTrackShared.h>
#include <utils/Log.h>
-extern "C" {
-#include "../private/bionic_futex.h"
-}
+
+#include <linux/futex.h>
+#include <sys/syscall.h>
namespace android {
+// used to clamp a value to size_t. TODO: move to another file.
+template <typename T>
+size_t clampToSize(T x) {
+ return x > SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
+}
+
audio_track_cblk_t::audio_track_cblk_t()
- : mServer(0), frameCount_(0), mFutex(0), mMinimum(0),
- mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mFlags(0)
+ : mServer(0), mFutex(0), mMinimum(0),
+ mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0), mFlags(0)
{
memset(&u, 0, sizeof(u));
}
@@ -134,10 +140,17 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques
ssize_t filled = rear - front;
// pipe should not be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
- mIsShutdown = true;
- status = NO_INIT;
- goto end;
+ if (mIsOut) {
+ ALOGE("Shared memory control block is corrupt (filled=%zd, mFrameCount=%zu); "
+ "shutting down", filled, mFrameCount);
+ mIsShutdown = true;
+ status = NO_INIT;
+ goto end;
+ }
+ // for input, sync up on overrun
+ filled = 0;
+ cblk->u.mStreaming.mFront = rear;
+ (void) android_atomic_or(CBLK_OVERRUN, &cblk->mFlags);
}
// don't allow filling pipe beyond the nominal size
size_t avail = mIsOut ? mFrameCount - filled : filled;
@@ -200,18 +213,18 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques
ts = &remaining;
break;
default:
- LOG_FATAL("obtainBuffer() timeout=%d", timeout);
+ LOG_ALWAYS_FATAL("obtainBuffer() timeout=%d", timeout);
ts = NULL;
break;
}
int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
- int rc;
if (measure && !beforeIsValid) {
clock_gettime(CLOCK_MONOTONIC, &before);
beforeIsValid = true;
}
- int ret = __futex_syscall4(&cblk->mFutex,
+ errno = 0;
+ (void) syscall(__NR_futex, &cblk->mFutex,
mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old & ~CBLK_FUTEX_WAKE, ts);
// update total elapsed time spent waiting
if (measure) {
@@ -230,16 +243,16 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques
before = after;
beforeIsValid = true;
}
- switch (ret) {
- case 0: // normal wakeup by server, or by binderDied()
- case -EWOULDBLOCK: // benign race condition with server
- case -EINTR: // wait was interrupted by signal or other spurious wakeup
- case -ETIMEDOUT: // time-out expired
+ switch (errno) {
+ case 0: // normal wakeup by server, or by binderDied()
+ case EWOULDBLOCK: // benign race condition with server
+ case EINTR: // wait was interrupted by signal or other spurious wakeup
+ case ETIMEDOUT: // time-out expired
// FIXME these error/non-0 status are being dropped
break;
default:
- ALOGE("%s unexpected error %d", __func__, ret);
- status = -ret;
+ status = errno;
+ ALOGE("%s unexpected error %s", __func__, strerror(status));
goto end;
}
}
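Editorial note: the bionic-private __futex_syscall4() wrapper is replaced by the raw futex(2) system call, and the error convention changes with it — syscall() returns -1 and sets errno rather than returning a negative errno. A minimal, self-contained example of the same wait pattern; the function and variable names are illustrative:

#include <cerrno>
#include <cstdint>
#include <ctime>
#include <linux/futex.h>
#include <sys/syscall.h>
#include <unistd.h>

// Wait until *addr is observed to differ from 'expected', the timeout expires,
// or a wake is issued. Returns 0 on a normal wake, otherwise the errno value.
static int futexWait(volatile int32_t* addr, int32_t expected,
                     const struct timespec* timeout, bool privateFutex) {
    errno = 0;
    long rc = syscall(__NR_futex, addr,
                      privateFutex ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT,
                      expected, timeout);
    if (rc == 0) {
        return 0;  // woken by FUTEX_WAKE
    }
    // EWOULDBLOCK: *addr already != expected; EINTR: signal; ETIMEDOUT: timeout.
    return errno;
}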
@@ -294,8 +307,9 @@ void ClientProxy::binderDied()
{
audio_track_cblk_t* cblk = mCblk;
if (!(android_atomic_or(CBLK_INVALID, &cblk->mFlags) & CBLK_INVALID)) {
+ android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
// it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process
- (void) __futex_syscall3(&cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
+ (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
1);
}
}
@@ -304,7 +318,8 @@ void ClientProxy::interrupt()
{
audio_track_cblk_t* cblk = mCblk;
if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->mFlags) & CBLK_INTERRUPT)) {
- (void) __futex_syscall3(&cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
+ android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
+ (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
1);
}
}
@@ -331,7 +346,7 @@ size_t ClientProxy::getFramesFilled() {
ssize_t filled = rear - front;
// pipe should not be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
+ ALOGE("Shared memory control block is corrupt (filled=%zd); shutting down", filled);
return 0;
}
return (size_t)filled;
@@ -341,7 +356,13 @@ size_t ClientProxy::getFramesFilled() {
void AudioTrackClientProxy::flush()
{
- mCblk->u.mStreaming.mFlush++;
+ // This works for mFrameCountP2 <= 2^30
+ size_t increment = mFrameCountP2 << 1;
+ size_t mask = increment - 1;
+ audio_track_cblk_t* cblk = mCblk;
+ int32_t newFlush = (cblk->u.mStreaming.mRear & mask) |
+ ((cblk->u.mStreaming.mFlush & ~mask) + increment);
+ android_atomic_release_store(newFlush, &cblk->u.mStreaming.mFlush);
}
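Editorial note: mFlush is no longer a simple counter. Its low log2(2 * mFrameCountP2) bits carry the rear index at the time of the flush, and the remaining high bits act as a generation count bumped by 'increment' on every flush; the server (see the matching ServerProxy::obtainBuffer() change further down) rebuilds the new front pointer by keeping its own high bits and adopting the client's low bits, so both sides agree on how much data the flush discarded. A standalone sketch of the encode/decode pair, assuming frameCountP2 is a power of two no larger than 2^30 as the comment above requires:

#include <cassert>
#include <cstdint>

// Client side: fold the current rear index into the low bits and bump the
// generation in the high bits (mirrors AudioTrackClientProxy::flush()).
static int32_t encodeFlush(int32_t prevFlush, int32_t rear, uint32_t frameCountP2) {
    const uint32_t increment = frameCountP2 << 1;
    const uint32_t mask = increment - 1;
    return (rear & mask) | ((prevFlush & ~mask) + increment);
}

// Server side: splice the client's low bits onto the server's own front index
// (mirrors the ServerProxy::obtainBuffer() handling of a changed mFlush).
static int32_t decodeNewFront(int32_t front, int32_t flush, uint32_t frameCountP2) {
    const uint32_t mask = (frameCountP2 << 1) - 1;
    return (front & ~mask) | (flush & mask);
}

int main() {
    const uint32_t frameCountP2 = 1024;
    int32_t rear = 3000, front = 1000, flushWord = 0;
    flushWord = encodeFlush(flushWord, rear, frameCountP2);
    int32_t newFront = decodeNewFront(front, flushWord, frameCountP2);
    // The server's new front now matches the client's rear modulo 2 * frameCountP2.
    assert((newFront & ((frameCountP2 << 1) - 1)) == (rear & ((frameCountP2 << 1) - 1)));
    return 0;
}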
bool AudioTrackClientProxy::clearStreamEndDone() {
@@ -429,24 +450,24 @@ status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *request
ts = &remaining;
break;
default:
- LOG_FATAL("waitStreamEndDone() timeout=%d", timeout);
+ LOG_ALWAYS_FATAL("waitStreamEndDone() timeout=%d", timeout);
ts = NULL;
break;
}
int32_t old = android_atomic_and(~CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
- int rc;
- int ret = __futex_syscall4(&cblk->mFutex,
+ errno = 0;
+ (void) syscall(__NR_futex, &cblk->mFutex,
mClientInServer ? FUTEX_WAIT_PRIVATE : FUTEX_WAIT, old & ~CBLK_FUTEX_WAKE, ts);
- switch (ret) {
- case 0: // normal wakeup by server, or by binderDied()
- case -EWOULDBLOCK: // benign race condition with server
- case -EINTR: // wait was interrupted by signal or other spurious wakeup
- case -ETIMEDOUT: // time-out expired
+ switch (errno) {
+ case 0: // normal wakeup by server, or by binderDied()
+ case EWOULDBLOCK: // benign race condition with server
+ case EINTR: // wait was interrupted by signal or other spurious wakeup
+ case ETIMEDOUT: // time-out expired
break;
default:
- ALOGE("%s unexpected error %d", __func__, ret);
- status = -ret;
+ status = errno;
+ ALOGE("%s unexpected error %s", __func__, strerror(status));
goto end;
}
}
@@ -470,7 +491,7 @@ StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cbl
void StaticAudioTrackClientProxy::flush()
{
- LOG_FATAL("static flush");
+ LOG_ALWAYS_FATAL("static flush");
}
void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int loopCount)
@@ -484,7 +505,11 @@ void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int
newState.mLoopStart = (uint32_t) loopStart;
newState.mLoopEnd = (uint32_t) loopEnd;
newState.mLoopCount = loopCount;
- mBufferPosition = loopStart;
+ size_t bufferPosition;
+ if (loopCount == 0 || (bufferPosition = getBufferPosition()) >= loopEnd) {
+ bufferPosition = loopStart;
+ }
+ mBufferPosition = bufferPosition; // snapshot buffer position until loop is acknowledged.
(void) mMutator.push(newState);
}
@@ -529,17 +554,27 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
rear = android_atomic_acquire_load(&cblk->u.mStreaming.mRear);
front = cblk->u.mStreaming.mFront;
if (flush != mFlush) {
- mFlush = flush;
// effectively obtain then release whatever is in the buffer
- android_atomic_release_store(rear, &cblk->u.mStreaming.mFront);
- if (front != rear) {
+ size_t mask = (mFrameCountP2 << 1) - 1;
+ int32_t newFront = (front & ~mask) | (flush & mask);
+ ssize_t filled = rear - newFront;
+ // Rather than shutting down on a corrupt flush, just treat it as a full flush
+ if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
+ ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, filled %d=%#x",
+ mFlush, flush, front, rear, mask, newFront, filled, filled);
+ newFront = rear;
+ }
+ mFlush = flush;
+ android_atomic_release_store(newFront, &cblk->u.mStreaming.mFront);
+ // There is no danger from a false positive, so err on the side of caution
+ if (true /*front != newFront*/) {
int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
- (void) __futex_syscall3(&cblk->mFutex,
+ (void) syscall(__NR_futex, &cblk->mFutex,
mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
}
}
- front = rear;
+ front = newFront;
}
} else {
front = android_atomic_acquire_load(&cblk->u.mStreaming.mFront);
@@ -548,7 +583,7 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
ssize_t filled = rear - front;
// pipe should not already be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
+ ALOGE("Shared memory control block is corrupt (filled=%zd); shutting down", filled);
mIsShutdown = true;
}
if (mIsShutdown) {
@@ -621,7 +656,7 @@ void ServerProxy::releaseBuffer(Buffer* buffer)
android_atomic_release_store(stepCount + rear, &cblk->u.mStreaming.mRear);
}
- mCblk->mServer += stepCount;
+ cblk->mServer += stepCount;
size_t half = mFrameCount / 2;
if (half == 0) {
@@ -635,10 +670,10 @@ void ServerProxy::releaseBuffer(Buffer* buffer)
}
// FIXME AudioRecord wakeup needs to be optimized; it currently wakes up client every time
if (!mIsOut || (mAvailToClient + stepCount >= minimum)) {
- ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum);
+ ALOGV("mAvailToClient=%zu stepCount=%zu minimum=%zu", mAvailToClient, stepCount, minimum);
int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
- (void) __futex_syscall3(&cblk->mFutex,
+ (void) syscall(__NR_futex, &cblk->mFutex,
mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE, 1);
}
}
@@ -661,6 +696,7 @@ size_t AudioTrackServerProxy::framesReady()
int32_t flush = cblk->u.mStreaming.mFlush;
if (flush != mFlush) {
+ // FIXME should return an accurate value, but over-estimate is better than under-estimate
return mFrameCount;
}
// the acquire might not be necessary since not doing a subsequent read
@@ -668,7 +704,7 @@ size_t AudioTrackServerProxy::framesReady()
ssize_t filled = rear - cblk->u.mStreaming.mFront;
// pipe should not already be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
+ ALOGE("Shared memory control block is corrupt (filled=%zd); shutting down", filled);
mIsShutdown = true;
return 0;
}
@@ -679,10 +715,11 @@ size_t AudioTrackServerProxy::framesReady()
}
bool AudioTrackServerProxy::setStreamEndDone() {
+ audio_track_cblk_t* cblk = mCblk;
bool old =
- (android_atomic_or(CBLK_STREAM_END_DONE, &mCblk->mFlags) & CBLK_STREAM_END_DONE) != 0;
+ (android_atomic_or(CBLK_STREAM_END_DONE, &cblk->mFlags) & CBLK_STREAM_END_DONE) != 0;
if (!old) {
- (void) __futex_syscall3(&mCblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
+ (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
1);
}
return old;
@@ -690,10 +727,11 @@ bool AudioTrackServerProxy::setStreamEndDone() {
void AudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
{
- mCblk->u.mStreaming.mUnderrunFrames += frameCount;
+ audio_track_cblk_t* cblk = mCblk;
+ cblk->u.mStreaming.mUnderrunFrames += frameCount;
// FIXME also wake futex so that underrun is noticed more quickly
- (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags);
+ (void) android_atomic_or(CBLK_UNDERRUN, &cblk->mFlags);
}
// ---------------------------------------------------------------------------
@@ -702,7 +740,8 @@ StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cbl
size_t frameCount, size_t frameSize)
: AudioTrackServerProxy(cblk, buffers, frameCount, frameSize),
mObserver(&cblk->u.mStatic.mSingleStateQueue), mPosition(0),
- mEnd(frameCount), mFramesReadyIsCalledByMultipleThreads(false)
+ mFramesReadySafe(frameCount), mFramesReady(frameCount),
+ mFramesReadyIsCalledByMultipleThreads(false)
{
mState.mLoopStart = 0;
mState.mLoopEnd = 0;
@@ -716,20 +755,11 @@ void StaticAudioTrackServerProxy::framesReadyIsCalledByMultipleThreads()
size_t StaticAudioTrackServerProxy::framesReady()
{
- // FIXME
- // This is racy if called by normal mixer thread,
- // as we're reading 2 independent variables without a lock.
- // Can't call mObserver.poll(), as we might be called from wrong thread.
- // If looping is enabled, should return a higher number (since includes non-contiguous).
- size_t position = mPosition;
+ // Can't call pollPosition() from multiple threads.
if (!mFramesReadyIsCalledByMultipleThreads) {
- ssize_t positionOrStatus = pollPosition();
- if (positionOrStatus >= 0) {
- position = (size_t) positionOrStatus;
- }
+ (void) pollPosition();
}
- size_t end = mEnd;
- return position < end ? end - position : 0;
+ return mFramesReadySafe;
}
ssize_t StaticAudioTrackServerProxy::pollPosition()
@@ -746,32 +776,44 @@ ssize_t StaticAudioTrackServerProxy::pollPosition()
}
// ignore loopEnd
mPosition = position = loopStart;
- mEnd = mFrameCount;
+ mFramesReady = mFrameCount - mPosition;
mState.mLoopCount = 0;
valid = true;
- } else {
+ } else if (state.mLoopCount >= -1) {
if (loopStart < loopEnd && loopEnd <= mFrameCount &&
loopEnd - loopStart >= MIN_LOOP) {
- if (!(loopStart <= position && position < loopEnd)) {
+ // If the current position is greater than the end of the loop,
+ // we "wrap" to the loop start. This might cause an audible pop.
+ if (position >= loopEnd) {
mPosition = position = loopStart;
}
- mEnd = loopEnd;
+ if (state.mLoopCount == -1) {
+ mFramesReady = INT64_MAX;
+ } else {
+ // mFramesReady is 64 bits to handle the effective number of frames
+ // that the static audio track contains, including loops.
+ // TODO: Later consider fixing overflow, but that does not seem needed now,
+ // as this will not overflow if loopStart and loopEnd are Java "ints".
+ mFramesReady = int64_t(state.mLoopCount) * (loopEnd - loopStart)
+ + mFrameCount - mPosition;
+ }
mState = state;
valid = true;
}
}
- if (!valid) {
+ if (!valid || mPosition > mFrameCount) {
ALOGE("%s client pushed an invalid state, shutting down", __func__);
mIsShutdown = true;
return (ssize_t) NO_INIT;
}
+ mFramesReadySafe = clampToSize(mFramesReady);
// This may overflow, but client is not supposed to rely on it
mCblk->u.mStatic.mBufferPosition = (uint32_t) position;
}
return (ssize_t) position;
}
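For reference, a minimal stand-alone sketch (not part of the patch) of the frames-ready arithmetic that pollPosition() performs above. clampToSizeT() is a hypothetical stand-in for the proxy's clampToSize() helper, and all the numbers are invented for the example.

// Illustrative only: mirrors the static-track frames-ready computation above.
#include <stdint.h>
#include <stdio.h>

static size_t clampToSizeT(int64_t v) {
    // publish a 64-bit frame count as a size_t, as framesReady() callers expect
    if (v < 0) return 0;
    return (uint64_t) v > SIZE_MAX ? SIZE_MAX : (size_t) v;
}

int main() {
    const size_t frameCount = 48000;   // whole static buffer, in frames
    const size_t loopStart  = 1000;
    const size_t loopEnd    = 25000;
    const size_t position   = 1000;    // playback currently at loopStart
    const int    loopCount  = 3;       // three more passes through the loop

    // remaining frames = frames still owed by the loops + the tail after them
    int64_t framesReady = (int64_t) loopCount * (loopEnd - loopStart)
            + frameCount - position;
    // loopCount == -1 (loop forever) is represented as INT64_MAX instead
    printf("framesReady = %lld, clamped = %zu\n",
            (long long) framesReady, clampToSizeT(framesReady));
    return 0;
}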
-status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
+status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush __unused)
{
if (mIsShutdown) {
buffer->mFrameCount = 0;
@@ -789,9 +831,10 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
return (status_t) positionOrStatus;
}
size_t position = (size_t) positionOrStatus;
+ size_t end = mState.mLoopCount != 0 ? mState.mLoopEnd : mFrameCount;
size_t avail;
- if (position < mEnd) {
- avail = mEnd - position;
+ if (position < end) {
+ avail = end - position;
size_t wanted = buffer->mFrameCount;
if (avail < wanted) {
buffer->mFrameCount = avail;
@@ -804,7 +847,10 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
buffer->mFrameCount = 0;
buffer->mRaw = NULL;
}
- buffer->mNonContig = 0; // FIXME should be > 0 for looping
+ // As mFramesReady is the total remaining frames in the static audio track,
+ // it is always greater than or equal to avail.
+ LOG_ALWAYS_FATAL_IF(mFramesReady < avail);
+ buffer->mNonContig = mFramesReady == INT64_MAX ? SIZE_MAX : clampToSize(mFramesReady - avail);
mUnreleased = avail;
return NO_ERROR;
}
@@ -812,6 +858,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
{
size_t stepCount = buffer->mFrameCount;
+ LOG_ALWAYS_FATAL_IF(!(stepCount <= mFramesReady));
LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased));
if (stepCount == 0) {
// prevent accidental re-use of buffer
@@ -825,14 +872,13 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
size_t newPosition = position + stepCount;
int32_t setFlags = 0;
if (!(position <= newPosition && newPosition <= mFrameCount)) {
- ALOGW("%s newPosition %u outside [%u, %u]", __func__, newPosition, position, mFrameCount);
+ ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, mFrameCount);
newPosition = mFrameCount;
} else if (mState.mLoopCount != 0 && newPosition == mState.mLoopEnd) {
+ newPosition = mState.mLoopStart;
if (mState.mLoopCount == -1 || --mState.mLoopCount != 0) {
- newPosition = mState.mLoopStart;
setFlags = CBLK_LOOP_CYCLE;
} else {
- mEnd = mFrameCount; // this is what allows playback to continue after the loop
setFlags = CBLK_LOOP_FINAL;
}
}
@@ -840,6 +886,10 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
setFlags |= CBLK_BUFFER_END;
}
mPosition = newPosition;
+ if (mFramesReady != INT64_MAX) {
+ mFramesReady -= stepCount;
+ }
+ mFramesReadySafe = clampToSize(mFramesReady);
cblk->mServer += stepCount;
// This may overflow, but client is not supposed to rely on it
@@ -854,7 +904,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
buffer->mNonContig = 0;
}
-void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
+void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount __unused)
{
// Unlike AudioTrackServerProxy::tallyUnderrunFrames() used for streaming tracks,
// we don't have a location to count underrun frames. The underrun frame counter
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
new file mode 100644
index 0000000..41994dc
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -0,0 +1,473 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CharacterEncodingDetector"
+#include <utils/Log.h>
+
+#include <CharacterEncodingDetector.h>
+#include "CharacterEncodingDetectorTables.h"
+
+#include "utils/Vector.h"
+#include "StringArray.h"
+
+#include "unicode/ucnv.h"
+#include "unicode/ucsdet.h"
+#include "unicode/ustring.h"
+
+namespace android {
+
+CharacterEncodingDetector::CharacterEncodingDetector() {
+
+ UErrorCode status = U_ZERO_ERROR;
+ mUtf8Conv = ucnv_open("UTF-8", &status);
+ if (U_FAILURE(status)) {
+ ALOGE("could not create UConverter for UTF-8");
+ mUtf8Conv = NULL;
+ }
+}
+
+CharacterEncodingDetector::~CharacterEncodingDetector() {
+ ucnv_close(mUtf8Conv);
+}
+
+void CharacterEncodingDetector::addTag(const char *name, const char *value) {
+ mNames.push_back(name);
+ mValues.push_back(value);
+}
+
+size_t CharacterEncodingDetector::size() {
+ return mNames.size();
+}
+
+status_t CharacterEncodingDetector::getTag(int index, const char **name, const char**value) {
+ if (index >= mNames.size()) {
+ return BAD_VALUE;
+ }
+
+ *name = mNames.getEntry(index);
+ *value = mValues.getEntry(index);
+ return OK;
+}
+
+static bool isPrintableAscii(const char *value, size_t len) {
+ for (size_t i = 0; i < len; i++) {
+ if ((value[i] & 0x80) || value[i] < 0x20 || value[i] == 0x7f) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void CharacterEncodingDetector::detectAndConvert() {
+
+ int size = mNames.size();
+ ALOGV("%d tags before conversion", size);
+ for (int i = 0; i < size; i++) {
+ ALOGV("%s: %s", mNames.getEntry(i), mValues.getEntry(i));
+ }
+
+ if (size && mUtf8Conv) {
+
+ UErrorCode status = U_ZERO_ERROR;
+ UCharsetDetector *csd = ucsdet_open(&status);
+ const UCharsetMatch *ucm;
+
+ // try combined detection of artist/album/title etc.
+ char buf[1024];
+ buf[0] = 0;
+ int idx;
+ bool allprintable = true;
+ for (int i = 0; i < size; i++) {
+ const char *name = mNames.getEntry(i);
+ const char *value = mValues.getEntry(i);
+ if (!isPrintableAscii(value, strlen(value)) && (
+ !strcmp(name, "artist") ||
+ !strcmp(name, "albumartist") ||
+ !strcmp(name, "composer") ||
+ !strcmp(name, "genre") ||
+ !strcmp(name, "album") ||
+ !strcmp(name, "title"))) {
+ strlcat(buf, value, sizeof(buf));
+ // separate tags by space so ICU's ngram detector can do its job
+ strlcat(buf, " ", sizeof(buf));
+ allprintable = false;
+ }
+ }
+
+ const char *combinedenc = "UTF-8";
+ if (allprintable) {
+ // since 'buf' is empty, ICU would return a UTF-8 matcher with low confidence, so
+ // no need to even call it
+ ALOGV("all tags are printable, assuming ascii (%zu)", strlen(buf));
+ } else {
+ ucsdet_setText(csd, buf, strlen(buf), &status);
+ int32_t matches;
+ const UCharsetMatch** ucma = ucsdet_detectAll(csd, &matches, &status);
+ bool goodmatch = true;
+ int highest = 0;
+ const UCharsetMatch* bestCombinedMatch = getPreferred(buf, strlen(buf),
+ ucma, matches, &goodmatch, &highest);
+
+ ALOGV("goodmatch: %s, highest: %d", goodmatch ? "true" : "false", highest);
+ if (!goodmatch && (highest < 15 || strlen(buf) < 20)) {
+ ALOGV("not a good match, trying with more data");
+ // This string might be too short for ICU to do anything useful with.
+ // (real world example: "Björk" in ISO-8859-1 might be detected as GB18030, because
+ // the ISO detector reports a confidence of 0, while the GB18030 detector reports
+ // a confidence of 10 with no invalid characters)
+ // Append artist, album and title if they were previously omitted because they
+ // were printable ascii.
+ bool added = false;
+ for (int i = 0; i < size; i++) {
+ const char *name = mNames.getEntry(i);
+ const char *value = mValues.getEntry(i);
+ if (isPrintableAscii(value, strlen(value)) && (
+ !strcmp(name, "artist") ||
+ !strcmp(name, "album") ||
+ !strcmp(name, "title"))) {
+ strlcat(buf, value, sizeof(buf));
+ strlcat(buf, " ", sizeof(buf));
+ added = true;
+ }
+ }
+ if (added) {
+ ucsdet_setText(csd, buf, strlen(buf), &status);
+ ucma = ucsdet_detectAll(csd, &matches, &status);
+ bestCombinedMatch = getPreferred(buf, strlen(buf),
+ ucma, matches, &goodmatch, &highest);
+ if (!goodmatch && highest <= 15) {
+ ALOGV("still not a good match after adding printable tags");
+ bestCombinedMatch = NULL;
+ }
+ } else {
+ ALOGV("no printable tags to add");
+ }
+ }
+
+ if (bestCombinedMatch != NULL) {
+ combinedenc = ucsdet_getName(bestCombinedMatch, &status);
+ } else {
+ combinedenc = "ISO-8859-1";
+ }
+ }
+
+ for (int i = 0; i < size; i++) {
+ const char *name = mNames.getEntry(i);
+ uint8_t* src = (uint8_t *)mValues.getEntry(i);
+ int len = strlen((char *)src);
+ uint8_t* dest = src;
+
+ ALOGV("@@@ checking %s", name);
+ const char *s = mValues.getEntry(i);
+ int32_t inputLength = strlen(s);
+ const char *enc;
+
+ if (!allprintable && (!strcmp(name, "artist") ||
+ !strcmp(name, "albumartist") ||
+ !strcmp(name, "composer") ||
+ !strcmp(name, "genre") ||
+ !strcmp(name, "album") ||
+ !strcmp(name, "title"))) {
+ // use encoding determined from the combination of artist/album/title etc.
+ enc = combinedenc;
+ } else {
+ if (isPrintableAscii(s, inputLength)) {
+ enc = "UTF-8";
+ ALOGV("@@@@ %s is ascii", mNames.getEntry(i));
+ } else {
+ ucsdet_setText(csd, s, inputLength, &status);
+ ucm = ucsdet_detect(csd, &status);
+ if (!ucm) {
+ mValues.setEntry(i, "???");
+ continue;
+ }
+ enc = ucsdet_getName(ucm, &status);
+ ALOGV("@@@@ recognized charset: %s for %s confidence %d",
+ enc, mNames.getEntry(i), ucsdet_getConfidence(ucm, &status));
+ }
+ }
+
+ if (strcmp(enc,"UTF-8") != 0) {
+ // only convert if the source encoding isn't already UTF-8
+ ALOGV("@@@ using converter %s for %s", enc, mNames.getEntry(i));
+ status = U_ZERO_ERROR;
+ UConverter *conv = ucnv_open(enc, &status);
+ if (U_FAILURE(status)) {
+ ALOGW("could not create UConverter for %s (%d), falling back to ISO-8859-1",
+ enc, status);
+ status = U_ZERO_ERROR;
+ conv = ucnv_open("ISO-8859-1", &status);
+ if (U_FAILURE(status)) {
+ ALOGW("could not create UConverter for ISO-8859-1 either");
+ continue;
+ }
+ }
+
+ // convert from native encoding to UTF-8
+ const char* source = mValues.getEntry(i);
+ int targetLength = len * 3 + 1;
+ char* buffer = new char[targetLength];
+ // don't normally check for NULL, but in this case targetLength may be large
+ if (!buffer)
+ break;
+ char* target = buffer;
+
+ ucnv_convertEx(mUtf8Conv, conv, &target, target + targetLength,
+ &source, source + strlen(source),
+ NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
+
+ if (U_FAILURE(status)) {
+ ALOGE("ucnv_convertEx failed: %d", status);
+ mValues.setEntry(i, "???");
+ } else {
+ // zero terminate
+ *target = 0;
+ // strip trailing spaces
+ while (--target > buffer && *target == ' ') {
+ *target = 0;
+ }
+ // skip leading spaces
+ char *start = buffer;
+ while (*start == ' ') {
+ start++;
+ }
+ mValues.setEntry(i, start);
+ }
+
+ delete[] buffer;
+
+ ucnv_close(conv);
+ }
+ }
+
+ for (int i = size - 1; i >= 0; --i) {
+ if (strlen(mValues.getEntry(i)) == 0) {
+ ALOGV("erasing %s because entry is empty", mNames.getEntry(i));
+ mNames.erase(i);
+ mValues.erase(i);
+ }
+ }
+
+ ucsdet_close(csd);
+ }
+}
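For readers unfamiliar with the ICU calls used by detectAndConvert() above, here is a minimal stand-alone sketch of the same detect-then-convert flow. It is an illustration only, with hard-coded sample input and error handling reduced to a bare minimum; it does not apply the getPreferred() heuristics described below.

// Illustration of the ICU detect-then-convert flow used above; not part of the patch.
#include <stdio.h>
#include <string.h>
#include "unicode/ucnv.h"
#include "unicode/ucsdet.h"

int main() {
    const char *input = "Bj\xF6rk";                 // "Björk" as ISO-8859-1 bytes
    UErrorCode status = U_ZERO_ERROR;

    // 1. ask ICU for the most likely source encoding
    UCharsetDetector *csd = ucsdet_open(&status);
    ucsdet_setText(csd, input, (int32_t) strlen(input), &status);
    const UCharsetMatch *ucm = ucsdet_detect(csd, &status);
    const char *enc = ucm ? ucsdet_getName(ucm, &status) : "ISO-8859-1";
    printf("detected encoding: %s\n", enc);

    // 2. convert from the detected encoding to UTF-8
    UConverter *src  = ucnv_open(enc, &status);
    UConverter *utf8 = ucnv_open("UTF-8", &status);
    char out[64];
    char *target = out;
    const char *source = input;
    ucnv_convertEx(utf8, src, &target, out + sizeof(out) - 1,
            &source, input + strlen(input),
            NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
    *target = 0;
    printf("as UTF-8: %s\n", U_SUCCESS(status) ? out : "(conversion failed)");

    ucnv_close(src);
    ucnv_close(utf8);
    ucsdet_close(csd);
    return 0;
}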
+
+/*
+ * When ICU detects multiple encoding matches, apply additional heuristics to determine
+ * which one is the best match, since ICU can't always be trusted to make the right choice.
+ *
+ * What this method does is:
+ * - decode the input using each of the matches found
+ * - recalculate the starting confidence level for multibyte encodings using a different
+ * algorithm and larger frequent character lists than ICU
+ * - devalue encodings whose conversion contains unlikely characters (symbols, reserved, etc.)
+ * - pick the highest match
+ * - signal to the caller whether this match is considered good: confidence > 15, and confidence
+ * delta with the next runner up > 15
+ */
+const UCharsetMatch *CharacterEncodingDetector::getPreferred(
+ const char *input, size_t len,
+ const UCharsetMatch** ucma, size_t nummatches,
+ bool *goodmatch, int *highestmatch) {
+
+ *goodmatch = false;
+ Vector<const UCharsetMatch*> matches;
+ UErrorCode status = U_ZERO_ERROR;
+
+ ALOGV("%zu matches", nummatches);
+ for (size_t i = 0; i < nummatches; i++) {
+ const char *encname = ucsdet_getName(ucma[i], &status);
+ int confidence = ucsdet_getConfidence(ucma[i], &status);
+ ALOGV("%zu: %s %d", i, encname, confidence);
+ matches.push_back(ucma[i]);
+ }
+
+ size_t num = matches.size();
+ if (num == 0) {
+ return NULL;
+ }
+ if (num == 1) {
+ int confidence = ucsdet_getConfidence(matches[0], &status);
+ if (confidence > 15) {
+ *goodmatch = true;
+ }
+ return matches[0];
+ }
+
+ ALOGV("considering %zu matches", num);
+
+ // keep track of how many "special" characters result when converting the input using each
+ // encoding
+ Vector<int> newconfidence;
+ for (size_t i = 0; i < num; i++) {
+ const uint16_t *freqdata = NULL;
+ float freqcoverage = 0;
+ status = U_ZERO_ERROR;
+ const char *encname = ucsdet_getName(matches[i], &status);
+ int confidence = ucsdet_getConfidence(matches[i], &status);
+ if (!strcmp("GB18030", encname)) {
+ freqdata = frequent_zhCN;
+ freqcoverage = frequent_zhCN_coverage;
+ } else if (!strcmp("Big5", encname)) {
+ freqdata = frequent_zhTW;
+ freqcoverage = frequent_zhTW_coverage;
+ } else if (!strcmp("EUC-KR", encname)) {
+ freqdata = frequent_ko;
+ freqcoverage = frequent_ko_coverage;
+ } else if (!strcmp("EUC-JP", encname)) {
+ freqdata = frequent_ja;
+ freqcoverage = frequent_ja_coverage;
+ } else if (!strcmp("Shift_JIS", encname)) {
+ freqdata = frequent_ja;
+ freqcoverage = frequent_ja_coverage;
+ }
+
+ ALOGV("%zu: %s %d", i, encname, confidence);
+ status = U_ZERO_ERROR;
+ UConverter *conv = ucnv_open(encname, &status);
+ int demerit = 0;
+ if (U_FAILURE(status)) {
+ ALOGV("failed to open %s: %d", encname, status);
+ confidence = 0;
+ demerit += 1000;
+ }
+ const char *source = input;
+ const char *sourceLimit = input + len;
+ status = U_ZERO_ERROR;
+ int frequentchars = 0;
+ int totalchars = 0;
+ while (true) {
+ // demerit the current encoding for each "special" character found after conversion.
+ // The amount of demerit is somewhat arbitrarily chosen.
+ int inchar;
+ if (source != sourceLimit) {
+ inchar = (source[0] << 8) + source[1];
+ }
+ UChar32 c = ucnv_getNextUChar(conv, &source, sourceLimit, &status);
+ if (!U_SUCCESS(status)) {
+ break;
+ }
+ if (c < 0x20 || (c >= 0x7f && c <= 0x009f)) {
+ ALOGV("control character %x", c);
+ demerit += 100;
+ } else if ((c == 0xa0) // no-break space
+ || (c >= 0xa2 && c <= 0xbe) // symbols, superscripts
+ || (c == 0xd7) || (c == 0xf7) // multiplication and division signs
+ || (c >= 0x2000 && c <= 0x209f)) { // punctuation, superscripts
+ ALOGV("unlikely character %x", c);
+ demerit += 10;
+ } else if (c >= 0xe000 && c <= 0xf8ff) {
+ ALOGV("private use character %x", c);
+ demerit += 30;
+ } else if (c >= 0x2190 && c <= 0x2bff) {
+ // this range comprises various symbol ranges that are unlikely to appear in
+ // music file metadata.
+ ALOGV("symbol %x", c);
+ demerit += 10;
+ } else if (c == 0xfffd) {
+ ALOGV("replacement character");
+ demerit += 50;
+ } else if (c >= 0xfff0 && c <= 0xfffc) {
+ ALOGV("unicode special %x", c);
+ demerit += 50;
+ } else if (freqdata != NULL) {
+ totalchars++;
+ if (isFrequent(freqdata, c)) {
+ frequentchars++;
+ }
+ }
+ }
+ if (freqdata != NULL && totalchars != 0) {
+ int myconfidence = 10 + float((100 * frequentchars) / totalchars) / freqcoverage;
+ ALOGV("ICU confidence: %d, my confidence: %d (%d %d)", confidence, myconfidence,
+ totalchars, frequentchars);
+ if (myconfidence > 100) myconfidence = 100;
+ if (myconfidence < 0) myconfidence = 0;
+ confidence = myconfidence;
+ }
+ ALOGV("%d-%d=%d", confidence, demerit, confidence - demerit);
+ newconfidence.push_back(confidence - demerit);
+ ucnv_close(conv);
+ if (i == 0 && (confidence - demerit) == 100) {
+ // no need to check any further, we'll end up using this match anyway
+ break;
+ }
+ }
+
+ // find match with highest confidence after adjusting for unlikely characters
+ int highest = newconfidence[0];
+ size_t highestidx = 0;
+ int runnerup = -10000;
+ int runnerupidx = -10000;
+ num = newconfidence.size();
+ for (size_t i = 1; i < num; i++) {
+ if (newconfidence[i] > highest) {
+ runnerup = highest;
+ runnerupidx = highestidx;
+ highest = newconfidence[i];
+ highestidx = i;
+ } else if (newconfidence[i] > runnerup){
+ runnerup = newconfidence[i];
+ runnerupidx = i;
+ }
+ }
+ status = U_ZERO_ERROR;
+ ALOGV("selecting: '%s' w/ %d confidence",
+ ucsdet_getName(matches[highestidx], &status), highest);
+ if (runnerupidx < 0) {
+ ALOGV("no runner up");
+ if (highest > 15) {
+ *goodmatch = true;
+ }
+ } else {
+ ALOGV("runner up: '%s' w/ %d confidence",
+ ucsdet_getName(matches[runnerupidx], &status), runnerup);
+ if (runnerup < 0) {
+ runnerup = 0;
+ }
+ if ((highest - runnerup) > 15) {
+ *goodmatch = true;
+ }
+ }
+ *highestmatch = highest;
+ return matches[highestidx];
+}
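As a quick numeric illustration of the confidence recalculation in getPreferred() above (the sample counts, coverage value, and demerit are invented for the example):

// Hypothetical walk-through of the multibyte confidence adjustment above.
// Say a GB18030 candidate decodes 40 CJK characters, 32 of them present in
// frequent_zhCN (coverage roughly 0.719), and one replacement character was
// seen during conversion (demerit 50).
#include <stdio.h>

int main() {
    int   frequentchars = 32;
    int   totalchars    = 40;
    float freqcoverage  = 0.719f;
    int   demerit       = 50;

    // same arithmetic as getPreferred(): integer percentage of frequent
    // characters, scaled by how much of the language the table covers
    int myconfidence = 10 + (float)((100 * frequentchars) / totalchars) / freqcoverage;
    if (myconfidence > 100) myconfidence = 100;
    if (myconfidence < 0) myconfidence = 0;

    // prints "confidence 100, after demerits 50"
    printf("confidence %d, after demerits %d\n", myconfidence, myconfidence - demerit);
    return 0;
}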
+
+
+bool CharacterEncodingDetector::isFrequent(const uint16_t *values, uint32_t c) {
+
+ int start = 0;
+ int end = 511; // All the tables have 512 entries
+ int mid = (start+end)/2;
+
+ while(start <= end) {
+ if(c == values[mid]) {
+ return true;
+ } else if (c > values[mid]) {
+ start = mid + 1;
+ } else {
+ end = mid - 1;
+ }
+
+ mid = (start + end) / 2;
+ }
+
+ return false;
+}
+
+
+} // namespace android
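Finally, a short usage sketch showing how a caller inside the Android tree might drive the detector added above. The tag values are invented for the example, and the snippet assumes the Android headers that declare CharacterEncodingDetector and OK.

// Illustration only: feeding raw metadata tags through the detector above.
#include <stdio.h>
#include <CharacterEncodingDetector.h>

using namespace android;

void convertMetadataToUtf8() {
    CharacterEncodingDetector detector;

    // raw values as read from the file; they may be in a legacy encoding
    detector.addTag("artist", "Bj\xF6rk");          // ISO-8859-1 bytes
    detector.addTag("album",  "Vespertine");
    detector.addTag("title",  "Pagan Poetry");

    // detect the common source encoding and rewrite every value as UTF-8
    detector.detectAndConvert();

    for (int i = 0; i < (int) detector.size(); i++) {
        const char *name;
        const char *value;
        if (detector.getTag(i, &name, &value) == OK) {
            // 'value' is now UTF-8 (or "???" if conversion failed)
            printf("%s: %s\n", name, value);
        }
    }
}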
diff --git a/media/libmedia/CharacterEncodingDetectorTables.h b/media/libmedia/CharacterEncodingDetectorTables.h
new file mode 100644
index 0000000..1fe1137
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetectorTables.h
@@ -0,0 +1,2092 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The 512 most frequently occurring characters for the zhCN language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_zhCN[] = {
+ 0x4E00, // 一, #2
+ 0x4E07, // 万, #306
+ 0x4E09, // 三, #138
+ 0x4E0A, // 上, #16
+ 0x4E0B, // 下, #25
+ 0x4E0D, // ä¸, #7
+ 0x4E0E, // 与, #133
+ 0x4E13, // 专, #151
+ 0x4E16, // 世, #346
+ 0x4E1A, // 业, #39
+ 0x4E1C, // 东, #197
+ 0x4E24, // 两, #376
+ 0x4E2A, // 个, #23
+ 0x4E2D, // 中, #4
+ 0x4E3A, // 为, #31
+ 0x4E3B, // 主, #95
+ 0x4E3E, // 举, #418
+ 0x4E48, // 么, #93
+ 0x4E4B, // 之, #131
+ 0x4E50, // ä¹, #130
+ 0x4E5F, // 也, #145
+ 0x4E66, // 书, #283
+ 0x4E70, // ä¹°, #483
+ 0x4E86, // 了, #13
+ 0x4E8B, // 事, #168
+ 0x4E8C, // 二, #218
+ 0x4E8E, // 于, #64
+ 0x4E94, // 五, #430
+ 0x4E9A, // 亚, #468
+ 0x4E9B, // 些, #366
+ 0x4EA4, // 交, #243
+ 0x4EA7, // 产, #86
+ 0x4EAB, // 享, #345
+ 0x4EAC, // 京, #206
+ 0x4EBA, // 人, #3
+ 0x4EC0, // 什, #287
+ 0x4ECB, // 介, #478
+ 0x4ECE, // 从, #381
+ 0x4ED6, // ä»–, #129
+ 0x4EE3, // 代, #241
+ 0x4EE5, // 以, #51
+ 0x4EEC, // 们, #83
+ 0x4EF6, // 件, #141
+ 0x4EF7, // ä»·, #140
+ 0x4EFB, // ä»», #383
+ 0x4F01, // ä¼, #439
+ 0x4F18, // 优, #374
+ 0x4F1A, // 会, #29
+ 0x4F20, // ä¼ , #222
+ 0x4F46, // 但, #451
+ 0x4F4D, // ä½, #208
+ 0x4F53, // 体, #98
+ 0x4F55, // 何, #339
+ 0x4F5C, // 作, #44
+ 0x4F60, // ä½ , #76
+ 0x4F7F, // 使, #272
+ 0x4F9B, // ä¾›, #375
+ 0x4FDD, // ä¿, #180
+ 0x4FE1, // ä¿¡, #84
+ 0x4FEE, // ä¿®, #437
+ 0x503C, // 值, #450
+ 0x505A, // åš, #368
+ 0x5065, // å¥, #484
+ 0x50CF, // åƒ, #487
+ 0x513F, // å„¿, #326
+ 0x5143, // å…ƒ, #202
+ 0x5148, // å…ˆ, #485
+ 0x5149, // å…‰, #254
+ 0x514B, // å…‹, #503
+ 0x514D, // å…, #349
+ 0x5165, // å…¥, #156
+ 0x5168, // å…¨, #47
+ 0x516C, // å…¬, #35
+ 0x5171, // å…±, #448
+ 0x5173, // å…³, #49
+ 0x5176, // å…¶, #195
+ 0x5177, // å…·, #329
+ 0x5185, // 内, #109
+ 0x518C, // 册, #225
+ 0x519B, // 军, #466
+ 0x51FA, // 出, #53
+ 0x51FB, // 击, #359
+ 0x5206, // 分, #22
+ 0x5217, // 列, #410
+ 0x521B, // 创, #399
+ 0x5229, // 利, #296
+ 0x522B, // 别, #372
+ 0x5230, // 到, #33
+ 0x5236, // 制, #192
+ 0x524D, // å‰, #117
+ 0x529B, // 力, #173
+ 0x529E, // 办, #436
+ 0x529F, // 功, #455
+ 0x52A0, // 加, #97
+ 0x52A1, // 务, #100
+ 0x52A8, // 动, #46
+ 0x52A9, // 助, #365
+ 0x5305, // 包, #331
+ 0x5316, // 化, #155
+ 0x5317, // 北, #194
+ 0x533A, // 区, #105
+ 0x533B, // 医, #234
+ 0x5341, // å, #294
+ 0x534E, // åŽ, #205
+ 0x5355, // å•, #259
+ 0x5357, // å—, #182
+ 0x535A, // åš, #153
+ 0x5361, // å¡, #332
+ 0x539F, // 原, #271
+ 0x53BB, // 去, #282
+ 0x53C2, // å‚, #500
+ 0x53CA, // åŠ, #255
+ 0x53CB, // å‹, #186
+ 0x53CD, // å, #422
+ 0x53D1, // å‘, #15
+ 0x53D7, // å—, #507
+ 0x53D8, // å˜, #395
+ 0x53E3, // å£, #293
+ 0x53EA, // åª, #340
+ 0x53EF, // å¯, #45
+ 0x53F0, // å°, #267
+ 0x53F7, // å·, #121
+ 0x53F8, // å¸, #150
+ 0x5404, // å„, #491
+ 0x5408, // åˆ, #115
+ 0x540C, // åŒ, #189
+ 0x540D, // å, #127
+ 0x540E, // åŽ, #75
+ 0x5411, // å‘, #459
+ 0x5427, // å§, #353
+ 0x544A, // å‘Š, #318
+ 0x5458, // 员, #232
+ 0x5468, // 周, #347
+ 0x548C, // 和, #43
+ 0x54C1, // å“, #36
+ 0x5546, // 商, #148
+ 0x5668, // 器, #228
+ 0x56DB, // å››, #352
+ 0x56DE, // 回, #38
+ 0x56E0, // å› , #355
+ 0x56E2, // 团, #412
+ 0x56ED, // å›­, #470
+ 0x56FD, // 国, #12
+ 0x56FE, // 图, #32
+ 0x5728, // 在, #10
+ 0x5730, // 地, #30
+ 0x573A, // 场, #177
+ 0x575B, // å›, #364
+ 0x578B, // åž‹, #274
+ 0x57CE, // 城, #172
+ 0x57FA, // 基, #315
+ 0x58EB, // 士, #434
+ 0x58F0, // 声, #397
+ 0x5904, // 处, #416
+ 0x5907, // 备, #270
+ 0x590D, // å¤, #122
+ 0x5916, // 外, #190
+ 0x591A, // 多, #40
+ 0x5927, // 大, #8
+ 0x5929, // 天, #52
+ 0x592A, // 太, #456
+ 0x5934, // 头, #258
+ 0x5973, // 女, #65
+ 0x597D, // 好, #62
+ 0x5982, // 如, #135
+ 0x5A31, // 娱, #452
+ 0x5B50, // å­, #37
+ 0x5B57, // å­—, #285
+ 0x5B66, // å­¦, #19
+ 0x5B89, // 安, #144
+ 0x5B8C, // 完, #469
+ 0x5B9A, // 定, #179
+ 0x5B9D, // å®, #188
+ 0x5B9E, // 实, #154
+ 0x5BA2, // 客, #174
+ 0x5BB6, // 家, #26
+ 0x5BB9, // 容, #307
+ 0x5BC6, // 密, #471
+ 0x5BF9, // 对, #90
+ 0x5BFC, // 导, #348
+ 0x5C06, // å°†, #265
+ 0x5C0F, // å°, #28
+ 0x5C11, // å°‘, #379
+ 0x5C14, // å°”, #490
+ 0x5C31, // å°±, #101
+ 0x5C55, // 展, #291
+ 0x5C71, // å±±, #239
+ 0x5DDE, // å·ž, #227
+ 0x5DE5, // å·¥, #73
+ 0x5DF1, // å·±, #480
+ 0x5DF2, // å·², #310
+ 0x5E02, // 市, #78
+ 0x5E03, // 布, #350
+ 0x5E08, // 师, #277
+ 0x5E16, // 帖, #396
+ 0x5E26, // 带, #449
+ 0x5E2E, // 帮, #461
+ 0x5E38, // 常, #319
+ 0x5E73, // å¹³, #217
+ 0x5E74, // å¹´, #20
+ 0x5E76, // 并, #440
+ 0x5E7F, // 广, #166
+ 0x5E93, // 库, #446
+ 0x5E94, // 应, #187
+ 0x5E97, // 店, #320
+ 0x5EA6, // 度, #114
+ 0x5EB7, // 康, #499
+ 0x5EFA, // 建, #211
+ 0x5F00, // å¼€, #72
+ 0x5F0F, // å¼, #207
+ 0x5F15, // 引, #495
+ 0x5F20, // å¼ , #385
+ 0x5F3A, // 强, #404
+ 0x5F53, // 当, #233
+ 0x5F55, // 录, #146
+ 0x5F62, // å½¢, #494
+ 0x5F69, // 彩, #356
+ 0x5F71, // å½±, #214
+ 0x5F88, // 很, #300
+ 0x5F97, // å¾—, #193
+ 0x5FAE, // å¾®, #245
+ 0x5FC3, // 心, #70
+ 0x5FEB, // å¿«, #324
+ 0x6001, // æ€, #508
+ 0x600E, // 怎, #370
+ 0x6027, // 性, #99
+ 0x603B, // 总, #398
+ 0x606F, // æ¯, #176
+ 0x60A8, // 您, #251
+ 0x60C5, // 情, #87
+ 0x60F3, // 想, #290
+ 0x610F, // æ„, #184
+ 0x611F, // æ„Ÿ, #253
+ 0x620F, // æˆ, #237
+ 0x6210, // æˆ, #71
+ 0x6211, // 我, #11
+ 0x6216, // 或, #321
+ 0x6218, // 战, #369
+ 0x6237, // 户, #215
+ 0x623F, // 房, #236
+ 0x6240, // 所, #147
+ 0x624B, // 手, #55
+ 0x624D, // æ‰, #407
+ 0x6253, // 打, #281
+ 0x6280, // 技, #203
+ 0x6295, // 投, #408
+ 0x62A4, // 护, #502
+ 0x62A5, // 报, #113
+ 0x62DB, // æ‹›, #363
+ 0x6301, // æŒ, #403
+ 0x6307, // 指, #414
+ 0x636E, // æ®, #409
+ 0x6392, // 排, #377
+ 0x63A5, // 接, #266
+ 0x63A8, // 推, #244
+ 0x63D0, // æ, #181
+ 0x641C, // æœ, #301
+ 0x64AD, // æ’­, #401
+ 0x652F, // 支, #400
+ 0x6536, // 收, #158
+ 0x653E, // 放, #317
+ 0x653F, // 政, #380
+ 0x6548, // 效, #496
+ 0x6559, // æ•™, #170
+ 0x6570, // æ•°, #136
+ 0x6587, // æ–‡, #21
+ 0x6599, // æ–™, #295
+ 0x65AF, // æ–¯, #473
+ 0x65B0, // æ–°, #14
+ 0x65B9, // æ–¹, #68
+ 0x65C5, // æ—…, #457
+ 0x65E0, // æ— , #164
+ 0x65E5, // æ—¥, #50
+ 0x65F6, // æ—¶, #18
+ 0x660E, // 明, #132
+ 0x6613, // 易, #428
+ 0x661F, // 星, #240
+ 0x662F, // 是, #6
+ 0x663E, // 显, #486
+ 0x66F4, // æ›´, #103
+ 0x6700, // 最, #61
+ 0x6708, // 月, #80
+ 0x6709, // 有, #5
+ 0x670D, // æœ, #94
+ 0x671F, // 期, #139
+ 0x672C, // 本, #56
+ 0x672F, // 术, #216
+ 0x673A, // 机, #27
+ 0x6743, // æƒ, #250
+ 0x6761, // æ¡, #309
+ 0x6765, // æ¥, #42
+ 0x677F, // æ¿, #505
+ 0x6797, // æž—, #475
+ 0x679C, // 果, #212
+ 0x67E5, // 查, #165
+ 0x6807, // æ ‡, #269
+ 0x6821, // æ ¡, #462
+ 0x6837, // æ ·, #314
+ 0x683C, // æ ¼, #238
+ 0x6848, // 案, #378
+ 0x697C, // 楼, #342
+ 0x6A21, // 模, #413
+ 0x6B21, // 次, #263
+ 0x6B22, // 欢, #443
+ 0x6B3E, // 款, #358
+ 0x6B63, // æ­£, #219
+ 0x6B64, // æ­¤, #362
+ 0x6BD4, // 比, #298
+ 0x6C11, // æ°‘, #279
+ 0x6C14, // æ°”, #303
+ 0x6C34, // æ°´, #163
+ 0x6C42, // 求, #373
+ 0x6C5F, // 江, #336
+ 0x6CA1, // 没, #229
+ 0x6CBB, // æ²», #425
+ 0x6CD5, // 法, #85
+ 0x6CE8, // 注, #119
+ 0x6D3B, // æ´», #231
+ 0x6D41, // æµ, #280
+ 0x6D4B, // 测, #460
+ 0x6D77, // æµ·, #124
+ 0x6D88, // 消, #415
+ 0x6DF1, // æ·±, #477
+ 0x6E05, // 清, #311
+ 0x6E38, // 游, #81
+ 0x6E90, // æº, #325
+ 0x706B, // ç«, #498
+ 0x70B9, // 点, #58
+ 0x70ED, // 热, #183
+ 0x7136, // 然, #308
+ 0x7167, // ç…§, #431
+ 0x7231, // 爱, #223
+ 0x7247, // 片, #128
+ 0x7248, // 版, #91
+ 0x724C, // 牌, #429
+ 0x7269, // 物, #169
+ 0x7279, // 特, #224
+ 0x738B, // 王, #351
+ 0x73A9, // 玩, #476
+ 0x73B0, // 现, #125
+ 0x7403, // çƒ, #367
+ 0x7406, // ç†, #69
+ 0x751F, // 生, #24
+ 0x7528, // 用, #17
+ 0x7531, // ç”±, #441
+ 0x7535, // 电, #34
+ 0x7537, // ç”·, #275
+ 0x754C, // 界, #419
+ 0x75C5, // ç—…, #371
+ 0x767B, // ç™», #204
+ 0x767D, // 白, #338
+ 0x767E, // 百, #157
+ 0x7684, // çš„, #1
+ 0x76D8, // 盘, #493
+ 0x76EE, // ç›®, #261
+ 0x76F4, // ç›´, #391
+ 0x76F8, // 相, #143
+ 0x7701, // çœ, #464
+ 0x770B, // 看, #54
+ 0x771F, // 真, #249
+ 0x7740, // ç€, #302
+ 0x77E5, // 知, #142
+ 0x7801, // ç , #257
+ 0x7814, // ç ”, #387
+ 0x793A, // 示, #334
+ 0x793E, // 社, #343
+ 0x795E, // 神, #330
+ 0x798F, // ç¦, #509
+ 0x79BB, // 离, #454
+ 0x79CD, // ç§, #278
+ 0x79D1, // 科, #126
+ 0x79EF, // 积, #390
+ 0x7A0B, // 程, #209
+ 0x7A76, // 究, #504
+ 0x7A7A, // 空, #312
+ 0x7ACB, // ç«‹, #393
+ 0x7AD9, // ç«™, #107
+ 0x7AE0, // ç« , #304
+ 0x7B2C, // 第, #96
+ 0x7B49, // ç­‰, #210
+ 0x7B54, // ç­”, #256
+ 0x7B80, // 简, #474
+ 0x7BA1, // 管, #221
+ 0x7C7B, // ç±», #246
+ 0x7CBE, // ç²¾, #226
+ 0x7CFB, // ç³», #89
+ 0x7D22, // ç´¢, #354
+ 0x7EA2, // 红, #417
+ 0x7EA7, // 级, #178
+ 0x7EBF, // 线, #108
+ 0x7EC4, // 组, #389
+ 0x7EC6, // 细, #442
+ 0x7ECF, // ç», #74
+ 0x7ED3, // 结, #333
+ 0x7ED9, // ç»™, #384
+ 0x7EDC, // 络, #472
+ 0x7EDF, // 统, #344
+ 0x7F16, // ç¼–, #424
+ 0x7F51, // 网, #9
+ 0x7F6E, // ç½®, #411
+ 0x7F8E, // 美, #60
+ 0x8001, // è€, #292
+ 0x8003, // 考, #288
+ 0x8005, // 者, #106
+ 0x800C, // 而, #297
+ 0x8054, // è”, #159
+ 0x80B2, // 育, #327
+ 0x80FD, // 能, #59
+ 0x81EA, // 自, #77
+ 0x8272, // 色, #198
+ 0x8282, // 节, #361
+ 0x82B1, // 花, #299
+ 0x82F1, // 英, #316
+ 0x8350, // è, #402
+ 0x836F, // è¯, #481
+ 0x8425, // è¥, #394
+ 0x85CF, // è—, #337
+ 0x884C, // 行, #41
+ 0x8868, // 表, #104
+ 0x88AB, // 被, #289
+ 0x88C5, // 装, #161
+ 0x897F, // 西, #199
+ 0x8981, // è¦, #48
+ 0x89C1, // è§, #360
+ 0x89C2, // 观, #423
+ 0x89C4, // 规, #453
+ 0x89C6, // 视, #120
+ 0x89E3, // 解, #264
+ 0x8A00, // 言, #433
+ 0x8BA1, // 计, #191
+ 0x8BA4, // 认, #482
+ 0x8BA9, // 让, #421
+ 0x8BAE, // è®®, #427
+ 0x8BAF, // 讯, #388
+ 0x8BB0, // è®°, #273
+ 0x8BBA, // 论, #66
+ 0x8BBE, // 设, #162
+ 0x8BC1, // è¯, #201
+ 0x8BC4, // 评, #111
+ 0x8BC6, // 识, #463
+ 0x8BD5, // 试, #323
+ 0x8BDD, // è¯, #247
+ 0x8BE2, // 询, #432
+ 0x8BE5, // 该, #447
+ 0x8BE6, // 详, #497
+ 0x8BED, // 语, #268
+ 0x8BF4, // 说, #112
+ 0x8BF7, // 请, #213
+ 0x8BFB, // 读, #341
+ 0x8C03, // è°ƒ, #438
+ 0x8D22, // è´¢, #488
+ 0x8D28, // è´¨, #386
+ 0x8D2D, // è´­, #260
+ 0x8D34, // è´´, #510
+ 0x8D39, // è´¹, #242
+ 0x8D44, // 资, #116
+ 0x8D77, // èµ·, #220
+ 0x8D85, // 超, #406
+ 0x8DEF, // è·¯, #235
+ 0x8EAB, // 身, #262
+ 0x8F66, // 车, #82
+ 0x8F6C, // 转, #322
+ 0x8F7D, // è½½, #175
+ 0x8FBE, // è¾¾, #435
+ 0x8FC7, // 过, #118
+ 0x8FD0, // è¿, #357
+ 0x8FD1, // è¿‘, #492
+ 0x8FD8, // 还, #171
+ 0x8FD9, // è¿™, #57
+ 0x8FDB, // è¿›, #160
+ 0x8FDE, // è¿ž, #489
+ 0x9009, // 选, #328
+ 0x901A, // 通, #137
+ 0x901F, // 速, #458
+ 0x9020, // 造, #511
+ 0x9053, // é“, #79
+ 0x90A3, // é‚£, #305
+ 0x90E8, // 部, #102
+ 0x90FD, // 都, #167
+ 0x914D, // é…, #479
+ 0x9152, // é…’, #444
+ 0x91CC, // 里, #196
+ 0x91CD, // é‡, #230
+ 0x91CF, // é‡, #248
+ 0x91D1, // 金, #134
+ 0x9500, // 销, #465
+ 0x957F, // é•¿, #152
+ 0x95E8, // é—¨, #185
+ 0x95EE, // é—®, #92
+ 0x95F4, // é—´, #88
+ 0x95FB, // é—», #313
+ 0x9605, // 阅, #467
+ 0x9633, // 阳, #420
+ 0x9645, // é™…, #501
+ 0x9650, // é™, #286
+ 0x9662, // 院, #276
+ 0x96C6, // 集, #284
+ 0x9700, // 需, #405
+ 0x9762, // é¢, #123
+ 0x97F3, // 音, #335
+ 0x9875, // 页, #63
+ 0x9879, // 项, #506
+ 0x9891, // 频, #200
+ 0x9898, // 题, #110
+ 0x98CE, // 风, #252
+ 0x98DF, // 食, #445
+ 0x9996, // 首, #149
+ 0x9999, // 香, #512
+ 0x9A6C, // 马, #392
+ 0x9A8C, // 验, #382
+ 0x9AD8, // 高, #67
+ 0x9F99, // é¾™, #426
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_zhCN_coverage=0.718950369339973;
+
+// The 512 most frequently occurring characters for the zhTW language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_zhTW[] = {
+ 0x4E00, // 一, #2
+ 0x4E09, // 三, #131
+ 0x4E0A, // 上, #12
+ 0x4E0B, // 下, #37
+ 0x4E0D, // ä¸, #6
+ 0x4E16, // 世, #312
+ 0x4E26, // 並, #434
+ 0x4E2D, // 中, #9
+ 0x4E3B, // 主, #97
+ 0x4E4B, // 之, #55
+ 0x4E5F, // 也, #95
+ 0x4E86, // 了, #19
+ 0x4E8B, // 事, #128
+ 0x4E8C, // 二, #187
+ 0x4E94, // 五, #339
+ 0x4E9B, // 些, #435
+ 0x4E9E, // 亞, #432
+ 0x4EA4, // 交, #264
+ 0x4EAB, // 享, #160
+ 0x4EBA, // 人, #3
+ 0x4EC0, // 什, #483
+ 0x4ECA, // 今, #380
+ 0x4ECB, // 介, #468
+ 0x4ED6, // ä»–, #65
+ 0x4EE3, // 代, #284
+ 0x4EE5, // 以, #26
+ 0x4EF6, // 件, #234
+ 0x4EFB, // ä»», #381
+ 0x4EFD, // 份, #447
+ 0x4F46, // 但, #281
+ 0x4F4D, // ä½, #202
+ 0x4F4F, // ä½, #471
+ 0x4F55, // 何, #334
+ 0x4F5C, // 作, #56
+ 0x4F60, // ä½ , #64
+ 0x4F7F, // 使, #236
+ 0x4F86, // 來, #38
+ 0x4F9B, // ä¾›, #397
+ 0x4FBF, // 便, #440
+ 0x4FC2, // ä¿‚, #506
+ 0x4FDD, // ä¿, #161
+ 0x4FE1, // ä¿¡, #268
+ 0x4FEE, // ä¿®, #473
+ 0x500B, // 個, #27
+ 0x5011, // 們, #109
+ 0x505A, // åš, #383
+ 0x5065, // å¥, #415
+ 0x5099, // å‚™, #461
+ 0x50B3, // 傳, #277
+ 0x50CF, // åƒ, #403
+ 0x50F9, // 價, #93
+ 0x512A, // 優, #396
+ 0x5143, // å…ƒ, #158
+ 0x5148, // å…ˆ, #382
+ 0x5149, // å…‰, #216
+ 0x514D, // å…, #321
+ 0x5152, // å…’, #374
+ 0x5165, // å…¥, #58
+ 0x5167, // å…§, #106
+ 0x5168, // å…¨, #67
+ 0x5169, // å…©, #322
+ 0x516C, // å…¬, #53
+ 0x516D, // å…­, #493
+ 0x5171, // å…±, #456
+ 0x5176, // å…¶, #148
+ 0x5177, // å…·, #328
+ 0x518A, // 冊, #360
+ 0x518D, // å†, #311
+ 0x51FA, // 出, #44
+ 0x5206, // 分, #15
+ 0x5217, // 列, #259
+ 0x5225, // 別, #361
+ 0x5229, // 利, #251
+ 0x5230, // 到, #29
+ 0x5247, // 則, #511
+ 0x524D, // å‰, #82
+ 0x5275, // 創, #409
+ 0x529B, // 力, #176
+ 0x529F, // 功, #430
+ 0x52A0, // 加, #87
+ 0x52A9, // 助, #465
+ 0x52D5, // å‹•, #48
+ 0x52D9, // å‹™, #102
+ 0x5305, // 包, #248
+ 0x5316, // 化, #223
+ 0x5317, // 北, #145
+ 0x5340, // å€, #60
+ 0x5341, // å, #242
+ 0x5357, // å—, #261
+ 0x535A, // åš, #484
+ 0x5361, // å¡, #327
+ 0x5370, // å°, #498
+ 0x5373, // å³, #351
+ 0x539F, // 原, #237
+ 0x53BB, // 去, #190
+ 0x53C3, // åƒ, #444
+ 0x53C8, // åˆ, #426
+ 0x53CA, // åŠ, #136
+ 0x53CB, // å‹, #142
+ 0x53D6, // å–, #422
+ 0x53D7, // å—, #410
+ 0x53E3, // å£, #357
+ 0x53EA, // åª, #250
+ 0x53EF, // å¯, #35
+ 0x53F0, // å°, #34
+ 0x53F8, // å¸, #226
+ 0x5403, // åƒ, #362
+ 0x5404, // å„, #454
+ 0x5408, // åˆ, #147
+ 0x540C, // åŒ, #173
+ 0x540D, // å, #108
+ 0x544A, // å‘Š, #186
+ 0x548C, // 和, #130
+ 0x54C1, // å“, #23
+ 0x54E1, // å“¡, #150
+ 0x5546, // 商, #75
+ 0x554F, // å•, #120
+ 0x559C, // 喜, #502
+ 0x55AE, // å–®, #210
+ 0x55CE, // å—Ž, #443
+ 0x5668, // 器, #305
+ 0x56DB, // å››, #318
+ 0x56DE, // 回, #59
+ 0x56E0, // å› , #253
+ 0x570B, // 國, #21
+ 0x5712, // 園, #345
+ 0x5716, // 圖, #73
+ 0x5718, // 團, #338
+ 0x5728, // 在, #11
+ 0x5730, // 地, #50
+ 0x578B, // åž‹, #270
+ 0x57CE, // 城, #466
+ 0x57FA, // 基, #349
+ 0x5831, // å ±, #127
+ 0x5834, // å ´, #165
+ 0x58EB, // 士, #372
+ 0x5916, // 外, #152
+ 0x591A, // 多, #54
+ 0x5927, // 大, #8
+ 0x5929, // 天, #43
+ 0x592A, // 太, #343
+ 0x5947, // 奇, #325
+ 0x5973, // 女, #85
+ 0x5979, // 她, #420
+ 0x597D, // 好, #22
+ 0x5982, // 如, #144
+ 0x5B50, // å­, #46
+ 0x5B57, // å­—, #275
+ 0x5B78, // å­¸, #49
+ 0x5B89, // 安, #239
+ 0x5B8C, // 完, #320
+ 0x5B9A, // 定, #159
+ 0x5BA2, // 客, #188
+ 0x5BB6, // 家, #31
+ 0x5BB9, // 容, #244
+ 0x5BE6, // 實, #198
+ 0x5BF6, // 寶, #367
+ 0x5C07, // å°‡, #232
+ 0x5C08, // å°ˆ, #133
+ 0x5C0B, // å°‹, #352
+ 0x5C0D, // å°, #126
+ 0x5C0E, // å°Ž, #418
+ 0x5C0F, // å°, #20
+ 0x5C11, // å°‘, #368
+ 0x5C31, // å°±, #63
+ 0x5C55, // 展, #341
+ 0x5C71, // å±±, #273
+ 0x5DE5, // å·¥, #121
+ 0x5DF1, // å·±, #402
+ 0x5DF2, // å·², #299
+ 0x5E02, // 市, #81
+ 0x5E2B, // 師, #262
+ 0x5E36, // 帶, #470
+ 0x5E38, // 常, #303
+ 0x5E73, // å¹³, #297
+ 0x5E74, // å¹´, #30
+ 0x5E97, // 店, #171
+ 0x5EA6, // 度, #220
+ 0x5EB7, // 康, #441
+ 0x5EE3, // 廣, #279
+ 0x5EFA, // 建, #254
+ 0x5F0F, // å¼, #155
+ 0x5F15, // 引, #346
+ 0x5F35, // å¼µ, #366
+ 0x5F37, // å¼·, #437
+ 0x5F71, // å½±, #94
+ 0x5F88, // 很, #177
+ 0x5F8C, // 後, #66
+ 0x5F97, // å¾—, #113
+ 0x5F9E, // 從, #436
+ 0x5FC3, // 心, #57
+ 0x5FEB, // å¿«, #292
+ 0x6027, // 性, #175
+ 0x606F, // æ¯, #378
+ 0x60A8, // 您, #252
+ 0x60C5, // 情, #123
+ 0x60F3, // 想, #178
+ 0x610F, // æ„, #168
+ 0x611B, // æ„›, #125
+ 0x611F, // æ„Ÿ, #211
+ 0x61C9, // 應, #164
+ 0x6210, // æˆ, #86
+ 0x6211, // 我, #7
+ 0x6216, // 或, #199
+ 0x6230, // 戰, #438
+ 0x6232, // 戲, #309
+ 0x6236, // 戶, #497
+ 0x623F, // 房, #274
+ 0x6240, // 所, #79
+ 0x624B, // 手, #68
+ 0x624D, // æ‰, #400
+ 0x6253, // 打, #278
+ 0x627E, // 找, #449
+ 0x6280, // 技, #332
+ 0x6295, // 投, #425
+ 0x62C9, // 拉, #500
+ 0x62CD, // æ‹, #398
+ 0x6307, // 指, #407
+ 0x6392, // 排, #458
+ 0x63A5, // 接, #326
+ 0x63A8, // 推, #153
+ 0x63D0, // æ, #235
+ 0x641C, // æœ, #314
+ 0x6469, // æ‘©, #472
+ 0x6536, // 收, #249
+ 0x6539, // 改, #508
+ 0x653E, // 放, #331
+ 0x653F, // 政, #295
+ 0x6559, // æ•™, #184
+ 0x6574, // æ•´, #394
+ 0x6578, // 數, #134
+ 0x6587, // æ–‡, #16
+ 0x6599, // æ–™, #167
+ 0x65AF, // æ–¯, #476
+ 0x65B0, // æ–°, #10
+ 0x65B9, // æ–¹, #96
+ 0x65BC, // æ–¼, #70
+ 0x65C5, // æ—…, #289
+ 0x65E5, // æ—¥, #18
+ 0x660E, // 明, #118
+ 0x6613, // 易, #482
+ 0x661F, // 星, #205
+ 0x662F, // 是, #5
+ 0x6642, // 時, #13
+ 0x66F4, // æ›´, #149
+ 0x66F8, // 書, #209
+ 0x6700, // 最, #51
+ 0x6703, // 會, #14
+ 0x6708, // 月, #25
+ 0x6709, // 有, #4
+ 0x670D, // æœ, #99
+ 0x671F, // 期, #139
+ 0x672A, // 未, #404
+ 0x672C, // 本, #45
+ 0x6771, // æ±, #221
+ 0x677F, // æ¿, #364
+ 0x6797, // æž—, #330
+ 0x679C, // 果, #179
+ 0x67E5, // 查, #283
+ 0x683C, // æ ¼, #157
+ 0x6848, // 案, #392
+ 0x689D, // æ¢, #406
+ 0x696D, // 業, #103
+ 0x6A02, // 樂, #116
+ 0x6A13, // 樓, #411
+ 0x6A19, // 標, #384
+ 0x6A23, // 樣, #306
+ 0x6A5F, // æ©Ÿ, #40
+ 0x6AA2, // 檢, #359
+ 0x6B0A, // 權, #228
+ 0x6B21, // 次, #227
+ 0x6B3E, // 款, #276
+ 0x6B4C, // 歌, #496
+ 0x6B61, // æ­¡, #427
+ 0x6B63, // æ­£, #206
+ 0x6B64, // æ­¤, #247
+ 0x6BCF, // æ¯, #391
+ 0x6BD4, // 比, #257
+ 0x6C11, // æ°‘, #230
+ 0x6C23, // æ°£, #200
+ 0x6C34, // æ°´, #140
+ 0x6C42, // 求, #501
+ 0x6C92, // æ²’, #162
+ 0x6CD5, // 法, #89
+ 0x6D3B, // æ´», #124
+ 0x6D41, // æµ, #315
+ 0x6D77, // æµ·, #258
+ 0x6D88, // 消, #342
+ 0x6E05, // 清, #329
+ 0x6E2F, // 港, #293
+ 0x6F14, // æ¼”, #491
+ 0x7063, // ç£, #195
+ 0x70BA, // 為, #39
+ 0x7121, // ç„¡, #107
+ 0x7136, // 然, #215
+ 0x7167, // ç…§, #376
+ 0x71B1, // 熱, #245
+ 0x7247, // 片, #90
+ 0x7248, // 版, #112
+ 0x724C, // 牌, #467
+ 0x7269, // 物, #110
+ 0x7279, // 特, #183
+ 0x738B, // 王, #287
+ 0x73A9, // 玩, #354
+ 0x73FE, // ç¾, #143
+ 0x7403, // çƒ, #350
+ 0x7406, // ç†, #105
+ 0x751F, // 生, #24
+ 0x7522, // 產, #201
+ 0x7528, // 用, #17
+ 0x7531, // ç”±, #288
+ 0x7537, // ç”·, #298
+ 0x754C, // 界, #399
+ 0x7559, // ç•™, #218
+ 0x756B, // ç•«, #412
+ 0x7576, // 當, #185
+ 0x767B, // ç™», #138
+ 0x767C, // 發, #28
+ 0x767D, // 白, #377
+ 0x767E, // 百, #393
+ 0x7684, // çš„, #1
+ 0x76EE, // ç›®, #271
+ 0x76F4, // ç›´, #379
+ 0x76F8, // 相, #98
+ 0x770B, // 看, #52
+ 0x771F, // 真, #180
+ 0x773C, // 眼, #433
+ 0x77E5, // 知, #170
+ 0x78BC, // 碼, #481
+ 0x793A, // 示, #353
+ 0x793E, // 社, #333
+ 0x795E, // 神, #304
+ 0x7968, // 票, #477
+ 0x798F, // ç¦, #494
+ 0x79C1, // ç§, #507
+ 0x79D1, // 科, #280
+ 0x7A0B, // 程, #272
+ 0x7A2E, // 種, #337
+ 0x7A4D, // ç©, #385
+ 0x7A7A, // 空, #324
+ 0x7ACB, // ç«‹, #286
+ 0x7AD9, // ç«™, #117
+ 0x7AE0, // ç« , #141
+ 0x7B2C, // 第, #135
+ 0x7B49, // ç­‰, #240
+ 0x7BA1, // 管, #340
+ 0x7BC0, // 節, #431
+ 0x7BC7, // 篇, #479
+ 0x7C21, // ç°¡, #499
+ 0x7CBE, // ç²¾, #213
+ 0x7CFB, // ç³», #212
+ 0x7D04, // ç´„, #462
+ 0x7D05, // ç´…, #452
+ 0x7D1A, // ç´š, #267
+ 0x7D30, // ç´°, #486
+ 0x7D44, // 組, #335
+ 0x7D50, // çµ, #243
+ 0x7D66, // 給, #355
+ 0x7D71, // çµ±, #375
+ 0x7D93, // 經, #111
+ 0x7DB2, // 網, #32
+ 0x7DDA, // ç·š, #151
+ 0x7E23, // 縣, #439
+ 0x7E3D, // 總, #370
+ 0x7F8E, // 美, #41
+ 0x7FA9, // 義, #504
+ 0x8001, // è€, #290
+ 0x8003, // 考, #428
+ 0x8005, // 者, #92
+ 0x800C, // 而, #217
+ 0x805E, // èž, #181
+ 0x806F, // è¯, #310
+ 0x8072, // è², #413
+ 0x80A1, // è‚¡, #390
+ 0x80B2, // 育, #453
+ 0x80FD, // 能, #71
+ 0x8166, // è…¦, #408
+ 0x81EA, // 自, #61
+ 0x81F3, // 至, #344
+ 0x8207, // 與, #84
+ 0x8209, // 舉, #463
+ 0x8272, // 色, #192
+ 0x82B1, // 花, #255
+ 0x82F1, // 英, #348
+ 0x83EF, // è¯, #196
+ 0x842C, // è¬, #316
+ 0x843D, // è½, #308
+ 0x8457, // è‘—, #233
+ 0x85A6, // è–¦, #401
+ 0x85CF, // è—, #503
+ 0x85DD, // è—, #488
+ 0x8655, // 處, #419
+ 0x865F, // 號, #191
+ 0x884C, // 行, #47
+ 0x8853, // è¡“, #395
+ 0x8868, // 表, #77
+ 0x88AB, // 被, #291
+ 0x88DD, // è£, #256
+ 0x88E1, // 裡, #369
+ 0x88FD, // 製, #510
+ 0x897F, // 西, #300
+ 0x8981, // è¦, #36
+ 0x898B, // 見, #307
+ 0x8996, // 視, #204
+ 0x89BA, // 覺, #450
+ 0x89BD, // 覽, #387
+ 0x89C0, // 觀, #365
+ 0x89E3, // 解, #323
+ 0x8A00, // 言, #169
+ 0x8A02, // 訂, #423
+ 0x8A08, // 計, #225
+ 0x8A0A, // 訊, #156
+ 0x8A0E, // 討, #373
+ 0x8A18, // 記, #222
+ 0x8A2D, // 設, #174
+ 0x8A3B, // 註, #356
+ 0x8A55, // è©•, #246
+ 0x8A66, // 試, #448
+ 0x8A71, // 話, #229
+ 0x8A72, // 該, #446
+ 0x8A8D, // èª, #464
+ 0x8A9E, // 語, #371
+ 0x8AAA, // 說, #91
+ 0x8ABF, // 調, #509
+ 0x8ACB, // è«‹, #119
+ 0x8AD6, // è«–, #114
+ 0x8B1D, // è¬, #389
+ 0x8B49, // è­‰, #429
+ 0x8B58, // è­˜, #416
+ 0x8B70, // è­°, #485
+ 0x8B77, // è­·, #475
+ 0x8B80, // 讀, #386
+ 0x8B8A, // 變, #388
+ 0x8B93, // 讓, #336
+ 0x8CA8, // 貨, #313
+ 0x8CB7, // è²·, #260
+ 0x8CBB, // è²», #203
+ 0x8CC7, // 資, #62
+ 0x8CE3, // è³£, #294
+ 0x8CEA, // 質, #457
+ 0x8CFC, // è³¼, #189
+ 0x8D77, // èµ·, #214
+ 0x8D85, // 超, #296
+ 0x8DDF, // è·Ÿ, #489
+ 0x8DEF, // è·¯, #137
+ 0x8EAB, // 身, #197
+ 0x8ECA, // 車, #76
+ 0x8F09, // 載, #301
+ 0x8F49, // 轉, #282
+ 0x8FD1, // è¿‘, #414
+ 0x9001, // é€, #363
+ 0x9019, // 這, #42
+ 0x901A, // 通, #207
+ 0x901F, // 速, #495
+ 0x9020, // 造, #455
+ 0x9023, // 連, #285
+ 0x9032, // 進, #231
+ 0x904A, // éŠ, #132
+ 0x904B, // é‹, #219
+ 0x904E, // éŽ, #101
+ 0x9053, // é“, #146
+ 0x9054, // é”, #417
+ 0x9078, // é¸, #182
+ 0x9084, // é‚„, #154
+ 0x908A, // é‚Š, #487
+ 0x90A3, // é‚£, #269
+ 0x90E8, // 部, #78
+ 0x90FD, // 都, #104
+ 0x914D, // é…, #421
+ 0x9152, // é…’, #512
+ 0x91AB, // 醫, #358
+ 0x91CD, // é‡, #224
+ 0x91CF, // é‡, #319
+ 0x91D1, // 金, #115
+ 0x9304, // 錄, #302
+ 0x9577, // é•·, #172
+ 0x9580, // é–€, #193
+ 0x958B, // é–‹, #72
+ 0x9593, // é–“, #80
+ 0x95B1, // é–±, #405
+ 0x95DC, // 關, #74
+ 0x963F, // 阿, #460
+ 0x9650, // é™, #265
+ 0x9662, // 院, #474
+ 0x9664, // 除, #478
+ 0x969B, // éš›, #459
+ 0x96C6, // 集, #347
+ 0x96E2, // 離, #442
+ 0x96FB, // é›», #33
+ 0x9700, // 需, #445
+ 0x975E, // éž, #451
+ 0x9762, // é¢, #129
+ 0x97F3, // 音, #194
+ 0x9801, // é , #83
+ 0x982D, // é ­, #238
+ 0x984C, // 題, #122
+ 0x985E, // é¡ž, #163
+ 0x98A8, // 風, #266
+ 0x98DF, // 食, #208
+ 0x9910, // é¤, #469
+ 0x9928, // 館, #424
+ 0x9996, // 首, #166
+ 0x9999, // 香, #263
+ 0x99AC, // 馬, #317
+ 0x9A57, // é©—, #492
+ 0x9AD4, // é«”, #100
+ 0x9AD8, // 高, #88
+ 0x9EBC, // 麼, #241
+ 0x9EC3, // 黃, #480
+ 0x9ED1, // 黑, #490
+ 0x9EDE, // 點, #69
+ 0x9F8D, // é¾, #505
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_zhTW_coverage=0.704841200026877;
+
+// The 512 most frequently occurring characters for the ja language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_ja[] = {
+ 0x3005, // 々, #352
+ 0x3041, // ã, #486
+ 0x3042, // ã‚, #50
+ 0x3044, // ã„, #2
+ 0x3046, // ã†, #33
+ 0x3048, // ãˆ, #83
+ 0x304A, // ãŠ, #37
+ 0x304B, // ã‹, #21
+ 0x304C, // ãŒ, #17
+ 0x304D, // ã, #51
+ 0x304E, // ãŽ, #324
+ 0x304F, // ã, #38
+ 0x3050, // ã, #334
+ 0x3051, // ã‘, #60
+ 0x3052, // ã’, #296
+ 0x3053, // ã“, #34
+ 0x3054, // ã”, #100
+ 0x3055, // ã•, #31
+ 0x3056, // ã–, #378
+ 0x3057, // ã—, #4
+ 0x3058, // ã˜, #121
+ 0x3059, // ã™, #12
+ 0x305A, // ãš, #215
+ 0x305B, // ã›, #86
+ 0x305D, // ã, #68
+ 0x305F, // ãŸ, #11
+ 0x3060, // ã , #42
+ 0x3061, // ã¡, #67
+ 0x3063, // ã£, #23
+ 0x3064, // ã¤, #73
+ 0x3066, // ã¦, #7
+ 0x3067, // ã§, #6
+ 0x3068, // ã¨, #14
+ 0x3069, // ã©, #75
+ 0x306A, // ãª, #8
+ 0x306B, // ã«, #5
+ 0x306D, // ã­, #123
+ 0x306E, // ã®, #1
+ 0x306F, // ã¯, #16
+ 0x3070, // ã°, #150
+ 0x3071, // ã±, #259
+ 0x3072, // ã², #364
+ 0x3073, // ã³, #266
+ 0x3075, // ãµ, #484
+ 0x3076, // ã¶, #330
+ 0x3078, // ã¸, #146
+ 0x3079, // ã¹, #207
+ 0x307B, // ã», #254
+ 0x307E, // ã¾, #18
+ 0x307F, // ã¿, #74
+ 0x3080, // ã‚€, #285
+ 0x3081, // ã‚, #78
+ 0x3082, // ã‚‚, #32
+ 0x3083, // ゃ, #111
+ 0x3084, // ã‚„, #85
+ 0x3086, // ゆ, #392
+ 0x3087, // ょ, #224
+ 0x3088, // よ, #63
+ 0x3089, // ら, #29
+ 0x308A, // ã‚Š, #28
+ 0x308B, // ã‚‹, #9
+ 0x308C, // れ, #35
+ 0x308D, // ã‚, #127
+ 0x308F, // ã‚, #88
+ 0x3092, // ã‚’, #19
+ 0x3093, // ã‚“, #22
+ 0x30A1, // ã‚¡, #193
+ 0x30A2, // ã‚¢, #27
+ 0x30A3, // ã‚£, #70
+ 0x30A4, // イ, #15
+ 0x30A6, // ウ, #89
+ 0x30A7, // ェ, #134
+ 0x30A8, // エ, #81
+ 0x30A9, // ã‚©, #225
+ 0x30AA, // オ, #76
+ 0x30AB, // ã‚«, #52
+ 0x30AC, // ガ, #147
+ 0x30AD, // ã‚­, #66
+ 0x30AE, // ã‚®, #246
+ 0x30AF, // ク, #25
+ 0x30B0, // ã‚°, #39
+ 0x30B1, // ケ, #137
+ 0x30B2, // ゲ, #200
+ 0x30B3, // コ, #46
+ 0x30B4, // ã‚´, #183
+ 0x30B5, // サ, #64
+ 0x30B6, // ザ, #221
+ 0x30B7, // ã‚·, #48
+ 0x30B8, // ジ, #55
+ 0x30B9, // ス, #13
+ 0x30BA, // ズ, #103
+ 0x30BB, // ã‚», #109
+ 0x30BC, // ゼ, #499
+ 0x30BD, // ソ, #175
+ 0x30BF, // ã‚¿, #45
+ 0x30C0, // ダ, #104
+ 0x30C1, // ãƒ, #71
+ 0x30C3, // ッ, #20
+ 0x30C4, // ツ, #119
+ 0x30C6, // テ, #59
+ 0x30C7, // デ, #82
+ 0x30C8, // ト, #10
+ 0x30C9, // ド, #44
+ 0x30CA, // ナ, #102
+ 0x30CB, // ニ, #72
+ 0x30CD, // ãƒ, #117
+ 0x30CE, // ノ, #192
+ 0x30CF, // ãƒ, #164
+ 0x30D0, // ãƒ, #62
+ 0x30D1, // パ, #90
+ 0x30D2, // ヒ, #398
+ 0x30D3, // ビ, #77
+ 0x30D4, // ピ, #135
+ 0x30D5, // フ, #47
+ 0x30D6, // ブ, #56
+ 0x30D7, // プ, #43
+ 0x30D8, // ヘ, #268
+ 0x30D9, // ベ, #157
+ 0x30DA, // ペ, #125
+ 0x30DB, // ホ, #155
+ 0x30DC, // ボ, #168
+ 0x30DD, // ãƒ, #114
+ 0x30DE, // マ, #57
+ 0x30DF, // ミ, #97
+ 0x30E0, // ム, #69
+ 0x30E1, // メ, #53
+ 0x30E2, // モ, #142
+ 0x30E3, // ャ, #93
+ 0x30E4, // ヤ, #258
+ 0x30E5, // ュ, #79
+ 0x30E6, // ユ, #405
+ 0x30E7, // ョ, #98
+ 0x30E9, // ラ, #26
+ 0x30EA, // リ, #30
+ 0x30EB, // ル, #24
+ 0x30EC, // レ, #41
+ 0x30ED, // ロ, #40
+ 0x30EF, // ワ, #144
+ 0x30F3, // ン, #3
+ 0x30F4, // ヴ, #483
+ 0x30FD, // ヽ, #501
+ 0x4E00, // 一, #84
+ 0x4E07, // 万, #337
+ 0x4E09, // 三, #323
+ 0x4E0A, // 上, #133
+ 0x4E0B, // 下, #180
+ 0x4E0D, // ä¸, #277
+ 0x4E16, // 世, #385
+ 0x4E2D, // 中, #87
+ 0x4E3B, // 主, #432
+ 0x4E88, // 予, #326
+ 0x4E8B, // 事, #95
+ 0x4E8C, // 二, #394
+ 0x4E95, // 井, #468
+ 0x4EA4, // 交, #410
+ 0x4EAC, // 京, #260
+ 0x4EBA, // 人, #61
+ 0x4ECA, // 今, #184
+ 0x4ECB, // 介, #358
+ 0x4ED5, // 仕, #391
+ 0x4ED6, // ä»–, #256
+ 0x4ED8, // 付, #243
+ 0x4EE3, // 代, #280
+ 0x4EE5, // 以, #216
+ 0x4EF6, // 件, #190
+ 0x4F1A, // 会, #105
+ 0x4F4D, // ä½, #177
+ 0x4F4F, // ä½, #376
+ 0x4F53, // 体, #223
+ 0x4F55, // 何, #294
+ 0x4F5C, // 作, #154
+ 0x4F7F, // 使, #233
+ 0x4F9B, // ä¾›, #503
+ 0x4FA1, // 価, #217
+ 0x4FBF, // 便, #511
+ 0x4FDD, // ä¿, #279
+ 0x4FE1, // ä¿¡, #271
+ 0x500B, // 個, #415
+ 0x50CF, // åƒ, #178
+ 0x512A, // 優, #403
+ 0x5143, // å…ƒ, #384
+ 0x5148, // å…ˆ, #311
+ 0x5149, // å…‰, #488
+ 0x5165, // å…¥, #115
+ 0x5168, // å…¨, #173
+ 0x516C, // å…¬, #287
+ 0x5177, // å…·, #447
+ 0x5185, // 内, #169
+ 0x5186, // 円, #131
+ 0x5199, // 写, #275
+ 0x51FA, // 出, #110
+ 0x5206, // 分, #130
+ 0x5207, // 切, #401
+ 0x521D, // åˆ, #319
+ 0x5225, // 別, #290
+ 0x5229, // 利, #226
+ 0x5236, // 制, #507
+ 0x524D, // å‰, #124
+ 0x529B, // 力, #272
+ 0x52A0, // 加, #249
+ 0x52D5, // å‹•, #120
+ 0x52D9, // å‹™, #421
+ 0x52DF, // å‹Ÿ, #476
+ 0x5316, // 化, #308
+ 0x5317, // 北, #341
+ 0x533A, // 区, #348
+ 0x539F, // 原, #321
+ 0x53C2, // å‚, #452
+ 0x53CB, // å‹, #451
+ 0x53D6, // å–, #237
+ 0x53D7, // å—, #354
+ 0x53E3, // å£, #289
+ 0x53E4, // å¤, #339
+ 0x53EF, // å¯, #298
+ 0x53F0, // å°, #439
+ 0x53F7, // å·, #361
+ 0x5408, // åˆ, #118
+ 0x540C, // åŒ, #263
+ 0x540D, // å, #65
+ 0x5411, // å‘, #434
+ 0x544A, // å‘Š, #386
+ 0x5468, // 周, #393
+ 0x5473, // 味, #299
+ 0x548C, // 和, #350
+ 0x54C1, // å“, #96
+ 0x54E1, // å“¡, #293
+ 0x5546, // 商, #198
+ 0x554F, // å•, #158
+ 0x55B6, // å–¶, #438
+ 0x5668, // 器, #366
+ 0x56DE, // 回, #143
+ 0x56F3, // 図, #444
+ 0x56FD, // 国, #153
+ 0x5712, // 園, #435
+ 0x571F, // 土, #239
+ 0x5728, // 在, #351
+ 0x5730, // 地, #163
+ 0x578B, // åž‹, #430
+ 0x5831, // å ±, #112
+ 0x5834, // å ´, #139
+ 0x58F2, // 売, #232
+ 0x5909, // 変, #306
+ 0x5916, // 外, #222
+ 0x591A, // 多, #336
+ 0x5927, // 大, #80
+ 0x5929, // 天, #278
+ 0x5973, // 女, #161
+ 0x597D, // 好, #349
+ 0x5A5A, // å©š, #479
+ 0x5B50, // å­, #113
+ 0x5B57, // å­—, #492
+ 0x5B66, // å­¦, #132
+ 0x5B89, // 安, #295
+ 0x5B9A, // 定, #145
+ 0x5B9F, // 実, #220
+ 0x5BA4, // 室, #482
+ 0x5BAE, // å®®, #487
+ 0x5BB6, // 家, #211
+ 0x5BB9, // 容, #333
+ 0x5BFE, // 対, #252
+ 0x5C02, // å°‚, #474
+ 0x5C0F, // å°, #212
+ 0x5C11, // å°‘, #377
+ 0x5C4B, // 屋, #284
+ 0x5C71, // å±±, #206
+ 0x5CA1, // 岡, #429
+ 0x5CF6, // 島, #297
+ 0x5DDD, // å·, #253
+ 0x5DE5, // å·¥, #374
+ 0x5E02, // 市, #159
+ 0x5E2F, // 帯, #416
+ 0x5E38, // 常, #437
+ 0x5E73, // å¹³, #390
+ 0x5E74, // å¹´, #54
+ 0x5E83, // 広, #367
+ 0x5E97, // 店, #149
+ 0x5EA6, // 度, #269
+ 0x5EAB, // 庫, #380
+ 0x5F0F, // å¼, #265
+ 0x5F15, // 引, #345
+ 0x5F37, // å¼·, #446
+ 0x5F53, // 当, #240
+ 0x5F62, // å½¢, #502
+ 0x5F8C, // 後, #230
+ 0x5F97, // å¾—, #490
+ 0x5FC3, // 心, #307
+ 0x5FC5, // å¿…, #422
+ 0x5FDC, // 応, #356
+ 0x601D, // æ€, #189
+ 0x6027, // 性, #201
+ 0x6075, // æµ, #400
+ 0x60C5, // 情, #140
+ 0x60F3, // 想, #477
+ 0x610F, // æ„, #305
+ 0x611B, // æ„›, #273
+ 0x611F, // æ„Ÿ, #257
+ 0x6210, // æˆ, #262
+ 0x6226, // 戦, #365
+ 0x6240, // 所, #236
+ 0x624B, // 手, #160
+ 0x6295, // 投, #129
+ 0x6301, // æŒ, #355
+ 0x6307, // 指, #425
+ 0x63A2, // 探, #369
+ 0x63B2, // 掲, #399
+ 0x643A, // æº, #459
+ 0x652F, // 支, #512
+ 0x653E, // 放, #469
+ 0x6559, // æ•™, #270
+ 0x6570, // æ•°, #181
+ 0x6587, // æ–‡, #202
+ 0x6599, // æ–™, #106
+ 0x65B0, // æ–°, #99
+ 0x65B9, // æ–¹, #126
+ 0x65C5, // æ—…, #445
+ 0x65E5, // æ—¥, #36
+ 0x660E, // 明, #300
+ 0x6620, // 映, #418
+ 0x6642, // 時, #107
+ 0x66F4, // æ›´, #359
+ 0x66F8, // 書, #174
+ 0x6700, // 最, #152
+ 0x6708, // 月, #49
+ 0x6709, // 有, #302
+ 0x671F, // 期, #332
+ 0x6728, // 木, #203
+ 0x672C, // 本, #92
+ 0x6750, // æ, #489
+ 0x6751, // æ‘, #466
+ 0x6765, // æ¥, #267
+ 0x6771, // æ±, #191
+ 0x677F, // æ¿, #411
+ 0x679C, // 果, #441
+ 0x6821, // æ ¡, #327
+ 0x682A, // æ ª, #412
+ 0x683C, // æ ¼, #228
+ 0x691C, // 検, #179
+ 0x696D, // 業, #166
+ 0x697D, // 楽, #172
+ 0x69D8, // 様, #255
+ 0x6A5F, // æ©Ÿ, #235
+ 0x6B21, // 次, #318
+ 0x6B62, // æ­¢, #475
+ 0x6B63, // æ­£, #312
+ 0x6C17, // æ°—, #116
+ 0x6C34, // æ°´, #165
+ 0x6C42, // 求, #465
+ 0x6C7A, // 決, #370
+ 0x6CBB, // æ²», #505
+ 0x6CC1, // æ³, #462
+ 0x6CD5, // 法, #227
+ 0x6CE8, // 注, #372
+ 0x6D3B, // æ´», #303
+ 0x6D41, // æµ, #480
+ 0x6D77, // æµ·, #274
+ 0x6E08, // 済, #417
+ 0x6F14, // æ¼”, #504
+ 0x706B, // ç«, #264
+ 0x70B9, // 点, #331
+ 0x7121, // ç„¡, #58
+ 0x7248, // 版, #409
+ 0x7269, // 物, #170
+ 0x7279, // 特, #242
+ 0x72B6, // 状, #458
+ 0x73FE, // ç¾, #322
+ 0x7406, // ç†, #162
+ 0x751F, // 生, #122
+ 0x7523, // 産, #320
+ 0x7528, // 用, #94
+ 0x7530, // ç”°, #195
+ 0x7537, // ç”·, #373
+ 0x753A, // 町, #314
+ 0x753B, // ç”», #91
+ 0x754C, // 界, #436
+ 0x756A, // 番, #261
+ 0x75C5, // ç—…, #428
+ 0x767A, // 発, #194
+ 0x767B, // ç™», #231
+ 0x767D, // 白, #419
+ 0x7684, // çš„, #251
+ 0x76EE, // ç›®, #197
+ 0x76F4, // ç›´, #497
+ 0x76F8, // 相, #286
+ 0x770C, // 県, #199
+ 0x771F, // 真, #219
+ 0x7740, // ç€, #283
+ 0x77E5, // 知, #185
+ 0x77F3, // 石, #500
+ 0x78BA, // 確, #383
+ 0x793A, // 示, #241
+ 0x793E, // 社, #167
+ 0x795E, // 神, #315
+ 0x798F, // ç¦, #423
+ 0x79C1, // ç§, #347
+ 0x79D1, // 科, #420
+ 0x7A0E, // 税, #368
+ 0x7A2E, // 種, #455
+ 0x7A3F, // 稿, #148
+ 0x7A7A, // 空, #427
+ 0x7ACB, // ç«‹, #309
+ 0x7B11, // 笑, #454
+ 0x7B2C, // 第, #317
+ 0x7B49, // ç­‰, #457
+ 0x7B54, // ç­”, #426
+ 0x7BA1, // 管, #481
+ 0x7CFB, // ç³», #408
+ 0x7D04, // ç´„, #276
+ 0x7D20, // ç´ , #407
+ 0x7D22, // ç´¢, #214
+ 0x7D30, // ç´°, #381
+ 0x7D39, // ç´¹, #471
+ 0x7D42, // 終, #456
+ 0x7D44, // 組, #424
+ 0x7D4C, // 経, #360
+ 0x7D50, // çµ, #291
+ 0x7D9A, // 続, #357
+ 0x7DCF, // ç·, #467
+ 0x7DDA, // ç·š, #338
+ 0x7DE8, // ç·¨, #453
+ 0x7F8E, // 美, #204
+ 0x8003, // 考, #387
+ 0x8005, // 者, #151
+ 0x805E, // èž, #463
+ 0x8077, // è·, #363
+ 0x80B2, // 育, #433
+ 0x80FD, // 能, #250
+ 0x8179, // è…¹, #396
+ 0x81EA, // 自, #156
+ 0x826F, // 良, #329
+ 0x8272, // 色, #402
+ 0x82B1, // 花, #440
+ 0x82B8, // 芸, #413
+ 0x82F1, // 英, #485
+ 0x8449, // 葉, #472
+ 0x884C, // 行, #128
+ 0x8853, // è¡“, #460
+ 0x8868, // 表, #209
+ 0x88FD, // 製, #431
+ 0x897F, // 西, #406
+ 0x8981, // è¦, #313
+ 0x898B, // 見, #101
+ 0x898F, // è¦, #375
+ 0x89A7, // 覧, #171
+ 0x89E3, // 解, #388
+ 0x8A00, // 言, #210
+ 0x8A08, // 計, #343
+ 0x8A18, // 記, #136
+ 0x8A2D, // 設, #292
+ 0x8A71, // 話, #213
+ 0x8A73, // 詳, #371
+ 0x8A8D, // èª, #404
+ 0x8A9E, // 語, #234
+ 0x8AAC, // 説, #494
+ 0x8AAD, // 読, #301
+ 0x8ABF, // 調, #443
+ 0x8AC7, // 談, #448
+ 0x8B77, // è­·, #509
+ 0x8C37, // è°·, #506
+ 0x8CA9, // 販, #362
+ 0x8CB7, // è²·, #346
+ 0x8CC7, // 資, #473
+ 0x8CEA, // 質, #281
+ 0x8CFC, // è³¼, #495
+ 0x8EAB, // 身, #470
+ 0x8ECA, // 車, #205
+ 0x8EE2, // 転, #335
+ 0x8F09, // 載, #342
+ 0x8FBC, // è¾¼, #229
+ 0x8FD1, // è¿‘, #304
+ 0x8FD4, // è¿”, #461
+ 0x8FFD, // 追, #379
+ 0x9001, // é€, #186
+ 0x901A, // 通, #182
+ 0x901F, // 速, #340
+ 0x9023, // 連, #244
+ 0x904B, // é‹, #382
+ 0x904E, // éŽ, #498
+ 0x9053, // é“, #282
+ 0x9054, // é”, #450
+ 0x9055, // é•, #414
+ 0x9078, // é¸, #288
+ 0x90E8, // 部, #208
+ 0x90FD, // 都, #344
+ 0x914D, // é…, #389
+ 0x91CD, // é‡, #478
+ 0x91CE, // 野, #245
+ 0x91D1, // 金, #138
+ 0x9332, // 録, #238
+ 0x9577, // é•·, #247
+ 0x9580, // é–€, #508
+ 0x958B, // é–‹, #248
+ 0x9593, // é–“, #141
+ 0x95A2, // é–¢, #188
+ 0x962A, // 阪, #496
+ 0x9650, // é™, #395
+ 0x9662, // 院, #449
+ 0x9664, // 除, #510
+ 0x969B, // éš›, #493
+ 0x96C6, // 集, #196
+ 0x96D1, // 雑, #442
+ 0x96FB, // é›», #187
+ 0x9762, // é¢, #328
+ 0x97F3, // 音, #325
+ 0x984C, // 題, #310
+ 0x985E, // é¡ž, #491
+ 0x98A8, // 風, #353
+ 0x98DF, // 食, #218
+ 0x9928, // 館, #464
+ 0x99C5, // 駅, #316
+ 0x9A13, // 験, #397
+ 0x9AD8, // 高, #176
+ 0xFF57, // ï½—, #108
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_ja_coverage=0.880569589120162;
+
+// The 512 most frequently occurring characters for the ko language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_ko[] = {
+ 0x314B, // ã…‹, #148
+ 0x314E, // ã…Ž, #390
+ 0x3160, // ã… , #354
+ 0x318D, // ã†, #439
+ 0xAC00, // ê°€, #6
+ 0xAC01, // ê°, #231
+ 0xAC04, // ê°„, #106
+ 0xAC08, // ê°ˆ, #362
+ 0xAC10, // ê°, #122
+ 0xAC11, // ê°‘, #493
+ 0xAC15, // ê°•, #155
+ 0xAC19, // ê°™, #264
+ 0xAC1C, // 개, #87
+ 0xAC1D, // ê°, #198
+ 0xAC24, // ê°¤, #457
+ 0xAC70, // ê±°, #91
+ 0xAC74, // ê±´, #161
+ 0xAC78, // 걸, #338
+ 0xAC80, // ê²€, #184
+ 0xAC83, // 것, #116
+ 0xAC8C, // 게, #36
+ 0xACA0, // ê² , #233
+ 0xACA8, // 겨, #341
+ 0xACA9, // 격, #245
+ 0xACAC, // 견, #413
+ 0xACB0, // ê²°, #202
+ 0xACBD, // ê²½, #62
+ 0xACC4, // 계, #142
+ 0xACE0, // ê³ , #12
+ 0xACE1, // 곡, #444
+ 0xACE8, // 골, #379
+ 0xACF3, // ê³³, #388
+ 0xACF5, // ê³µ, #59
+ 0xACFC, // ê³¼, #69
+ 0xAD00, // ê´€, #95
+ 0xAD11, // ê´‘, #235
+ 0xAD50, // êµ, #128
+ 0xAD6C, // 구, #52
+ 0xAD6D, // êµ­, #85
+ 0xAD70, // êµ°, #293
+ 0xAD74, // êµ´, #487
+ 0xAD81, // ê¶, #441
+ 0xAD8C, // 권, #192
+ 0xADC0, // ê·€, #386
+ 0xADDC, // 규, #367
+ 0xADF8, // ê·¸, #30
+ 0xADF9, // ê·¹, #424
+ 0xADFC, // ê·¼, #241
+ 0xAE00, // 글, #61
+ 0xAE08, // 금, #138
+ 0xAE09, // 급, #269
+ 0xAE30, // 기, #3
+ 0xAE34, // 긴, #465
+ 0xAE38, // 길, #297
+ 0xAE40, // ê¹€, #205
+ 0xAE4C, // 까, #171
+ 0xAED8, // 께, #273
+ 0xAF43, // 꽃, #475
+ 0xB05D, // ë, #505
+ 0xB07C, // ë¼, #490
+ 0xB098, // 나, #39
+ 0xB09C, // 난, #274
+ 0xB0A0, // ë‚ , #292
+ 0xB0A8, // 남, #139
+ 0xB0B4, // ë‚´, #56
+ 0xB108, // 너, #272
+ 0xB110, // ë„, #476
+ 0xB118, // 넘, #492
+ 0xB124, // 네, #100
+ 0xB137, // ë„·, #329
+ 0xB140, // ë…€, #288
+ 0xB144, // ë…„, #151
+ 0xB178, // ë…¸, #149
+ 0xB17C, // ë…¼, #491
+ 0xB180, // 놀, #464
+ 0xB18D, // ë†, #442
+ 0xB204, // 누, #319
+ 0xB208, // 눈, #383
+ 0xB274, // 뉴, #173
+ 0xB290, // ëŠ, #368
+ 0xB294, // 는, #5
+ 0xB298, // 늘, #322
+ 0xB2A5, // 능, #190
+ 0xB2C8, // 니, #16
+ 0xB2D8, // 님, #153
+ 0xB2E4, // 다, #2
+ 0xB2E8, // 단, #134
+ 0xB2EB, // ë‹«, #195
+ 0xB2EC, // 달, #243
+ 0xB2F4, // ë‹´, #254
+ 0xB2F5, // 답, #287
+ 0xB2F9, // 당, #159
+ 0xB300, // 대, #33
+ 0xB313, // 댓, #303
+ 0xB354, // ë”, #140
+ 0xB358, // ë˜, #252
+ 0xB367, // ë§, #463
+ 0xB370, // ë°, #104
+ 0xB378, // ë¸, #429
+ 0xB3C4, // ë„, #25
+ 0xB3C5, // ë…, #301
+ 0xB3CC, // ëŒ, #309
+ 0xB3D9, // ë™, #58
+ 0xB418, // ë˜, #82
+ 0xB41C, // ëœ, #189
+ 0xB420, // ë , #408
+ 0xB429, // ë©, #332
+ 0xB450, // ë‘, #199
+ 0xB4A4, // ë’¤, #496
+ 0xB4DC, // 드, #40
+ 0xB4E0, // ë“ , #283
+ 0xB4E4, // 들, #54
+ 0xB4EF, // 듯, #478
+ 0xB4F1, // 등, #90
+ 0xB514, // ë””, #133
+ 0xB529, // 딩, #462
+ 0xB530, // ë”°, #333
+ 0xB54C, // 때, #240
+ 0xB610, // ë˜, #313
+ 0xB77C, // ë¼, #42
+ 0xB77D, // ë½, #355
+ 0xB780, // 란, #290
+ 0xB78C, // 람, #246
+ 0xB78D, // ëž, #420
+ 0xB791, // ëž‘, #270
+ 0xB798, // 래, #174
+ 0xB799, // ëž™, #381
+ 0xB79C, // 랜, #357
+ 0xB7A8, // 램, #359
+ 0xB7A9, // ëž©, #402
+ 0xB7C9, // 량, #346
+ 0xB7EC, // 러, #130
+ 0xB7F0, // 런, #312
+ 0xB7FC, // 럼, #327
+ 0xB7FD, // 럽, #447
+ 0xB807, // ë ‡, #412
+ 0xB808, // ë ˆ, #114
+ 0xB80C, // 렌, #395
+ 0xB824, // ë ¤, #158
+ 0xB825, // ë ¥, #194
+ 0xB828, // ë ¨, #326
+ 0xB839, // ë ¹, #389
+ 0xB85C, // 로, #4
+ 0xB85D, // ë¡, #84
+ 0xB860, // ë¡ , #366
+ 0xB8CC, // 료, #154
+ 0xB8E8, // 루, #236
+ 0xB958, // 류, #265
+ 0xB974, // 르, #212
+ 0xB978, // 른, #250
+ 0xB97C, // 를, #35
+ 0xB984, // 름, #276
+ 0xB9AC, // 리, #19
+ 0xB9AD, // 릭, #394
+ 0xB9B0, // 린, #259
+ 0xB9B4, // 릴, #485
+ 0xB9BC, // 림, #305
+ 0xB9BD, // 립, #217
+ 0xB9C1, // ë§, #351
+ 0xB9C8, // 마, #67
+ 0xB9C9, // 막, #310
+ 0xB9CC, // 만, #65
+ 0xB9CE, // 많, #257
+ 0xB9D0, // ë§, #188
+ 0xB9DB, // 맛, #397
+ 0xB9DD, // ë§, #370
+ 0xB9DE, // 맞, #399
+ 0xB9E4, // 매, #125
+ 0xB9E8, // 맨, #422
+ 0xBA38, // 머, #311
+ 0xBA39, // 먹, #377
+ 0xBA3C, // 먼, #469
+ 0xBA54, // ë©”, #147
+ 0xBA70, // ë©°, #191
+ 0xBA74, // ë©´, #72
+ 0xBA85, // 명, #131
+ 0xBAA8, // 모, #73
+ 0xBAA9, // 목, #157
+ 0xBAB0, // 몰, #401
+ 0xBAB8, // 몸, #437
+ 0xBABB, // 못, #336
+ 0xBB34, // 무, #80
+ 0xBB38, // 문, #57
+ 0xBB3C, // 물, #94
+ 0xBBA4, // 뮤, #431
+ 0xBBF8, // 미, #76
+ 0xBBFC, // 민, #200
+ 0xBC00, // ë°€, #308
+ 0xBC0F, // ë°, #249
+ 0xBC14, // ë°”, #89
+ 0xBC15, // ë°•, #226
+ 0xBC18, // ë°˜, #175
+ 0xBC1B, // ë°›, #248
+ 0xBC1C, // 발, #164
+ 0xBC29, // ë°©, #92
+ 0xBC30, // ë°°, #162
+ 0xBC31, // ë°±, #256
+ 0xBC84, // 버, #111
+ 0xBC88, // 번, #167
+ 0xBC8C, // 벌, #423
+ 0xBC94, // ë²”, #427
+ 0xBC95, // 법, #207
+ 0xBCA0, // ë² , #281
+ 0xBCA4, // 벤, #378
+ 0xBCA8, // 벨, #387
+ 0xBCC0, // ë³€, #253
+ 0xBCC4, // 별, #262
+ 0xBCD1, // 병, #340
+ 0xBCF4, // ë³´, #20
+ 0xBCF5, // ë³µ, #204
+ 0xBCF8, // 본, #182
+ 0xBCFC, // ë³¼, #385
+ 0xBD09, // ë´‰, #405
+ 0xBD80, // 부, #46
+ 0xBD81, // ë¶, #261
+ 0xBD84, // 분, #105
+ 0xBD88, // 불, #225
+ 0xBDF0, // ë·°, #350
+ 0xBE0C, // 브, #214
+ 0xBE14, // 블, #99
+ 0xBE44, // 비, #55
+ 0xBE4C, // 빌, #510
+ 0xBE60, // ë¹ , #398
+ 0xC0AC, // 사, #14
+ 0xC0AD, // ì‚­, #342
+ 0xC0B0, // ì‚°, #121
+ 0xC0B4, // ì‚´, #279
+ 0xC0BC, // 삼, #348
+ 0xC0C1, // ìƒ, #41
+ 0xC0C8, // 새, #282
+ 0xC0C9, // 색, #181
+ 0xC0DD, // ìƒ, #109
+ 0xC11C, // 서, #21
+ 0xC11D, // ì„, #234
+ 0xC120, // ì„ , #107
+ 0xC124, // 설, #170
+ 0xC131, // 성, #50
+ 0xC138, // 세, #60
+ 0xC139, // 섹, #456
+ 0xC13C, // 센, #267
+ 0xC154, // ì…”, #455
+ 0xC158, // ì…˜, #237
+ 0xC15C, // 셜, #448
+ 0xC168, // ì…¨, #421
+ 0xC18C, // 소, #51
+ 0xC18D, // ì†, #219
+ 0xC190, // ì†, #323
+ 0xC1A1, // 송, #203
+ 0xC1C4, // 쇄, #501
+ 0xC1FC, // 쇼, #364
+ 0xC218, // 수, #27
+ 0xC219, // 숙, #467
+ 0xC21C, // 순, #258
+ 0xC220, // 술, #302
+ 0xC26C, // 쉬, #511
+ 0xC288, // 슈, #384
+ 0xC2A4, // 스, #11
+ 0xC2AC, // 슬, #438
+ 0xC2B4, // 슴, #504
+ 0xC2B5, // 습, #77
+ 0xC2B9, // 승, #299
+ 0xC2DC, // 시, #13
+ 0xC2DD, // ì‹, #137
+ 0xC2E0, // ì‹ , #47
+ 0xC2E4, // 실, #132
+ 0xC2EC, // 심, #196
+ 0xC2ED, // ì‹­, #482
+ 0xC2F6, // 싶, #352
+ 0xC2F8, // 싸, #419
+ 0xC4F0, // ì“°, #278
+ 0xC528, // 씨, #360
+ 0xC544, // ì•„, #23
+ 0xC545, // ì•…, #296
+ 0xC548, // 안, #71
+ 0xC54A, // ì•Š, #209
+ 0xC54C, // 알, #222
+ 0xC554, // ì•”, #460
+ 0xC558, // 았, #349
+ 0xC559, // ì•™, #473
+ 0xC55E, // ì•ž, #434
+ 0xC560, // ì• , #271
+ 0xC561, // ì•¡, #415
+ 0xC571, // 앱, #477
+ 0xC57C, // 야, #124
+ 0xC57D, // 약, #229
+ 0xC591, // ì–‘, #177
+ 0xC5B4, // ì–´, #24
+ 0xC5B5, // ì–µ, #407
+ 0xC5B8, // ì–¸, #294
+ 0xC5BC, // ì–¼, #356
+ 0xC5C4, // ì—„, #426
+ 0xC5C5, // ì—…, #118
+ 0xC5C6, // ì—†, #178
+ 0xC5C8, // ì—ˆ, #165
+ 0xC5D0, // ì—, #9
+ 0xC5D4, // ì—”, #375
+ 0xC5D8, // ì—˜, #506
+ 0xC5EC, // ì—¬, #66
+ 0xC5ED, // ì—­, #186
+ 0xC5EE, // ì—®, #488
+ 0xC5F0, // ì—°, #96
+ 0xC5F4, // ì—´, #266
+ 0xC5FC, // ì—¼, #449
+ 0xC600, // 였, #374
+ 0xC601, // ì˜, #83
+ 0xC608, // 예, #168
+ 0xC624, // 오, #75
+ 0xC628, // 온, #300
+ 0xC62C, // 올, #306
+ 0xC640, // 와, #119
+ 0xC644, // 완, #361
+ 0xC654, // ì™”, #489
+ 0xC655, // 왕, #418
+ 0xC678, // 외, #218
+ 0xC694, // ìš”, #43
+ 0xC695, // ìš•, #479
+ 0xC6A9, // ìš©, #48
+ 0xC6B0, // ìš°, #64
+ 0xC6B1, // ìš±, #503
+ 0xC6B4, // ìš´, #108
+ 0xC6B8, // 울, #223
+ 0xC6C0, // 움, #317
+ 0xC6C3, // 웃, #404
+ 0xC6CC, // 워, #280
+ 0xC6D0, // ì›, #45
+ 0xC6D4, // ì›”, #150
+ 0xC6E8, // 웨, #446
+ 0xC6F9, // 웹, #500
+ 0xC704, // 위, #78
+ 0xC720, // 유, #81
+ 0xC721, // 육, #321
+ 0xC724, // 윤, #416
+ 0xC73C, // 으, #49
+ 0xC740, // ì€, #31
+ 0xC744, // ì„, #17
+ 0xC74C, // ìŒ, #112
+ 0xC751, // ì‘, #461
+ 0xC758, // ì˜, #8
+ 0xC774, // ì´, #1
+ 0xC775, // ìµ, #403
+ 0xC778, // ì¸, #18
+ 0xC77C, // ì¼, #28
+ 0xC784, // ìž„, #160
+ 0xC785, // ìž…, #93
+ 0xC788, // 있, #44
+ 0xC790, // ìž, #22
+ 0xC791, // ìž‘, #88
+ 0xC798, // 잘, #347
+ 0xC7A1, // ìž¡, #372
+ 0xC7A5, // 장, #53
+ 0xC7AC, // 재, #120
+ 0xC7C1, // ìŸ, #483
+ 0xC800, // ì €, #98
+ 0xC801, // ì , #97
+ 0xC804, // ì „, #34
+ 0xC808, // ì ˆ, #320
+ 0xC810, // ì , #201
+ 0xC811, // ì ‘, #331
+ 0xC815, // ì •, #26
+ 0xC81C, // 제, #29
+ 0xC838, // ì ¸, #414
+ 0xC870, // ì¡°, #86
+ 0xC871, // 족, #373
+ 0xC874, // ì¡´, #432
+ 0xC880, // 좀, #470
+ 0xC885, // 종, #208
+ 0xC88B, // 좋, #239
+ 0xC8E0, // 죠, #451
+ 0xC8FC, // 주, #38
+ 0xC8FD, // 죽, #471
+ 0xC900, // 준, #286
+ 0xC904, // 줄, #392
+ 0xC911, // 중, #103
+ 0xC988, // 즈, #255
+ 0xC98C, // 즌, #507
+ 0xC990, // ì¦, #371
+ 0xC99D, // ì¦, #260
+ 0xC9C0, // 지, #10
+ 0xC9C1, // ì§, #216
+ 0xC9C4, // 진, #79
+ 0xC9C8, // 질, #238
+ 0xC9D1, // 집, #206
+ 0xC9DC, // 짜, #411
+ 0xC9F8, // 째, #494
+ 0xCABD, // 쪽, #435
+ 0xCC28, // ì°¨, #146
+ 0xCC29, // ì°©, #443
+ 0xCC2C, // ì°¬, #481
+ 0xCC30, // ì°°, #440
+ 0xCC38, // ì°¸, #343
+ 0xCC3D, // ì°½, #304
+ 0xCC3E, // ì°¾, #335
+ 0xCC44, // 채, #284
+ 0xCC45, // ì±…, #298
+ 0xCC98, // 처, #242
+ 0xCC9C, // 천, #143
+ 0xCCA0, // ì² , #380
+ 0xCCA8, // 첨, #452
+ 0xCCAB, // 첫, #484
+ 0xCCAD, // ì²­, #197
+ 0xCCB4, // ì²´, #126
+ 0xCCD0, // ì³, #472
+ 0xCD08, // ì´ˆ, #220
+ 0xCD1D, // ì´, #406
+ 0xCD5C, // 최, #179
+ 0xCD94, // 추, #136
+ 0xCD95, // 축, #337
+ 0xCD9C, // 출, #166
+ 0xCDA9, // 충, #369
+ 0xCDE8, // ì·¨, #210
+ 0xCE20, // 츠, #215
+ 0xCE21, // 측, #468
+ 0xCE35, // 층, #512
+ 0xCE58, // 치, #102
+ 0xCE5C, // 친, #325
+ 0xCE68, // 침, #263
+ 0xCE74, // ì¹´, #115
+ 0xCE7C, // ì¹¼, #466
+ 0xCE90, // ìº, #454
+ 0xCEE4, // 커, #285
+ 0xCEE8, // 컨, #328
+ 0xCEF4, // ì»´, #417
+ 0xCF00, // ì¼€, #339
+ 0xCF13, // 켓, #509
+ 0xCF1C, // 켜, #508
+ 0xCF54, // ì½”, #193
+ 0xCF58, // 콘, #391
+ 0xCFE0, // ì¿ , #393
+ 0xD035, // 퀵, #453
+ 0xD06C, // í¬, #101
+ 0xD070, // í°, #495
+ 0xD074, // í´, #289
+ 0xD0A4, // 키, #230
+ 0xD0C0, // 타, #127
+ 0xD0C1, // íƒ, #314
+ 0xD0C4, // 탄, #450
+ 0xD0C8, // 탈, #436
+ 0xD0DC, // 태, #221
+ 0xD0DD, // íƒ, #275
+ 0xD130, // í„°, #70
+ 0xD14C, // 테, #213
+ 0xD150, // í…, #324
+ 0xD154, // í…”, #430
+ 0xD15C, // 템, #382
+ 0xD1A0, // 토, #145
+ 0xD1B5, // 통, #156
+ 0xD22C, // 투, #227
+ 0xD2B8, // 트, #37
+ 0xD2B9, // 특, #247
+ 0xD2F0, // í‹°, #187
+ 0xD305, // 팅, #410
+ 0xD30C, // 파, #141
+ 0xD310, // íŒ, #163
+ 0xD314, // 팔, #499
+ 0xD328, // 패, #307
+ 0xD32C, // 팬, #459
+ 0xD338, // 팸, #433
+ 0xD37C, // í¼, #344
+ 0xD398, // 페, #172
+ 0xD3B8, // 편, #251
+ 0xD3C9, // í‰, #291
+ 0xD3EC, // í¬, #68
+ 0xD3ED, // í­, #445
+ 0xD3F0, // í°, #318
+ 0xD45C, // 표, #232
+ 0xD480, // í’€, #497
+ 0xD488, // í’ˆ, #113
+ 0xD48D, // í’, #425
+ 0xD504, // 프, #110
+ 0xD508, // 픈, #498
+ 0xD50C, // 플, #211
+ 0xD53C, // 피, #169
+ 0xD544, // í•„, #295
+ 0xD551, // í•‘, #376
+ 0xD558, // 하, #7
+ 0xD559, // í•™, #129
+ 0xD55C, // 한, #15
+ 0xD560, // í• , #144
+ 0xD568, // 함, #152
+ 0xD569, // í•©, #123
+ 0xD56D, // í•­, #268
+ 0xD574, // í•´, #32
+ 0xD588, // í–ˆ, #180
+ 0xD589, // í–‰, #135
+ 0xD5A5, // í–¥, #345
+ 0xD5C8, // í—ˆ, #396
+ 0xD5D8, // í—˜, #316
+ 0xD5E4, // í—¤, #474
+ 0xD604, // 현, #185
+ 0xD611, // 협, #315
+ 0xD615, // 형, #244
+ 0xD61C, // 혜, #428
+ 0xD638, // 호, #117
+ 0xD63C, // 혼, #358
+ 0xD648, // 홈, #330
+ 0xD64D, // í™, #363
+ 0xD654, // í™”, #63
+ 0xD655, // 확, #183
+ 0xD658, // 환, #224
+ 0xD65C, // 활, #277
+ 0xD669, // 황, #353
+ 0xD68C, // 회, #74
+ 0xD68D, // íš, #458
+ 0xD69F, // 횟, #409
+ 0xD6A8, // 효, #400
+ 0xD6C4, // 후, #176
+ 0xD6C8, // 훈, #486
+ 0xD734, // 휴, #365
+ 0xD754, // í”, #480
+ 0xD76C, // í¬, #334
+ 0xD788, // 히, #228
+ 0xD798, // 힘, #502
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_ko_coverage=0.948157021464184;
+
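Editor's note, not part of the patch: the hunk above adds a table of the most frequent Hangul code points, sorted ascending, each tagged with its frequency rank in the sampled corpus, plus frequent_ko_coverage, the fraction of that corpus the table covers. The detector's actual scoring logic is not shown in this hunk; the sketch below is only an assumption of how such a table could feed a charset-confidence score, and the function and parameter names are illustrative.

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    // Hypothetical scorer: count how many UTF-16 code units of a decoded sample
    // appear in the frequency table (sorted ascending, so binary search works),
    // then normalize by the coverage the table achieved on its training sample.
    static float scoreFrequentKo(const uint16_t* text, size_t len,
                                 const uint16_t* table, size_t tableLen,
                                 float tableCoverage /* e.g. frequent_ko_coverage */) {
        if (len == 0) {
            return 0.0f;
        }
        size_t hits = 0;
        for (size_t i = 0; i < len; i++) {
            if (std::binary_search(table, table + tableLen, text[i])) {
                hits++;
            }
        }
        float observed = static_cast<float>(hits) / static_cast<float>(len);
        float score = observed / tableCoverage;
        return score > 1.0f ? 1.0f : score;
    }
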
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 86ff8bd..8e3b633 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -58,7 +58,7 @@ enum {
RESTORE_OUTPUT,
OPEN_INPUT,
CLOSE_INPUT,
- SET_STREAM_OUTPUT,
+ INVALIDATE_STREAM,
SET_VOICE_VOLUME,
GET_RENDER_POSITION,
GET_INPUT_FRAMES_LOST,
@@ -74,6 +74,13 @@ enum {
GET_PRIMARY_OUTPUT_SAMPLING_RATE,
GET_PRIMARY_OUTPUT_FRAME_COUNT,
SET_LOW_RAM_DEVICE,
+ LIST_AUDIO_PORTS,
+ GET_AUDIO_PORT,
+ CREATE_AUDIO_PATCH,
+ RELEASE_AUDIO_PATCH,
+ LIST_AUDIO_PATCHES,
+ SET_AUDIO_PORT_CONFIG,
+ GET_AUDIO_HW_SYNC
};
class BpAudioFlinger : public BpInterface<IAudioFlinger>
@@ -89,13 +96,12 @@ public:
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- size_t frameCount,
+ size_t *pFrameCount,
track_flags_t *flags,
const sp<IMemory>& sharedBuffer,
audio_io_handle_t output,
pid_t tid,
int *sessionId,
- String8& name,
int clientUid,
status_t *status)
{
@@ -106,18 +112,20 @@ public:
data.writeInt32(sampleRate);
data.writeInt32(format);
data.writeInt32(channelMask);
- data.writeInt32(frameCount);
+ size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
+ data.writeInt64(frameCount);
track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
data.writeInt32(lFlags);
+ // haveSharedBuffer
if (sharedBuffer != 0) {
data.writeInt32(true);
- data.writeStrongBinder(sharedBuffer->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(sharedBuffer));
} else {
data.writeInt32(false);
}
data.writeInt32((int32_t) output);
data.writeInt32((int32_t) tid);
- int lSessionId = 0;
+ int lSessionId = AUDIO_SESSION_ALLOCATE;
if (sessionId != NULL) {
lSessionId = *sessionId;
}
@@ -127,6 +135,10 @@ public:
if (lStatus != NO_ERROR) {
ALOGE("createTrack error: %s", strerror(-lStatus));
} else {
+ frameCount = reply.readInt64();
+ if (pFrameCount != NULL) {
+ *pFrameCount = frameCount;
+ }
lFlags = reply.readInt32();
if (flags != NULL) {
*flags = lFlags;
@@ -135,11 +147,21 @@ public:
if (sessionId != NULL) {
*sessionId = lSessionId;
}
- name = reply.readString8();
lStatus = reply.readInt32();
track = interface_cast<IAudioTrack>(reply.readStrongBinder());
+ if (lStatus == NO_ERROR) {
+ if (track == 0) {
+ ALOGE("createTrack should have returned an IAudioTrack");
+ lStatus = UNKNOWN_ERROR;
+ }
+ } else {
+ if (track != 0) {
+ ALOGE("createTrack returned an IAudioTrack but with status %d", lStatus);
+ track.clear();
+ }
+ }
}
- if (status) {
+ if (status != NULL) {
*status = lStatus;
}
return track;
@@ -150,10 +172,13 @@ public:
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- size_t frameCount,
+ size_t *pFrameCount,
track_flags_t *flags,
pid_t tid,
int *sessionId,
+ size_t *notificationFrames,
+ sp<IMemory>& cblk,
+ sp<IMemory>& buffers,
status_t *status)
{
Parcel data, reply;
@@ -163,19 +188,27 @@ public:
data.writeInt32(sampleRate);
data.writeInt32(format);
data.writeInt32(channelMask);
- data.writeInt32(frameCount);
+ size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
+ data.writeInt64(frameCount);
track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
data.writeInt32(lFlags);
data.writeInt32((int32_t) tid);
- int lSessionId = 0;
+ int lSessionId = AUDIO_SESSION_ALLOCATE;
if (sessionId != NULL) {
lSessionId = *sessionId;
}
data.writeInt32(lSessionId);
+ data.writeInt64(notificationFrames != NULL ? *notificationFrames : 0);
+ cblk.clear();
+ buffers.clear();
status_t lStatus = remote()->transact(OPEN_RECORD, data, &reply);
if (lStatus != NO_ERROR) {
ALOGE("openRecord error: %s", strerror(-lStatus));
} else {
+ frameCount = reply.readInt64();
+ if (pFrameCount != NULL) {
+ *pFrameCount = frameCount;
+ }
lFlags = reply.readInt32();
if (flags != NULL) {
*flags = lFlags;
@@ -184,21 +217,42 @@ public:
if (sessionId != NULL) {
*sessionId = lSessionId;
}
+ size_t lNotificationFrames = (size_t) reply.readInt64();
+ if (notificationFrames != NULL) {
+ *notificationFrames = lNotificationFrames;
+ }
lStatus = reply.readInt32();
record = interface_cast<IAudioRecord>(reply.readStrongBinder());
+ cblk = interface_cast<IMemory>(reply.readStrongBinder());
+ if (cblk != 0 && cblk->pointer() == NULL) {
+ cblk.clear();
+ }
+ buffers = interface_cast<IMemory>(reply.readStrongBinder());
+ if (buffers != 0 && buffers->pointer() == NULL) {
+ buffers.clear();
+ }
if (lStatus == NO_ERROR) {
if (record == 0) {
ALOGE("openRecord should have returned an IAudioRecord");
lStatus = UNKNOWN_ERROR;
+ } else if (cblk == 0) {
+ ALOGE("openRecord should have returned a cblk");
+ lStatus = NO_MEMORY;
}
+ // buffers is permitted to be 0
} else {
- if (record != 0) {
- ALOGE("openRecord returned an IAudioRecord but with status %d", lStatus);
- record.clear();
+ if (record != 0 || cblk != 0 || buffers != 0) {
+ ALOGE("openRecord returned an IAudioRecord, cblk, "
+ "or buffers but with status %d", lStatus);
}
}
+ if (lStatus != NO_ERROR) {
+ record.clear();
+ cblk.clear();
+ buffers.clear();
+ }
}
- if (status) {
+ if (status != NULL) {
*status = lStatus;
}
return record;
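Editor's note, not part of the patch: the openRecord() changes above hand the control block and data buffer back to the client as shared memory, and the proxy only keeps an IMemory whose mapping actually succeeded. A minimal sketch of that convention follows; readSharedBuffer is a made-up helper name, and this assumes an Android build tree.

    #include <binder/IInterface.h>
    #include <binder/IMemory.h>
    #include <binder/Parcel.h>

    using namespace android;

    // Read an IMemory out-parameter from a reply Parcel. The binder may arrive
    // intact while the underlying ashmem region fails to map in this process,
    // in which case pointer() is NULL and the reference is discarded, mirroring
    // what the openRecord() proxy above does for cblk and buffers.
    static sp<IMemory> readSharedBuffer(const Parcel& reply) {
        sp<IMemory> mem = interface_cast<IMemory>(reply.readStrongBinder());
        if (mem != 0 && mem->pointer() == NULL) {
            mem.clear();
        }
        return mem;
    }
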
@@ -228,7 +282,7 @@ public:
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32((int32_t) output);
remote()->transact(FRAME_COUNT, data, &reply);
- return reply.readInt32();
+ return reply.readInt64();
}
virtual uint32_t latency(audio_io_handle_t output) const
@@ -365,7 +419,7 @@ public:
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- data.writeStrongBinder(client->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(client));
remote()->transact(REGISTER_CLIENT, data, &reply);
}
@@ -378,53 +432,43 @@ public:
data.writeInt32(format);
data.writeInt32(channelMask);
remote()->transact(GET_INPUTBUFFERSIZE, data, &reply);
- return reply.readInt32();
+ return reply.readInt64();
}
- virtual audio_io_handle_t openOutput(audio_module_handle_t module,
- audio_devices_t *pDevices,
- uint32_t *pSamplingRate,
- audio_format_t *pFormat,
- audio_channel_mask_t *pChannelMask,
- uint32_t *pLatencyMs,
- audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo)
+ virtual status_t openOutput(audio_module_handle_t module,
+ audio_io_handle_t *output,
+ audio_config_t *config,
+ audio_devices_t *devices,
+ const String8& address,
+ uint32_t *latencyMs,
+ audio_output_flags_t flags)
{
+ if (output == NULL || config == NULL || devices == NULL || latencyMs == NULL) {
+ return BAD_VALUE;
+ }
Parcel data, reply;
- audio_devices_t devices = pDevices != NULL ? *pDevices : (audio_devices_t)0;
- uint32_t samplingRate = pSamplingRate != NULL ? *pSamplingRate : 0;
- audio_format_t format = pFormat != NULL ? *pFormat : AUDIO_FORMAT_DEFAULT;
- audio_channel_mask_t channelMask = pChannelMask != NULL ?
- *pChannelMask : (audio_channel_mask_t)0;
- uint32_t latency = pLatencyMs != NULL ? *pLatencyMs : 0;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(module);
- data.writeInt32(devices);
- data.writeInt32(samplingRate);
- data.writeInt32(format);
- data.writeInt32(channelMask);
- data.writeInt32(latency);
+ data.write(config, sizeof(audio_config_t));
+ data.writeInt32(*devices);
+ data.writeString8(address);
data.writeInt32((int32_t) flags);
- if (offloadInfo == NULL) {
- data.writeInt32(0);
- } else {
- data.writeInt32(1);
- data.write(offloadInfo, sizeof(audio_offload_info_t));
+ status_t status = remote()->transact(OPEN_OUTPUT, data, &reply);
+ if (status != NO_ERROR) {
+ *output = AUDIO_IO_HANDLE_NONE;
+ return status;
+ }
+ status = (status_t)reply.readInt32();
+ if (status != NO_ERROR) {
+ *output = AUDIO_IO_HANDLE_NONE;
+ return status;
}
- remote()->transact(OPEN_OUTPUT, data, &reply);
- audio_io_handle_t output = (audio_io_handle_t) reply.readInt32();
- ALOGV("openOutput() returned output, %d", output);
- devices = (audio_devices_t)reply.readInt32();
- if (pDevices != NULL) *pDevices = devices;
- samplingRate = reply.readInt32();
- if (pSamplingRate != NULL) *pSamplingRate = samplingRate;
- format = (audio_format_t) reply.readInt32();
- if (pFormat != NULL) *pFormat = format;
- channelMask = (audio_channel_mask_t)reply.readInt32();
- if (pChannelMask != NULL) *pChannelMask = channelMask;
- latency = reply.readInt32();
- if (pLatencyMs != NULL) *pLatencyMs = latency;
- return output;
+ *output = (audio_io_handle_t)reply.readInt32();
+ ALOGV("openOutput() returned output, %d", *output);
+ reply.read(config, sizeof(audio_config_t));
+ *devices = (audio_devices_t)reply.readInt32();
+ *latencyMs = reply.readInt32();
+ return NO_ERROR;
}
virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
@@ -465,36 +509,40 @@ public:
return reply.readInt32();
}
- virtual audio_io_handle_t openInput(audio_module_handle_t module,
- audio_devices_t *pDevices,
- uint32_t *pSamplingRate,
- audio_format_t *pFormat,
- audio_channel_mask_t *pChannelMask)
+ virtual status_t openInput(audio_module_handle_t module,
+ audio_io_handle_t *input,
+ audio_config_t *config,
+ audio_devices_t *device,
+ const String8& address,
+ audio_source_t source,
+ audio_input_flags_t flags)
{
+ if (input == NULL || config == NULL || device == NULL) {
+ return BAD_VALUE;
+ }
Parcel data, reply;
- audio_devices_t devices = pDevices != NULL ? *pDevices : (audio_devices_t)0;
- uint32_t samplingRate = pSamplingRate != NULL ? *pSamplingRate : 0;
- audio_format_t format = pFormat != NULL ? *pFormat : AUDIO_FORMAT_DEFAULT;
- audio_channel_mask_t channelMask = pChannelMask != NULL ?
- *pChannelMask : (audio_channel_mask_t)0;
-
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(module);
- data.writeInt32(devices);
- data.writeInt32(samplingRate);
- data.writeInt32(format);
- data.writeInt32(channelMask);
- remote()->transact(OPEN_INPUT, data, &reply);
- audio_io_handle_t input = (audio_io_handle_t) reply.readInt32();
- devices = (audio_devices_t)reply.readInt32();
- if (pDevices != NULL) *pDevices = devices;
- samplingRate = reply.readInt32();
- if (pSamplingRate != NULL) *pSamplingRate = samplingRate;
- format = (audio_format_t) reply.readInt32();
- if (pFormat != NULL) *pFormat = format;
- channelMask = (audio_channel_mask_t)reply.readInt32();
- if (pChannelMask != NULL) *pChannelMask = channelMask;
- return input;
+ data.writeInt32(*input);
+ data.write(config, sizeof(audio_config_t));
+ data.writeInt32(*device);
+ data.writeString8(address);
+ data.writeInt32(source);
+ data.writeInt32(flags);
+ status_t status = remote()->transact(OPEN_INPUT, data, &reply);
+ if (status != NO_ERROR) {
+ *input = AUDIO_IO_HANDLE_NONE;
+ return status;
+ }
+ status = (status_t)reply.readInt32();
+ if (status != NO_ERROR) {
+ *input = AUDIO_IO_HANDLE_NONE;
+ return status;
+ }
+ *input = (audio_io_handle_t)reply.readInt32();
+ reply.read(config, sizeof(audio_config_t));
+ *device = (audio_devices_t)reply.readInt32();
+ return NO_ERROR;
}
virtual status_t closeInput(int input)
@@ -506,13 +554,12 @@ public:
return reply.readInt32();
}
- virtual status_t setStreamOutput(audio_stream_type_t stream, audio_io_handle_t output)
+ virtual status_t invalidateStream(audio_stream_type_t stream)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32((int32_t) stream);
- data.writeInt32((int32_t) output);
- remote()->transact(SET_STREAM_OUTPUT, data, &reply);
+ remote()->transact(INVALIDATE_STREAM, data, &reply);
return reply.readInt32();
}
@@ -535,11 +582,11 @@ public:
status_t status = reply.readInt32();
if (status == NO_ERROR) {
uint32_t tmp = reply.readInt32();
- if (halFrames) {
+ if (halFrames != NULL) {
*halFrames = tmp;
}
tmp = reply.readInt32();
- if (dspFrames) {
+ if (dspFrames != NULL) {
*dspFrames = tmp;
}
}
@@ -551,35 +598,40 @@ public:
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32((int32_t) ioHandle);
- remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
- return reply.readInt32();
+ status_t status = remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
+ if (status != NO_ERROR) {
+ return 0;
+ }
+ return (uint32_t) reply.readInt32();
}
- virtual int newAudioSessionId()
+ virtual audio_unique_id_t newAudioUniqueId()
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
status_t status = remote()->transact(NEW_AUDIO_SESSION_ID, data, &reply);
- int id = 0;
+ audio_unique_id_t id = AUDIO_SESSION_ALLOCATE;
if (status == NO_ERROR) {
id = reply.readInt32();
}
return id;
}
- virtual void acquireAudioSessionId(int audioSession)
+ virtual void acquireAudioSessionId(int audioSession, int pid)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(audioSession);
+ data.writeInt32(pid);
remote()->transact(ACQUIRE_AUDIO_SESSION_ID, data, &reply);
}
- virtual void releaseAudioSessionId(int audioSession)
+ virtual void releaseAudioSessionId(int audioSession, int pid)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(audioSession);
+ data.writeInt32(pid);
remote()->transact(RELEASE_AUDIO_SESSION_ID, data, &reply);
}
@@ -657,14 +709,14 @@ public:
if (pDesc == NULL) {
return effect;
- if (status) {
+ if (status != NULL) {
*status = BAD_VALUE;
}
}
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.write(pDesc, sizeof(effect_descriptor_t));
- data.writeStrongBinder(client->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(client));
data.writeInt32(priority);
data.writeInt32((int32_t) output);
data.writeInt32(sessionId);
@@ -675,7 +727,7 @@ public:
} else {
lStatus = reply.readInt32();
int tmp = reply.readInt32();
- if (id) {
+ if (id != NULL) {
*id = tmp;
}
tmp = reply.readInt32();
@@ -685,7 +737,7 @@ public:
effect = interface_cast<IEffect>(reply.readStrongBinder());
reply.read(pDesc, sizeof(effect_descriptor_t));
}
- if (status) {
+ if (status != NULL) {
*status = lStatus;
}
@@ -726,7 +778,7 @@ public:
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
remote()->transact(GET_PRIMARY_OUTPUT_FRAME_COUNT, data, &reply);
- return reply.readInt32();
+ return reply.readInt64();
}
virtual status_t setLowRamDevice(bool isLowRamDevice)
@@ -737,7 +789,112 @@ public:
remote()->transact(SET_LOW_RAM_DEVICE, data, &reply);
return reply.readInt32();
}
-
+ virtual status_t listAudioPorts(unsigned int *num_ports,
+ struct audio_port *ports)
+ {
+ if (num_ports == NULL || *num_ports == 0 || ports == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32(*num_ports);
+ status_t status = remote()->transact(LIST_AUDIO_PORTS, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ *num_ports = (unsigned int)reply.readInt32();
+ reply.read(ports, *num_ports * sizeof(struct audio_port));
+ return status;
+ }
+ virtual status_t getAudioPort(struct audio_port *port)
+ {
+ if (port == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(port, sizeof(struct audio_port));
+ status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(port, sizeof(struct audio_port));
+ return status;
+ }
+ virtual status_t createAudioPatch(const struct audio_patch *patch,
+ audio_patch_handle_t *handle)
+ {
+ if (patch == NULL || handle == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(patch, sizeof(struct audio_patch));
+ data.write(handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(CREATE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(handle, sizeof(audio_patch_handle_t));
+ return status;
+ }
+ virtual status_t releaseAudioPatch(audio_patch_handle_t handle)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(&handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(RELEASE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+ virtual status_t listAudioPatches(unsigned int *num_patches,
+ struct audio_patch *patches)
+ {
+ if (num_patches == NULL || *num_patches == 0 || patches == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32(*num_patches);
+ status_t status = remote()->transact(LIST_AUDIO_PATCHES, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ *num_patches = (unsigned int)reply.readInt32();
+ reply.read(patches, *num_patches * sizeof(struct audio_patch));
+ return status;
+ }
+ virtual status_t setAudioPortConfig(const struct audio_port_config *config)
+ {
+ if (config == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(config, sizeof(struct audio_port_config));
+ status_t status = remote()->transact(SET_AUDIO_PORT_CONFIG, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+ virtual audio_hw_sync_t getAudioHwSyncForSession(audio_session_t sessionId)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32(sessionId);
+ status_t status = remote()->transact(GET_AUDIO_HW_SYNC, data, &reply);
+ if (status != NO_ERROR) {
+ return AUDIO_HW_SYNC_INVALID;
+ }
+ return (audio_hw_sync_t)reply.readInt32();
+ }
};
IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
@@ -754,7 +911,7 @@ status_t BnAudioFlinger::onTransact(
uint32_t sampleRate = data.readInt32();
audio_format_t format = (audio_format_t) data.readInt32();
audio_channel_mask_t channelMask = data.readInt32();
- size_t frameCount = data.readInt32();
+ size_t frameCount = data.readInt64();
track_flags_t flags = (track_flags_t) data.readInt32();
bool haveSharedBuffer = data.readInt32() != 0;
sp<IMemory> buffer;
@@ -765,7 +922,6 @@ status_t BnAudioFlinger::onTransact(
pid_t tid = (pid_t) data.readInt32();
int sessionId = data.readInt32();
int clientUid = data.readInt32();
- String8 name;
status_t status;
sp<IAudioTrack> track;
if ((haveSharedBuffer && (buffer == 0)) ||
@@ -775,14 +931,15 @@ status_t BnAudioFlinger::onTransact(
} else {
track = createTrack(
(audio_stream_type_t) streamType, sampleRate, format,
- channelMask, frameCount, &flags, buffer, output, tid,
- &sessionId, name, clientUid, &status);
+ channelMask, &frameCount, &flags, buffer, output, tid,
+ &sessionId, clientUid, &status);
+ LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
}
+ reply->writeInt64(frameCount);
reply->writeInt32(flags);
reply->writeInt32(sessionId);
- reply->writeString8(name);
reply->writeInt32(status);
- reply->writeStrongBinder(track->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(track));
return NO_ERROR;
} break;
case OPEN_RECORD: {
@@ -791,18 +948,27 @@ status_t BnAudioFlinger::onTransact(
uint32_t sampleRate = data.readInt32();
audio_format_t format = (audio_format_t) data.readInt32();
audio_channel_mask_t channelMask = data.readInt32();
- size_t frameCount = data.readInt32();
+ size_t frameCount = data.readInt64();
track_flags_t flags = (track_flags_t) data.readInt32();
pid_t tid = (pid_t) data.readInt32();
int sessionId = data.readInt32();
+ size_t notificationFrames = data.readInt64();
+ sp<IMemory> cblk;
+ sp<IMemory> buffers;
status_t status;
sp<IAudioRecord> record = openRecord(input,
- sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status);
+ sampleRate, format, channelMask, &frameCount, &flags, tid, &sessionId,
+ &notificationFrames,
+ cblk, buffers, &status);
LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
+ reply->writeInt64(frameCount);
reply->writeInt32(flags);
reply->writeInt32(sessionId);
+ reply->writeInt64(notificationFrames);
reply->writeInt32(status);
- reply->writeStrongBinder(record->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(record));
+ reply->writeStrongBinder(IInterface::asBinder(cblk));
+ reply->writeStrongBinder(IInterface::asBinder(buffers));
return NO_ERROR;
} break;
case SAMPLE_RATE: {
@@ -817,7 +983,7 @@ status_t BnAudioFlinger::onTransact(
} break;
case FRAME_COUNT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32( frameCount((audio_io_handle_t) data.readInt32()) );
+ reply->writeInt64( frameCount((audio_io_handle_t) data.readInt32()) );
return NO_ERROR;
} break;
case LATENCY: {
@@ -916,38 +1082,29 @@ status_t BnAudioFlinger::onTransact(
uint32_t sampleRate = data.readInt32();
audio_format_t format = (audio_format_t) data.readInt32();
audio_channel_mask_t channelMask = data.readInt32();
- reply->writeInt32( getInputBufferSize(sampleRate, format, channelMask) );
+ reply->writeInt64( getInputBufferSize(sampleRate, format, channelMask) );
return NO_ERROR;
} break;
case OPEN_OUTPUT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
+ audio_config_t config;
+ data.read(&config, sizeof(audio_config_t));
audio_devices_t devices = (audio_devices_t)data.readInt32();
- uint32_t samplingRate = data.readInt32();
- audio_format_t format = (audio_format_t) data.readInt32();
- audio_channel_mask_t channelMask = (audio_channel_mask_t)data.readInt32();
- uint32_t latency = data.readInt32();
+ String8 address(data.readString8());
audio_output_flags_t flags = (audio_output_flags_t) data.readInt32();
- bool hasOffloadInfo = data.readInt32() != 0;
- audio_offload_info_t offloadInfo;
- if (hasOffloadInfo) {
- data.read(&offloadInfo, sizeof(audio_offload_info_t));
+ uint32_t latencyMs;
+ audio_io_handle_t output;
+ status_t status = openOutput(module, &output, &config,
+ &devices, address, &latencyMs, flags);
+ ALOGV("OPEN_OUTPUT output, %d", output);
+ reply->writeInt32((int32_t)status);
+ if (status == NO_ERROR) {
+ reply->writeInt32((int32_t)output);
+ reply->write(&config, sizeof(audio_config_t));
+ reply->writeInt32(devices);
+ reply->writeInt32(latencyMs);
}
- audio_io_handle_t output = openOutput(module,
- &devices,
- &samplingRate,
- &format,
- &channelMask,
- &latency,
- flags,
- hasOffloadInfo ? &offloadInfo : NULL);
- ALOGV("OPEN_OUTPUT output, %p", output);
- reply->writeInt32((int32_t) output);
- reply->writeInt32(devices);
- reply->writeInt32(samplingRate);
- reply->writeInt32(format);
- reply->writeInt32(channelMask);
- reply->writeInt32(latency);
return NO_ERROR;
} break;
case OPEN_DUPLICATE_OUTPUT: {
@@ -975,21 +1132,22 @@ status_t BnAudioFlinger::onTransact(
case OPEN_INPUT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
- audio_devices_t devices = (audio_devices_t)data.readInt32();
- uint32_t samplingRate = data.readInt32();
- audio_format_t format = (audio_format_t) data.readInt32();
- audio_channel_mask_t channelMask = (audio_channel_mask_t)data.readInt32();
-
- audio_io_handle_t input = openInput(module,
- &devices,
- &samplingRate,
- &format,
- &channelMask);
- reply->writeInt32((int32_t) input);
- reply->writeInt32(devices);
- reply->writeInt32(samplingRate);
- reply->writeInt32(format);
- reply->writeInt32(channelMask);
+ audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
+ audio_config_t config;
+ data.read(&config, sizeof(audio_config_t));
+ audio_devices_t device = (audio_devices_t)data.readInt32();
+ String8 address(data.readString8());
+ audio_source_t source = (audio_source_t)data.readInt32();
+ audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
+
+ status_t status = openInput(module, &input, &config,
+ &device, address, source, flags);
+ reply->writeInt32((int32_t) status);
+ if (status == NO_ERROR) {
+ reply->writeInt32((int32_t) input);
+ reply->write(&config, sizeof(audio_config_t));
+ reply->writeInt32(device);
+ }
return NO_ERROR;
} break;
case CLOSE_INPUT: {
@@ -997,11 +1155,10 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32(closeInput((audio_io_handle_t) data.readInt32()));
return NO_ERROR;
} break;
- case SET_STREAM_OUTPUT: {
+ case INVALIDATE_STREAM: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- uint32_t stream = data.readInt32();
- audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
- reply->writeInt32(setStreamOutput((audio_stream_type_t) stream, output));
+ audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
+ reply->writeInt32(invalidateStream(stream));
return NO_ERROR;
} break;
case SET_VOICE_VOLUME: {
@@ -1026,24 +1183,26 @@ status_t BnAudioFlinger::onTransact(
case GET_INPUT_FRAMES_LOST: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
- reply->writeInt32(getInputFramesLost(ioHandle));
+ reply->writeInt32((int32_t) getInputFramesLost(ioHandle));
return NO_ERROR;
} break;
case NEW_AUDIO_SESSION_ID: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(newAudioSessionId());
+ reply->writeInt32(newAudioUniqueId());
return NO_ERROR;
} break;
case ACQUIRE_AUDIO_SESSION_ID: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int audioSession = data.readInt32();
- acquireAudioSessionId(audioSession);
+ int pid = data.readInt32();
+ acquireAudioSessionId(audioSession, pid);
return NO_ERROR;
} break;
case RELEASE_AUDIO_SESSION_ID: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int audioSession = data.readInt32();
- releaseAudioSessionId(audioSession);
+ int pid = data.readInt32();
+ releaseAudioSessionId(audioSession, pid);
return NO_ERROR;
} break;
case QUERY_NUM_EFFECTS: {
@@ -1095,7 +1254,7 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32(status);
reply->writeInt32(id);
reply->writeInt32(enabled);
- reply->writeStrongBinder(effect->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(effect));
reply->write(&desc, sizeof(effect_descriptor_t));
return NO_ERROR;
} break;
@@ -1119,7 +1278,7 @@ status_t BnAudioFlinger::onTransact(
} break;
case GET_PRIMARY_OUTPUT_FRAME_COUNT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- reply->writeInt32(getPrimaryOutputFrameCount());
+ reply->writeInt64(getPrimaryOutputFrameCount());
return NO_ERROR;
} break;
case SET_LOW_RAM_DEVICE: {
@@ -1128,6 +1287,81 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32(setLowRamDevice(isLowRamDevice));
return NO_ERROR;
} break;
+ case LIST_AUDIO_PORTS: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ unsigned int num_ports = data.readInt32();
+ struct audio_port *ports =
+ (struct audio_port *)calloc(num_ports,
+ sizeof(struct audio_port));
+ status_t status = listAudioPorts(&num_ports, ports);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeInt32(num_ports);
+ reply->write(&ports, num_ports * sizeof(struct audio_port));
+ }
+ free(ports);
+ return NO_ERROR;
+ } break;
+ case GET_AUDIO_PORT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ struct audio_port port;
+ data.read(&port, sizeof(struct audio_port));
+ status_t status = getAudioPort(&port);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&port, sizeof(struct audio_port));
+ }
+ return NO_ERROR;
+ } break;
+ case CREATE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ struct audio_patch patch;
+ data.read(&patch, sizeof(struct audio_patch));
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = createAudioPatch(&patch, &handle);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&handle, sizeof(audio_patch_handle_t));
+ }
+ return NO_ERROR;
+ } break;
+ case RELEASE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = releaseAudioPatch(handle);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ } break;
+ case LIST_AUDIO_PATCHES: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ unsigned int num_patches = data.readInt32();
+ struct audio_patch *patches =
+ (struct audio_patch *)calloc(num_patches,
+ sizeof(struct audio_patch));
+ status_t status = listAudioPatches(&num_patches, patches);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeInt32(num_patches);
+ reply->write(&patches, num_patches * sizeof(struct audio_patch));
+ }
+ free(patches);
+ return NO_ERROR;
+ } break;
+ case SET_AUDIO_PORT_CONFIG: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ struct audio_port_config config;
+ data.read(&config, sizeof(struct audio_port_config));
+ status_t status = setAudioPortConfig(&config);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ } break;
+ case GET_AUDIO_HW_SYNC: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ reply->writeInt32(getAudioHwSyncForSession((audio_session_t)data.readInt32()));
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
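Editor's note, not part of the patch: the IAudioFlinger hunks above keep repeating the same proxy conventions: size_t values cross the Parcel as 64-bit integers, out-parameters are pointers that the proxy validates up front and fills from the reply, binders are written via IInterface::asBinder(), and the service-side status is read before any payload. A minimal sketch of one proxy method following those conventions, assuming an Android build tree; the IDemo interface and its transaction code are invented for illustration.

    #include <binder/IInterface.h>
    #include <binder/Parcel.h>
    #include <system/audio.h>

    namespace android {

    class IDemo : public IInterface {
    public:
        DECLARE_META_INTERFACE(Demo);   // IMPLEMENT_META_INTERFACE() would go in the .cpp
        virtual status_t getFrameCount(audio_io_handle_t output, size_t *frameCount) = 0;
    };

    enum { GET_FRAME_COUNT = IBinder::FIRST_CALL_TRANSACTION };

    class BpDemo : public BpInterface<IDemo> {
    public:
        BpDemo(const sp<IBinder>& impl) : BpInterface<IDemo>(impl) {}

        virtual status_t getFrameCount(audio_io_handle_t output, size_t *frameCount) {
            if (frameCount == NULL) {
                return BAD_VALUE;                       // validate out-parameters first
            }
            Parcel data, reply;
            data.writeInterfaceToken(IDemo::getInterfaceDescriptor());
            data.writeInt32((int32_t) output);
            status_t status = remote()->transact(GET_FRAME_COUNT, data, &reply);
            if (status != NO_ERROR) {
                return status;                          // transport failure
            }
            status = (status_t) reply.readInt32();      // service status precedes payload
            if (status != NO_ERROR) {
                return status;
            }
            *frameCount = (size_t) reply.readInt64();   // size_t travels as 64 bits
            return NO_ERROR;
        }
    };

    }  // namespace android
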
diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp
index 3c0d4cf..1c299f7 100644
--- a/media/libmedia/IAudioFlingerClient.cpp
+++ b/media/libmedia/IAudioFlingerClient.cpp
@@ -55,7 +55,7 @@ public:
data.writeInt32(desc->samplingRate);
data.writeInt32(desc->format);
data.writeInt32(desc->channelMask);
- data.writeInt32(desc->frameCount);
+ data.writeInt64(desc->frameCount);
data.writeInt32(desc->latency);
}
remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);
@@ -85,7 +85,7 @@ status_t BnAudioFlingerClient::onTransact(
desc.samplingRate = data.readInt32();
desc.format = (audio_format_t) data.readInt32();
desc.channelMask = (audio_channel_mask_t) data.readInt32();
- desc.frameCount = data.readInt32();
+ desc.frameCount = data.readInt64();
desc.latency = data.readInt32();
param2 = &desc;
}
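Editor's note, not part of the patch: the client-callback change above only touches frameCount, but it has to land on both sides of the transaction at once; the proxy's writeInt64() is correct only because the stub's readInt64() mirrors it in the same position. A toy illustration of that write/read symmetry, with DemoOutputDescriptor standing in for the real AudioSystem::OutputDescriptor.

    #include <binder/Parcel.h>
    #include <cstddef>
    #include <cstdint>

    using namespace android;

    // Stand-in for AudioSystem::OutputDescriptor; fields mirror the ones
    // marshalled in ioConfigChanged() above.
    struct DemoOutputDescriptor {
        uint32_t samplingRate;
        int32_t  format;
        int32_t  channelMask;
        size_t   frameCount;
        uint32_t latency;
    };

    static void writeDescriptor(Parcel& data, const DemoOutputDescriptor& d) {
        data.writeInt32(d.samplingRate);
        data.writeInt32(d.format);
        data.writeInt32(d.channelMask);
        data.writeInt64(d.frameCount);                  // widened to 64 bits...
        data.writeInt32(d.latency);
    }

    static void readDescriptor(const Parcel& data, DemoOutputDescriptor* d) {
        d->samplingRate = data.readInt32();
        d->format       = data.readInt32();
        d->channelMask  = data.readInt32();
        d->frameCount   = (size_t) data.readInt64();    // ...and read back the same way
        d->latency      = data.readInt32();
    }
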
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 4be3c09..dbc7a9e 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -23,6 +23,7 @@
#include <binder/Parcel.h>
+#include <media/AudioEffect.h>
#include <media/IAudioPolicyService.h>
#include <system/audio.h>
@@ -40,7 +41,7 @@ enum {
START_OUTPUT,
STOP_OUTPUT,
RELEASE_OUTPUT,
- GET_INPUT,
+ GET_INPUT_FOR_ATTR,
START_INPUT,
STOP_INPUT,
RELEASE_INPUT,
@@ -57,7 +58,19 @@ enum {
QUERY_DEFAULT_PRE_PROCESSING,
SET_EFFECT_ENABLED,
IS_STREAM_ACTIVE_REMOTELY,
- IS_OFFLOAD_SUPPORTED
+ IS_OFFLOAD_SUPPORTED,
+ LIST_AUDIO_PORTS,
+ GET_AUDIO_PORT,
+ CREATE_AUDIO_PATCH,
+ RELEASE_AUDIO_PATCH,
+ LIST_AUDIO_PATCHES,
+ SET_AUDIO_PORT_CONFIG,
+ REGISTER_CLIENT,
+ GET_OUTPUT_FOR_ATTR,
+ ACQUIRE_SOUNDTRIGGER_SESSION,
+ RELEASE_SOUNDTRIGGER_SESSION,
+ GET_PHONE_STATE,
+ REGISTER_POLICY_MIXES,
};
class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
@@ -137,6 +150,7 @@ public:
data.writeInt32(static_cast <uint32_t>(format));
data.writeInt32(channelMask);
data.writeInt32(static_cast <uint32_t>(flags));
+ // hasOffloadInfo
if (offloadInfo == NULL) {
data.writeInt32(0);
} else {
@@ -147,81 +161,174 @@ public:
return static_cast <audio_io_handle_t> (reply.readInt32());
}
+ virtual status_t getOutputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *output,
+ audio_session_t session,
+ audio_stream_type_t *stream,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ const audio_offload_info_t *offloadInfo)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ if (attr == NULL) {
+ if (stream == NULL) {
+ ALOGE("getOutputForAttr(): NULL audio attributes and stream type");
+ return BAD_VALUE;
+ }
+ if (*stream == AUDIO_STREAM_DEFAULT) {
+ ALOGE("getOutputForAttr unspecified stream type");
+ return BAD_VALUE;
+ }
+ }
+ if (output == NULL) {
+ ALOGE("getOutputForAttr NULL output - shouldn't happen");
+ return BAD_VALUE;
+ }
+ if (attr == NULL) {
+ data.writeInt32(0);
+ } else {
+ data.writeInt32(1);
+ data.write(attr, sizeof(audio_attributes_t));
+ }
+ data.writeInt32(session);
+ if (stream == NULL) {
+ data.writeInt32(0);
+ } else {
+ data.writeInt32(1);
+ data.writeInt32(*stream);
+ }
+ data.writeInt32(samplingRate);
+ data.writeInt32(static_cast <uint32_t>(format));
+ data.writeInt32(channelMask);
+ data.writeInt32(static_cast <uint32_t>(flags));
+ // hasOffloadInfo
+ if (offloadInfo == NULL) {
+ data.writeInt32(0);
+ } else {
+ data.writeInt32(1);
+ data.write(offloadInfo, sizeof(audio_offload_info_t));
+ }
+ status_t status = remote()->transact(GET_OUTPUT_FOR_ATTR, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = (status_t)reply.readInt32();
+ if (status != NO_ERROR) {
+ return status;
+ }
+ *output = (audio_io_handle_t)reply.readInt32();
+ if (stream != NULL) {
+ *stream = (audio_stream_type_t)reply.readInt32();
+ }
+ return status;
+ }
+
virtual status_t startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
data.writeInt32((int32_t) stream);
- data.writeInt32(session);
+ data.writeInt32((int32_t)session);
remote()->transact(START_OUTPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
virtual status_t stopOutput(audio_io_handle_t output,
audio_stream_type_t stream,
- int session)
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
data.writeInt32((int32_t) stream);
- data.writeInt32(session);
+ data.writeInt32((int32_t)session);
remote()->transact(STOP_OUTPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
- virtual void releaseOutput(audio_io_handle_t output)
+ virtual void releaseOutput(audio_io_handle_t output,
+ audio_stream_type_t stream,
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(output);
+ data.writeInt32((int32_t)stream);
+ data.writeInt32((int32_t)session);
remote()->transact(RELEASE_OUTPUT, data, &reply);
}
- virtual audio_io_handle_t getInput(
- audio_source_t inputSource,
- uint32_t samplingRate,
- audio_format_t format,
- audio_channel_mask_t channelMask,
- int audioSession)
+ virtual status_t getInputForAttr(const audio_attributes_t *attr,
+ audio_io_handle_t *input,
+ audio_session_t session,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_input_flags_t flags)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
- data.writeInt32((int32_t) inputSource);
+ if (attr == NULL) {
+ ALOGE("getInputForAttr NULL attr - shouldn't happen");
+ return BAD_VALUE;
+ }
+ if (input == NULL) {
+ ALOGE("getInputForAttr NULL input - shouldn't happen");
+ return BAD_VALUE;
+ }
+ data.write(attr, sizeof(audio_attributes_t));
+ data.writeInt32(session);
data.writeInt32(samplingRate);
data.writeInt32(static_cast <uint32_t>(format));
data.writeInt32(channelMask);
- data.writeInt32(audioSession);
- remote()->transact(GET_INPUT, data, &reply);
- return static_cast <audio_io_handle_t> (reply.readInt32());
+ data.writeInt32(flags);
+ status_t status = remote()->transact(GET_INPUT_FOR_ATTR, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = reply.readInt32();
+ if (status != NO_ERROR) {
+ return status;
+ }
+ *input = (audio_io_handle_t)reply.readInt32();
+ return NO_ERROR;
}
- virtual status_t startInput(audio_io_handle_t input)
+ virtual status_t startInput(audio_io_handle_t input,
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(input);
+ data.writeInt32(session);
remote()->transact(START_INPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
- virtual status_t stopInput(audio_io_handle_t input)
+ virtual status_t stopInput(audio_io_handle_t input,
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(input);
+ data.writeInt32(session);
remote()->transact(STOP_INPUT, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
- virtual void releaseInput(audio_io_handle_t input)
+ virtual void releaseInput(audio_io_handle_t input,
+ audio_session_t session)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(input);
+ data.writeInt32(session);
remote()->transact(RELEASE_INPUT, data, &reply);
}
@@ -389,7 +496,218 @@ public:
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.write(&info, sizeof(audio_offload_info_t));
remote()->transact(IS_OFFLOAD_SUPPORTED, data, &reply);
- return reply.readInt32(); }
+ return reply.readInt32();
+ }
+
+ virtual status_t listAudioPorts(audio_port_role_t role,
+ audio_port_type_t type,
+ unsigned int *num_ports,
+ struct audio_port *ports,
+ unsigned int *generation)
+ {
+ if (num_ports == NULL || (*num_ports != 0 && ports == NULL) ||
+ generation == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ unsigned int numPortsReq = (ports == NULL) ? 0 : *num_ports;
+ data.writeInt32(role);
+ data.writeInt32(type);
+ data.writeInt32(numPortsReq);
+ status_t status = remote()->transact(LIST_AUDIO_PORTS, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ *num_ports = (unsigned int)reply.readInt32();
+ }
+ if (status == NO_ERROR) {
+ if (numPortsReq > *num_ports) {
+ numPortsReq = *num_ports;
+ }
+ if (numPortsReq > 0) {
+ reply.read(ports, numPortsReq * sizeof(struct audio_port));
+ }
+ *generation = reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t getAudioPort(struct audio_port *port)
+ {
+ if (port == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(port, sizeof(struct audio_port));
+ status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(port, sizeof(struct audio_port));
+ return status;
+ }
+
+ virtual status_t createAudioPatch(const struct audio_patch *patch,
+ audio_patch_handle_t *handle)
+ {
+ if (patch == NULL || handle == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(patch, sizeof(struct audio_patch));
+ data.write(handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(CREATE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(handle, sizeof(audio_patch_handle_t));
+ return status;
+ }
+
+ virtual status_t releaseAudioPatch(audio_patch_handle_t handle)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(RELEASE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t listAudioPatches(unsigned int *num_patches,
+ struct audio_patch *patches,
+ unsigned int *generation)
+ {
+ if (num_patches == NULL || (*num_patches != 0 && patches == NULL) ||
+ generation == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ unsigned int numPatchesReq = (patches == NULL) ? 0 : *num_patches;
+ data.writeInt32(numPatchesReq);
+ status_t status = remote()->transact(LIST_AUDIO_PATCHES, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ *num_patches = (unsigned int)reply.readInt32();
+ }
+ if (status == NO_ERROR) {
+ if (numPatchesReq > *num_patches) {
+ numPatchesReq = *num_patches;
+ }
+ if (numPatchesReq > 0) {
+ reply.read(patches, numPatchesReq * sizeof(struct audio_patch));
+ }
+ *generation = reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t setAudioPortConfig(const struct audio_port_config *config)
+ {
+ if (config == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(config, sizeof(struct audio_port_config));
+ status_t status = remote()->transact(SET_AUDIO_PORT_CONFIG, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual void registerClient(const sp<IAudioPolicyServiceClient>& client)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeStrongBinder(IInterface::asBinder(client));
+ remote()->transact(REGISTER_CLIENT, data, &reply);
+ }
+
+ virtual status_t acquireSoundTriggerSession(audio_session_t *session,
+ audio_io_handle_t *ioHandle,
+ audio_devices_t *device)
+ {
+ if (session == NULL || ioHandle == NULL || device == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ status_t status = remote()->transact(ACQUIRE_SOUNDTRIGGER_SESSION, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = (status_t)reply.readInt32();
+ if (status == NO_ERROR) {
+ *session = (audio_session_t)reply.readInt32();
+ *ioHandle = (audio_io_handle_t)reply.readInt32();
+ *device = (audio_devices_t)reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t releaseSoundTriggerSession(audio_session_t session)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(session);
+ status_t status = remote()->transact(RELEASE_SOUNDTRIGGER_SESSION, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ return (status_t)reply.readInt32();
+ }
+
+ virtual audio_mode_t getPhoneState()
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ status_t status = remote()->transact(GET_PHONE_STATE, data, &reply);
+ if (status != NO_ERROR) {
+ return AUDIO_MODE_INVALID;
+ }
+ return (audio_mode_t)reply.readInt32();
+ }
+
+ virtual status_t registerPolicyMixes(Vector<AudioMix> mixes, bool registration)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeInt32(registration ? 1 : 0);
+ size_t size = mixes.size();
+ if (size > MAX_MIXES_PER_POLICY) {
+ size = MAX_MIXES_PER_POLICY;
+ }
+ size_t sizePosition = data.dataPosition();
+ data.writeInt32(size);
+ size_t finalSize = size;
+ for (size_t i = 0; i < size; i++) {
+ size_t position = data.dataPosition();
+ if (mixes[i].writeToParcel(&data) != NO_ERROR) {
+ data.setDataPosition(position);
+ finalSize--;
+ }
+ }
+ if (size != finalSize) {
+ size_t position = data.dataPosition();
+ data.setDataPosition(sizePosition);
+ data.writeInt32(finalSize);
+ data.setDataPosition(position);
+ }
+ status_t status = remote()->transact(REGISTER_POLICY_MIXES, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
};
IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
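Editor's note, not part of the patch: registerPolicyMixes() above uses a Parcel pattern worth calling out: write a provisional element count, flatten each element, roll the write position back over any element that fails, then seek back and overwrite the count so the reader never expects more entries than were actually written. A standalone sketch of the same pattern follows; AudioMix, its writeToParcel(), and MAX_MIXES_PER_POLICY come from the headers this patch series adds (the include path is assumed), and writeMixes is a made-up name.

    #include <binder/Parcel.h>
    #include <media/AudioPolicy.h>   // assumed header for AudioMix / MAX_MIXES_PER_POLICY
    #include <utils/Vector.h>

    using namespace android;

    static status_t writeMixes(Parcel& data, const Vector<AudioMix>& mixes) {
        size_t count = mixes.size();
        if (count > MAX_MIXES_PER_POLICY) {
            count = MAX_MIXES_PER_POLICY;
        }
        size_t countPos = data.dataPosition();          // remember where the count lives
        data.writeInt32(count);                         // provisional count

        size_t written = count;
        for (size_t i = 0; i < count; i++) {
            size_t itemPos = data.dataPosition();
            if (mixes[i].writeToParcel(&data) != NO_ERROR) {
                data.setDataPosition(itemPos);          // drop the half-written element
                written--;
            }
        }
        if (written != count) {
            size_t endPos = data.dataPosition();
            data.setDataPosition(countPos);
            data.writeInt32(written);                   // patch in the real count
            data.setDataPosition(endPos);
        }
        return NO_ERROR;
    }
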
@@ -473,13 +791,48 @@ status_t BnAudioPolicyService::onTransact(
return NO_ERROR;
} break;
+ case GET_OUTPUT_FOR_ATTR: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_attributes_t attr;
+ bool hasAttributes = data.readInt32() != 0;
+ if (hasAttributes) {
+ data.read(&attr, sizeof(audio_attributes_t));
+ }
+ audio_session_t session = (audio_session_t)data.readInt32();
+ audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+ bool hasStream = data.readInt32() != 0;
+ if (hasStream) {
+ stream = (audio_stream_type_t)data.readInt32();
+ }
+ uint32_t samplingRate = data.readInt32();
+ audio_format_t format = (audio_format_t) data.readInt32();
+ audio_channel_mask_t channelMask = data.readInt32();
+ audio_output_flags_t flags =
+ static_cast <audio_output_flags_t>(data.readInt32());
+ bool hasOffloadInfo = data.readInt32() != 0;
+ audio_offload_info_t offloadInfo;
+ if (hasOffloadInfo) {
+ data.read(&offloadInfo, sizeof(audio_offload_info_t));
+ }
+ audio_io_handle_t output;
+ status_t status = getOutputForAttr(hasAttributes ? &attr : NULL,
+ &output, session, &stream,
+ samplingRate, format, channelMask,
+ flags, hasOffloadInfo ? &offloadInfo : NULL);
+ reply->writeInt32(status);
+ reply->writeInt32(output);
+ reply->writeInt32(stream);
+ return NO_ERROR;
+ } break;
+
case START_OUTPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
- uint32_t stream = data.readInt32();
- int session = data.readInt32();
+ audio_stream_type_t stream =
+ static_cast <audio_stream_type_t>(data.readInt32());
+ audio_session_t session = (audio_session_t)data.readInt32();
reply->writeInt32(static_cast <uint32_t>(startOutput(output,
- (audio_stream_type_t)stream,
+ stream,
session)));
return NO_ERROR;
} break;
@@ -487,10 +840,11 @@ status_t BnAudioPolicyService::onTransact(
case STOP_OUTPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
- uint32_t stream = data.readInt32();
- int session = data.readInt32();
+ audio_stream_type_t stream =
+ static_cast <audio_stream_type_t>(data.readInt32());
+ audio_session_t session = (audio_session_t)data.readInt32();
reply->writeInt32(static_cast <uint32_t>(stopOutput(output,
- (audio_stream_type_t)stream,
+ stream,
session)));
return NO_ERROR;
} break;
@@ -498,44 +852,53 @@ status_t BnAudioPolicyService::onTransact(
case RELEASE_OUTPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
- releaseOutput(output);
+ audio_stream_type_t stream = (audio_stream_type_t)data.readInt32();
+ audio_session_t session = (audio_session_t)data.readInt32();
+ releaseOutput(output, stream, session);
return NO_ERROR;
} break;
- case GET_INPUT: {
+ case GET_INPUT_FOR_ATTR: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- audio_source_t inputSource = (audio_source_t) data.readInt32();
+ audio_attributes_t attr;
+ data.read(&attr, sizeof(audio_attributes_t));
+ audio_session_t session = (audio_session_t)data.readInt32();
uint32_t samplingRate = data.readInt32();
audio_format_t format = (audio_format_t) data.readInt32();
audio_channel_mask_t channelMask = data.readInt32();
- int audioSession = data.readInt32();
- audio_io_handle_t input = getInput(inputSource,
- samplingRate,
- format,
- channelMask,
- audioSession);
- reply->writeInt32(static_cast <int>(input));
+ audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
+ audio_io_handle_t input;
+ status_t status = getInputForAttr(&attr, &input, session,
+ samplingRate, format, channelMask,
+ flags);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeInt32(input);
+ }
return NO_ERROR;
} break;
case START_INPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32());
- reply->writeInt32(static_cast <uint32_t>(startInput(input)));
+ audio_session_t session = static_cast <audio_session_t>(data.readInt32());
+ reply->writeInt32(static_cast <uint32_t>(startInput(input, session)));
return NO_ERROR;
} break;
case STOP_INPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32());
- reply->writeInt32(static_cast <uint32_t>(stopInput(input)));
+ audio_session_t session = static_cast <audio_session_t>(data.readInt32());
+ reply->writeInt32(static_cast <uint32_t>(stopInput(input, session)));
return NO_ERROR;
} break;
case RELEASE_INPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t input = static_cast <audio_io_handle_t>(data.readInt32());
- releaseInput(input);
+ audio_session_t session = static_cast <audio_session_t>(data.readInt32());
+ releaseInput(input, session);
return NO_ERROR;
} break;
@@ -633,7 +996,7 @@ status_t BnAudioPolicyService::onTransact(
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
uint32_t inPastMs = (uint32_t)data.readInt32();
- reply->writeInt32( isStreamActive((audio_stream_type_t) stream, inPastMs) );
+ reply->writeInt32( isStreamActive(stream, inPastMs) );
return NO_ERROR;
} break;
@@ -641,7 +1004,7 @@ status_t BnAudioPolicyService::onTransact(
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
uint32_t inPastMs = (uint32_t)data.readInt32();
- reply->writeInt32( isStreamActiveRemotely((audio_stream_type_t) stream, inPastMs) );
+ reply->writeInt32( isStreamActiveRemotely(stream, inPastMs) );
return NO_ERROR;
} break;
@@ -656,16 +1019,18 @@ status_t BnAudioPolicyService::onTransact(
CHECK_INTERFACE(IAudioPolicyService, data, reply);
int audioSession = data.readInt32();
uint32_t count = data.readInt32();
+ if (count > AudioEffect::kMaxPreProcessing) {
+ count = AudioEffect::kMaxPreProcessing;
+ }
uint32_t retCount = count;
- effect_descriptor_t *descriptors =
- (effect_descriptor_t *)new char[count * sizeof(effect_descriptor_t)];
+ effect_descriptor_t *descriptors = new effect_descriptor_t[count];
status_t status = queryDefaultPreProcessing(audioSession, descriptors, &retCount);
reply->writeInt32(status);
if (status != NO_ERROR && status != NO_MEMORY) {
retCount = 0;
}
reply->writeInt32(retCount);
- if (retCount) {
+ if (retCount != 0) {
if (retCount < count) {
count = retCount;
}
@@ -684,6 +1049,156 @@ status_t BnAudioPolicyService::onTransact(
return NO_ERROR;
}
+ case LIST_AUDIO_PORTS: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_port_role_t role = (audio_port_role_t)data.readInt32();
+ audio_port_type_t type = (audio_port_type_t)data.readInt32();
+ unsigned int numPortsReq = data.readInt32();
+ unsigned int numPorts = numPortsReq;
+ unsigned int generation;
+ struct audio_port *ports =
+ (struct audio_port *)calloc(numPortsReq, sizeof(struct audio_port));
+ status_t status = listAudioPorts(role, type, &numPorts, ports, &generation);
+ reply->writeInt32(status);
+ reply->writeInt32(numPorts);
+
+ if (status == NO_ERROR) {
+ if (numPortsReq > numPorts) {
+ numPortsReq = numPorts;
+ }
+ reply->write(ports, numPortsReq * sizeof(struct audio_port));
+ reply->writeInt32(generation);
+ }
+ free(ports);
+ return NO_ERROR;
+ }
+
+ case GET_AUDIO_PORT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ struct audio_port port;
+ data.read(&port, sizeof(struct audio_port));
+ status_t status = getAudioPort(&port);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&port, sizeof(struct audio_port));
+ }
+ return NO_ERROR;
+ }
+
+ case CREATE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ struct audio_patch patch;
+ data.read(&patch, sizeof(struct audio_patch));
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = createAudioPatch(&patch, &handle);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&handle, sizeof(audio_patch_handle_t));
+ }
+ return NO_ERROR;
+ }
+
+ case RELEASE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = releaseAudioPatch(handle);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case LIST_AUDIO_PATCHES: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ unsigned int numPatchesReq = data.readInt32();
+ unsigned int numPatches = numPatchesReq;
+ unsigned int generation;
+ struct audio_patch *patches =
+ (struct audio_patch *)calloc(numPatchesReq,
+ sizeof(struct audio_patch));
+ status_t status = listAudioPatches(&numPatches, patches, &generation);
+ reply->writeInt32(status);
+ reply->writeInt32(numPatches);
+ if (status == NO_ERROR) {
+ if (numPatchesReq > numPatches) {
+ numPatchesReq = numPatches;
+ }
+ reply->write(patches, numPatchesReq * sizeof(struct audio_patch));
+ reply->writeInt32(generation);
+ }
+ free(patches);
+ return NO_ERROR;
+ }
+
+ case SET_AUDIO_PORT_CONFIG: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ struct audio_port_config config;
+ data.read(&config, sizeof(struct audio_port_config));
+ status_t status = setAudioPortConfig(&config);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case REGISTER_CLIENT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>(
+ data.readStrongBinder());
+ registerClient(client);
+ return NO_ERROR;
+ } break;
+
+ case ACQUIRE_SOUNDTRIGGER_SESSION: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>(
+ data.readStrongBinder());
+ audio_session_t session;
+ audio_io_handle_t ioHandle;
+ audio_devices_t device;
+ status_t status = acquireSoundTriggerSession(&session, &ioHandle, &device);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeInt32(session);
+ reply->writeInt32(ioHandle);
+ reply->writeInt32(device);
+ }
+ return NO_ERROR;
+ } break;
+
+ case RELEASE_SOUNDTRIGGER_SESSION: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>(
+ data.readStrongBinder());
+ audio_session_t session = (audio_session_t)data.readInt32();
+ status_t status = releaseSoundTriggerSession(session);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ } break;
+
+ case GET_PHONE_STATE: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ reply->writeInt32((int32_t)getPhoneState());
+ return NO_ERROR;
+ } break;
+
+ case REGISTER_POLICY_MIXES: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ bool registration = data.readInt32() == 1;
+ Vector<AudioMix> mixes;
+ size_t size = (size_t)data.readInt32();
+ if (size > MAX_MIXES_PER_POLICY) {
+ size = MAX_MIXES_PER_POLICY;
+ }
+ for (size_t i = 0; i < size; i++) {
+ AudioMix mix;
+ if (mix.readFromParcel((Parcel*)&data) == NO_ERROR) {
+ mixes.add(mix);
+ }
+ }
+ status_t status = registerPolicyMixes(mixes, registration);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ } break;
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
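
The new LIST_AUDIO_PORTS and LIST_AUDIO_PATCHES handlers above follow a request-count protocol: the caller says how many entries it can accept, and the service replies with the real total, at most that many entries, and a generation counter so the caller can tell when the list changed between queries. A minimal caller sketch, assuming the usual two-pass sizing (a first call with a zero count to learn the total) and that 'aps' is an sp<IAudioPolicyService> obtained elsewhere; both assumptions are illustrative, not taken from this diff:

    unsigned int numPorts = 0;
    unsigned int generation;
    // First pass: learn how many ports exist.
    status_t status = aps->listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE,
                                          &numPorts, NULL, &generation);
    if (status == NO_ERROR && numPorts != 0) {
        struct audio_port *ports =
                (struct audio_port *)calloc(numPorts, sizeof(struct audio_port));
        // Second pass: fetch up to numPorts entries.
        status = aps->listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE,
                                     &numPorts, ports, &generation);
        // ... consume ports[0 .. numPorts) ...
        free(ports);
    }
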
diff --git a/media/libmedia/IAudioPolicyServiceClient.cpp b/media/libmedia/IAudioPolicyServiceClient.cpp
new file mode 100644
index 0000000..e802277
--- /dev/null
+++ b/media/libmedia/IAudioPolicyServiceClient.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "IAudioPolicyServiceClient"
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <binder/Parcel.h>
+
+#include <media/IAudioPolicyServiceClient.h>
+#include <media/AudioSystem.h>
+
+namespace android {
+
+enum {
+ PORT_LIST_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
+ PATCH_LIST_UPDATE
+};
+
+class BpAudioPolicyServiceClient : public BpInterface<IAudioPolicyServiceClient>
+{
+public:
+ BpAudioPolicyServiceClient(const sp<IBinder>& impl)
+ : BpInterface<IAudioPolicyServiceClient>(impl)
+ {
+ }
+
+ void onAudioPortListUpdate()
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
+ remote()->transact(PORT_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+
+ void onAudioPatchListUpdate()
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
+ remote()->transact(PATCH_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+};
+
+IMPLEMENT_META_INTERFACE(AudioPolicyServiceClient, "android.media.IAudioPolicyServiceClient");
+
+// ----------------------------------------------------------------------
+
+status_t BnAudioPolicyServiceClient::onTransact(
+ uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+{
+ switch (code) {
+ case PORT_LIST_UPDATE: {
+ CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
+ onAudioPortListUpdate();
+ return NO_ERROR;
+ } break;
+ case PATCH_LIST_UPDATE: {
+ CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
+ onAudioPatchListUpdate();
+ return NO_ERROR;
+ } break;
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
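
IAudioPolicyServiceClient is the callback half of the port/patch API above: the service pushes oneway notifications (FLAG_ONEWAY, so it never blocks on the client) whenever the port or patch lists change. A minimal sketch of a receiver, assuming it is handed to IAudioPolicyService::registerClient() shown earlier; the class name is hypothetical:

    class MyPolicyClient : public BnAudioPolicyServiceClient {
    public:
        // Invoked by the audio policy service when the set of audio ports changes.
        virtual void onAudioPortListUpdate()  { /* re-query ports, refresh local cache */ }
        // Invoked when the set of audio patches changes.
        virtual void onAudioPatchListUpdate() { /* re-query patches */ }
    };
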
diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp
index 4a7de65..8a4a383 100644
--- a/media/libmedia/IAudioRecord.cpp
+++ b/media/libmedia/IAudioRecord.cpp
@@ -29,7 +29,7 @@
namespace android {
enum {
- GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
+ UNUSED_WAS_GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
START,
STOP
};
@@ -42,18 +42,6 @@ public:
{
}
- virtual sp<IMemory> getCblk() const
- {
- Parcel data, reply;
- sp<IMemory> cblk;
- data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor());
- status_t status = remote()->transact(GET_CBLK, data, &reply);
- if (status == NO_ERROR) {
- cblk = interface_cast<IMemory>(reply.readStrongBinder());
- }
- return cblk;
- }
-
virtual status_t start(int /*AudioSystem::sync_event_t*/ event, int triggerSession)
{
Parcel data, reply;
@@ -86,11 +74,6 @@ status_t BnAudioRecord::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch (code) {
- case GET_CBLK: {
- CHECK_INTERFACE(IAudioRecord, data, reply);
- reply->writeStrongBinder(getCblk()->asBinder());
- return NO_ERROR;
- } break;
case START: {
CHECK_INTERFACE(IAudioRecord, data, reply);
int /*AudioSystem::sync_event_t*/ event = data.readInt32();
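
Renaming GET_CBLK to UNUSED_WAS_GET_CBLK instead of deleting it keeps the remaining transaction codes stable: START and STOP still occupy the values they had before getCblk() was removed, so old and new binaries continue to agree on the wire protocol.

    // Codes implied by FIRST_CALL_TRANSACTION-based numbering:
    enum {
        UNUSED_WAS_GET_CBLK = IBinder::FIRST_CALL_TRANSACTION, // retired slot, never sent
        START,                                                 // same code as before
        STOP                                                   // same code as before
    };
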
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index 3cd9cfd..df209fd 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -60,6 +60,9 @@ public:
status_t status = remote()->transact(GET_CBLK, data, &reply);
if (status == NO_ERROR) {
cblk = interface_cast<IMemory>(reply.readStrongBinder());
+ if (cblk != 0 && cblk->pointer() == NULL) {
+ cblk.clear();
+ }
}
return cblk;
}
@@ -115,13 +118,16 @@ public:
virtual status_t allocateTimedBuffer(size_t size, sp<IMemory>* buffer) {
Parcel data, reply;
data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- data.writeInt32(size);
+ data.writeInt64(size);
status_t status = remote()->transact(ALLOCATE_TIMED_BUFFER,
data, &reply);
if (status == NO_ERROR) {
status = reply.readInt32();
if (status == NO_ERROR) {
*buffer = interface_cast<IMemory>(reply.readStrongBinder());
+ if (*buffer != 0 && (*buffer)->pointer() == NULL) {
+ (*buffer).clear();
+ }
}
}
return status;
@@ -131,7 +137,7 @@ public:
int64_t pts) {
Parcel data, reply;
data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor());
- data.writeStrongBinder(buffer->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(buffer));
data.writeInt64(pts);
status_t status = remote()->transact(QUEUE_TIMED_BUFFER,
data, &reply);
@@ -201,7 +207,7 @@ status_t BnAudioTrack::onTransact(
switch (code) {
case GET_CBLK: {
CHECK_INTERFACE(IAudioTrack, data, reply);
- reply->writeStrongBinder(getCblk()->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(getCblk()));
return NO_ERROR;
} break;
case START: {
@@ -232,10 +238,10 @@ status_t BnAudioTrack::onTransact(
case ALLOCATE_TIMED_BUFFER: {
CHECK_INTERFACE(IAudioTrack, data, reply);
sp<IMemory> buffer;
- status_t status = allocateTimedBuffer(data.readInt32(), &buffer);
+ status_t status = allocateTimedBuffer(data.readInt64(), &buffer);
reply->writeInt32(status);
if (status == NO_ERROR) {
- reply->writeStrongBinder(buffer->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(buffer));
}
return NO_ERROR;
} break;
diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp
index 98b183a..c26c5bf 100644
--- a/media/libmedia/ICrypto.cpp
+++ b/media/libmedia/ICrypto.cpp
@@ -33,6 +33,7 @@ enum {
DESTROY_PLUGIN,
REQUIRES_SECURE_COMPONENT,
DECRYPT,
+ NOTIFY_RESOLUTION,
};
struct BpCrypto : public BpInterface<ICrypto> {
@@ -131,7 +132,7 @@ struct BpCrypto : public BpInterface<ICrypto> {
data.write(subSamples, sizeof(CryptoPlugin::SubSample) * numSubSamples);
if (secure) {
- data.writeIntPtr((intptr_t)dstPtr);
+ data.writeInt64(static_cast<uint64_t>(reinterpret_cast<uintptr_t>(dstPtr)));
}
remote()->transact(DECRYPT, data, &reply);
@@ -149,6 +150,15 @@ struct BpCrypto : public BpInterface<ICrypto> {
return result;
}
+ virtual void notifyResolution(
+ uint32_t width, uint32_t height) {
+ Parcel data, reply;
+ data.writeInterfaceToken(ICrypto::getInterfaceDescriptor());
+ data.writeInt32(width);
+ data.writeInt32(height);
+ remote()->transact(NOTIFY_RESOLUTION, data, &reply);
+ }
+
private:
DISALLOW_EVIL_CONSTRUCTORS(BpCrypto);
};
@@ -249,7 +259,7 @@ status_t BnCrypto::onTransact(
void *dstPtr;
if (secure) {
- dstPtr = (void *)data.readIntPtr();
+ dstPtr = reinterpret_cast<void *>(static_cast<uintptr_t>(data.readInt64()));
} else {
dstPtr = malloc(totalSize);
}
@@ -290,10 +300,20 @@ status_t BnCrypto::onTransact(
return OK;
}
+ case NOTIFY_RESOLUTION:
+ {
+ CHECK_INTERFACE(ICrypto, data, reply);
+
+ int32_t width = data.readInt32();
+ int32_t height = data.readInt32();
+ notifyResolution(width, height);
+
+ return OK;
+ }
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
} // namespace android
-
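
The secure decrypt path above used to marshal the destination pointer with writeIntPtr(), whose width depends on the sending process. Widening through uintptr_t into a fixed 64-bit parcel field keeps the layout identical for 32-bit and 64-bit peers; the value is treated as an opaque handle and is only meaningful to the secure component. A sketch of the round trip, with 'data' standing in for the Parcel:

    void *dstPtr = /* opaque secure-buffer handle */ NULL;
    // Sender: always occupies 8 bytes on the wire.
    data.writeInt64(static_cast<uint64_t>(reinterpret_cast<uintptr_t>(dstPtr)));
    // Receiver: narrow back through uintptr_t on its own side.
    void *restored = reinterpret_cast<void *>(static_cast<uintptr_t>(data.readInt64()));
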
diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp
index f7a9a75..b08fa82 100644
--- a/media/libmedia/IDrm.cpp
+++ b/media/libmedia/IDrm.cpp
@@ -51,8 +51,12 @@ enum {
ENCRYPT,
DECRYPT,
SIGN,
+ SIGN_RSA,
VERIFY,
- SET_LISTENER
+ SET_LISTENER,
+ UNPROVISION_DEVICE,
+ GET_SECURE_STOP,
+ RELEASE_ALL_SECURE_STOPS
};
struct BpDrm : public BpInterface<IDrm> {
@@ -196,11 +200,15 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
- virtual status_t getProvisionRequest(Vector<uint8_t> &request,
+ virtual status_t getProvisionRequest(String8 const &certType,
+ String8 const &certAuthority,
+ Vector<uint8_t> &request,
String8 &defaultUrl) {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+ data.writeString8(certType);
+ data.writeString8(certAuthority);
remote()->transact(GET_PROVISION_REQUEST, data, &reply);
readVector(reply, request);
@@ -209,13 +217,27 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
- virtual status_t provideProvisionResponse(Vector<uint8_t> const &response) {
+ virtual status_t provideProvisionResponse(Vector<uint8_t> const &response,
+ Vector<uint8_t> &certificate,
+ Vector<uint8_t> &wrappedKey) {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
writeVector(data, response);
remote()->transact(PROVIDE_PROVISION_RESPONSE, data, &reply);
+ readVector(reply, certificate);
+ readVector(reply, wrappedKey);
+
+ return reply.readInt32();
+ }
+
+ virtual status_t unprovisionDevice() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ remote()->transact(UNPROVISION_DEVICE, data, &reply);
+
return reply.readInt32();
}
@@ -235,6 +257,17 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ writeVector(data, ssid);
+ remote()->transact(GET_SECURE_STOP, data, &reply);
+
+ readVector(reply, secureStop);
+ return reply.readInt32();
+ }
+
virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease) {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
@@ -245,6 +278,15 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
+ virtual status_t releaseAllSecureStops() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ remote()->transact(RELEASE_ALL_SECURE_STOPS, data, &reply);
+
+ return reply.readInt32();
+ }
+
virtual status_t getPropertyString(String8 const &name, String8 &value) const {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
@@ -386,10 +428,29 @@ struct BpDrm : public BpInterface<IDrm> {
return reply.readInt32();
}
+ virtual status_t signRSA(Vector<uint8_t> const &sessionId,
+ String8 const &algorithm,
+ Vector<uint8_t> const &message,
+ Vector<uint8_t> const &wrappedKey,
+ Vector<uint8_t> &signature) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
+
+ writeVector(data, sessionId);
+ data.writeString8(algorithm);
+ writeVector(data, message);
+ writeVector(data, wrappedKey);
+
+ remote()->transact(SIGN_RSA, data, &reply);
+ readVector(reply, signature);
+
+ return reply.readInt32();
+ }
+
virtual status_t setListener(const sp<IDrmClient>& listener) {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
- data.writeStrongBinder(listener->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(listener));
remote()->transact(SET_LISTENER, data, &reply);
return reply.readInt32();
}
@@ -563,9 +624,13 @@ status_t BnDrm::onTransact(
case GET_PROVISION_REQUEST:
{
CHECK_INTERFACE(IDrm, data, reply);
+ String8 certType = data.readString8();
+ String8 certAuthority = data.readString8();
+
Vector<uint8_t> request;
String8 defaultUrl;
- status_t result = getProvisionRequest(request, defaultUrl);
+ status_t result = getProvisionRequest(certType, certAuthority,
+ request, defaultUrl);
writeVector(reply, request);
reply->writeString8(defaultUrl);
reply->writeInt32(result);
@@ -576,8 +641,21 @@ status_t BnDrm::onTransact(
{
CHECK_INTERFACE(IDrm, data, reply);
Vector<uint8_t> response;
+ Vector<uint8_t> certificate;
+ Vector<uint8_t> wrappedKey;
readVector(data, response);
- reply->writeInt32(provideProvisionResponse(response));
+ status_t result = provideProvisionResponse(response, certificate, wrappedKey);
+ writeVector(reply, certificate);
+ writeVector(reply, wrappedKey);
+ reply->writeInt32(result);
+ return OK;
+ }
+
+ case UNPROVISION_DEVICE:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ status_t result = unprovisionDevice();
+ reply->writeInt32(result);
return OK;
}
@@ -599,6 +677,17 @@ status_t BnDrm::onTransact(
return OK;
}
+ case GET_SECURE_STOP:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ Vector<uint8_t> ssid, secureStop;
+ readVector(data, ssid);
+ status_t result = getSecureStop(ssid, secureStop);
+ writeVector(reply, secureStop);
+ reply->writeInt32(result);
+ return OK;
+ }
+
case RELEASE_SECURE_STOPS:
{
CHECK_INTERFACE(IDrm, data, reply);
@@ -608,6 +697,13 @@ status_t BnDrm::onTransact(
return OK;
}
+ case RELEASE_ALL_SECURE_STOPS:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ reply->writeInt32(releaseAllSecureStops());
+ return OK;
+ }
+
case GET_PROPERTY_STRING:
{
CHECK_INTERFACE(IDrm, data, reply);
@@ -725,6 +821,20 @@ status_t BnDrm::onTransact(
return OK;
}
+ case SIGN_RSA:
+ {
+ CHECK_INTERFACE(IDrm, data, reply);
+ Vector<uint8_t> sessionId, message, wrappedKey, signature;
+ readVector(data, sessionId);
+ String8 algorithm = data.readString8();
+ readVector(data, message);
+ readVector(data, wrappedKey);
+ uint32_t result = signRSA(sessionId, algorithm, message, wrappedKey, signature);
+ writeVector(reply, signature);
+ reply->writeInt32(result);
+ return OK;
+ }
+
case SET_LISTENER: {
CHECK_INTERFACE(IDrm, data, reply);
sp<IDrmClient> listener =
@@ -739,4 +849,3 @@ status_t BnDrm::onTransact(
}
} // namespace android
-
diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp
index a303a8f..c2fff78 100644
--- a/media/libmedia/IEffect.cpp
+++ b/media/libmedia/IEffect.cpp
@@ -117,6 +117,9 @@ public:
status_t status = remote()->transact(GET_CBLK, data, &reply);
if (status == NO_ERROR) {
cblk = interface_cast<IMemory>(reply.readStrongBinder());
+ if (cblk != 0 && cblk->pointer() == NULL) {
+ cblk.clear();
+ }
}
return cblk;
}
@@ -187,7 +190,7 @@ status_t BnEffect::onTransact(
case GET_CBLK: {
CHECK_INTERFACE(IEffect, data, reply);
- reply->writeStrongBinder(getCblk()->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(getCblk()));
return NO_ERROR;
} break;
diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp
index 1cf987a..9122f75 100644
--- a/media/libmedia/IHDCP.cpp
+++ b/media/libmedia/IHDCP.cpp
@@ -65,7 +65,7 @@ struct BpHDCP : public BpInterface<IHDCP> {
virtual status_t setObserver(const sp<IHDCPObserver> &observer) {
Parcel data, reply;
data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
- data.writeStrongBinder(observer->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(observer));
remote()->transact(HDCP_SET_OBSERVER, data, &reply);
return reply.readInt32();
}
diff --git a/media/libmedia/IMediaCodecList.cpp b/media/libmedia/IMediaCodecList.cpp
new file mode 100644
index 0000000..bf7c5ca
--- /dev/null
+++ b/media/libmedia/IMediaCodecList.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <binder/Parcel.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/IMediaCodecList.h>
+#include <media/MediaCodecInfo.h>
+
+#include <utils/Errors.h> // for status_t
+
+namespace android {
+
+enum {
+ CREATE = IBinder::FIRST_CALL_TRANSACTION,
+ COUNT_CODECS,
+ GET_CODEC_INFO,
+ FIND_CODEC_BY_TYPE,
+ FIND_CODEC_BY_NAME,
+};
+
+class BpMediaCodecList: public BpInterface<IMediaCodecList>
+{
+public:
+ BpMediaCodecList(const sp<IBinder>& impl)
+ : BpInterface<IMediaCodecList>(impl)
+ {
+ }
+
+ virtual size_t countCodecs() const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ remote()->transact(COUNT_CODECS, data, &reply);
+ return static_cast<size_t>(reply.readInt32());
+ }
+
+ virtual sp<MediaCodecInfo> getCodecInfo(size_t index) const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ data.writeInt32(index);
+ remote()->transact(GET_CODEC_INFO, data, &reply);
+ status_t err = reply.readInt32();
+ if (err == OK) {
+ return MediaCodecInfo::FromParcel(reply);
+ } else {
+ return NULL;
+ }
+ }
+
+ virtual ssize_t findCodecByType(
+ const char *type, bool encoder, size_t startIndex = 0) const
+ {
+ if (startIndex > INT32_MAX) {
+ return NAME_NOT_FOUND;
+ }
+
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ data.writeCString(type);
+ data.writeInt32(encoder);
+ data.writeInt32(startIndex);
+ remote()->transact(FIND_CODEC_BY_TYPE, data, &reply);
+ return static_cast<ssize_t>(reply.readInt32());
+ }
+
+ virtual ssize_t findCodecByName(const char *name) const
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor());
+ data.writeCString(name);
+ remote()->transact(FIND_CODEC_BY_NAME, data, &reply);
+ return static_cast<ssize_t>(reply.readInt32());
+ }
+};
+
+IMPLEMENT_META_INTERFACE(MediaCodecList, "android.media.IMediaCodecList");
+
+// ----------------------------------------------------------------------
+
+status_t BnMediaCodecList::onTransact(
+ uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+{
+ switch (code) {
+ case COUNT_CODECS:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ size_t count = countCodecs();
+ if (count > INT32_MAX) {
+ count = INT32_MAX;
+ }
+ reply->writeInt32(count);
+ return NO_ERROR;
+ }
+ break;
+
+ case GET_CODEC_INFO:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ size_t index = static_cast<size_t>(data.readInt32());
+ const sp<MediaCodecInfo> info = getCodecInfo(index);
+ if (info != NULL) {
+ reply->writeInt32(OK);
+ info->writeToParcel(reply);
+ } else {
+ reply->writeInt32(-ERANGE);
+ }
+ return NO_ERROR;
+ }
+ break;
+
+ case FIND_CODEC_BY_TYPE:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ const char *type = data.readCString();
+ bool isEncoder = static_cast<bool>(data.readInt32());
+ size_t startIndex = static_cast<size_t>(data.readInt32());
+ ssize_t index = findCodecByType(type, isEncoder, startIndex);
+ if (index > INT32_MAX || index < 0) {
+ index = NAME_NOT_FOUND;
+ }
+ reply->writeInt32(index);
+ return NO_ERROR;
+ }
+ break;
+
+ case FIND_CODEC_BY_NAME:
+ {
+ CHECK_INTERFACE(IMediaCodecList, data, reply);
+ const char *name = data.readCString();
+ ssize_t index = findCodecByName(name);
+ if (index > INT32_MAX || index < 0) {
+ index = NAME_NOT_FOUND;
+ }
+ reply->writeInt32(index);
+ return NO_ERROR;
+ }
+ break;
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
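
BpMediaCodecList lets clients query the codec catalog over Binder rather than building it locally. A hypothetical helper showing how the proxy is used; the sp<IMediaCodecList> would normally come from IMediaPlayerService::getCodecList(), added further below in this commit:

    static void logCodecNames(const sp<IMediaCodecList> &list) {
        for (size_t i = 0; i < list->countCodecs(); ++i) {
            const sp<MediaCodecInfo> info = list->getCodecInfo(i);
            if (info != NULL) {
                ALOGV("codec %zu: %s", i, info->getCodecName());
            }
        }
    }
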
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index 9db5b1b..38e9ca0 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -75,7 +75,7 @@ IMediaDeathNotifier::removeObitRecipient(const wp<IMediaDeathNotifier>& recipien
}
void
-IMediaDeathNotifier::DeathNotifier::binderDied(const wp<IBinder>& who) {
+IMediaDeathNotifier::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
ALOGW("media server died");
// Need to do this with the lock held
@@ -104,7 +104,7 @@ IMediaDeathNotifier::DeathNotifier::~DeathNotifier()
Mutex::Autolock _l(sServiceLock);
sObitRecipients.clear();
if (sMediaPlayerService != 0) {
- sMediaPlayerService->asBinder()->unlinkToDeath(this);
+ IInterface::asBinder(sMediaPlayerService)->unlinkToDeath(this);
}
}
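
The obj->asBinder() to IInterface::asBinder(obj) substitution recurs throughout this commit; the static helper can be called on a NULL sp<> (it simply returns NULL), whereas the member call would dereference the null pointer. A minimal illustration:

    sp<IMediaPlayerService> svc;                     // not yet connected, may be NULL
    sp<IBinder> binder = IInterface::asBinder(svc);  // safe: binder is simply NULL
    // svc->asBinder() at this point would have crashed on a NULL dereference.
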
diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp
new file mode 100644
index 0000000..7e26ee6
--- /dev/null
+++ b/media/libmedia/IMediaHTTPConnection.cpp
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IMediaHTTPConnection"
+#include <utils/Log.h>
+
+#include <media/IMediaHTTPConnection.h>
+
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+enum {
+ CONNECT = IBinder::FIRST_CALL_TRANSACTION,
+ DISCONNECT,
+ READ_AT,
+ GET_SIZE,
+ GET_MIME_TYPE,
+ GET_URI
+};
+
+struct BpMediaHTTPConnection : public BpInterface<IMediaHTTPConnection> {
+ BpMediaHTTPConnection(const sp<IBinder> &impl)
+ : BpInterface<IMediaHTTPConnection>(impl) {
+ }
+
+ virtual bool connect(
+ const char *uri, const KeyedVector<String8, String8> *headers) {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ String16 tmp(uri);
+ data.writeString16(tmp);
+
+ tmp = String16("");
+ if (headers != NULL) {
+ for (size_t i = 0; i < headers->size(); ++i) {
+ String16 key(headers->keyAt(i).string());
+ String16 val(headers->valueAt(i).string());
+
+ tmp.append(key);
+ tmp.append(String16(": "));
+ tmp.append(val);
+ tmp.append(String16("\r\n"));
+ }
+ }
+ data.writeString16(tmp);
+
+ remote()->transact(CONNECT, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ sp<IBinder> binder = reply.readStrongBinder();
+ mMemory = interface_cast<IMemory>(binder);
+
+ return mMemory != NULL;
+ }
+
+ virtual void disconnect() {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(DISCONNECT, data, &reply);
+ }
+
+ virtual ssize_t readAt(off64_t offset, void *buffer, size_t size) {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ data.writeInt64(offset);
+ data.writeInt32(size);
+
+ status_t err = remote()->transact(READ_AT, data, &reply);
+ if (err != OK) {
+ ALOGE("remote readAt failed");
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t len = reply.readInt32();
+
+ if (len > 0) {
+ memcpy(buffer, mMemory->pointer(), len);
+ }
+
+ return len;
+ }
+
+ virtual off64_t getSize() {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(GET_SIZE, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ return reply.readInt64();
+ }
+
+ virtual status_t getMIMEType(String8 *mimeType) {
+ *mimeType = String8("");
+
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(GET_MIME_TYPE, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ *mimeType = String8(reply.readString16());
+
+ return OK;
+ }
+
+ virtual status_t getUri(String8 *uri) {
+ *uri = String8("");
+
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(GET_URI, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ *uri = String8(reply.readString16());
+
+ return OK;
+ }
+
+private:
+ sp<IMemory> mMemory;
+};
+
+IMPLEMENT_META_INTERFACE(
+ MediaHTTPConnection, "android.media.IMediaHTTPConnection");
+
+} // namespace android
+
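
IMediaHTTPConnection returns read data through shared memory rather than copying it through the reply parcel: connect() receives an IMemory from the remote side, and readAt() only reads back a byte count before memcpy'ing that many bytes out of the shared region. A hypothetical caller sketch, assuming 'conn' came from IMediaHTTPService::makeHTTPConnection():

    char buf[4096];
    ssize_t n = conn->readAt(/*offset*/ 0, buf, sizeof(buf));  // payload arrives via the shared IMemory
    if (n > 0) {
        // buf now holds the first n bytes of the resource
    }
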
diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp
new file mode 100644
index 0000000..1260582
--- /dev/null
+++ b/media/libmedia/IMediaHTTPService.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IMediaHTTPService"
+#include <utils/Log.h>
+
+#include <media/IMediaHTTPService.h>
+
+#include <binder/Parcel.h>
+#include <media/IMediaHTTPConnection.h>
+
+namespace android {
+
+enum {
+ MAKE_HTTP = IBinder::FIRST_CALL_TRANSACTION,
+};
+
+struct BpMediaHTTPService : public BpInterface<IMediaHTTPService> {
+ BpMediaHTTPService(const sp<IBinder> &impl)
+ : BpInterface<IMediaHTTPService>(impl) {
+ }
+
+ virtual sp<IMediaHTTPConnection> makeHTTPConnection() {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPService::getInterfaceDescriptor());
+
+ remote()->transact(MAKE_HTTP, data, &reply);
+
+ status_t err = reply.readInt32();
+
+ if (err != OK) {
+ return NULL;
+ }
+
+ return interface_cast<IMediaHTTPConnection>(reply.readStrongBinder());
+ }
+};
+
+IMPLEMENT_META_INTERFACE(
+ MediaHTTPService, "android.media.IMediaHTTPService");
+
+} // namespace android
+
diff --git a/media/libmedia/IMediaLogService.cpp b/media/libmedia/IMediaLogService.cpp
index 33239a7..a4af7b7 100644
--- a/media/libmedia/IMediaLogService.cpp
+++ b/media/libmedia/IMediaLogService.cpp
@@ -42,8 +42,8 @@ public:
virtual void registerWriter(const sp<IMemory>& shared, size_t size, const char *name) {
Parcel data, reply;
data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor());
- data.writeStrongBinder(shared->asBinder());
- data.writeInt32((int32_t) size);
+ data.writeStrongBinder(IInterface::asBinder(shared));
+ data.writeInt64((int64_t) size);
data.writeCString(name);
status_t status = remote()->transact(REGISTER_WRITER, data, &reply);
// FIXME ignores status
@@ -52,7 +52,7 @@ public:
virtual void unregisterWriter(const sp<IMemory>& shared) {
Parcel data, reply;
data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor());
- data.writeStrongBinder(shared->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(shared));
status_t status = remote()->transact(UNREGISTER_WRITER, data, &reply);
// FIXME ignores status
}
@@ -71,7 +71,7 @@ status_t BnMediaLogService::onTransact(
case REGISTER_WRITER: {
CHECK_INTERFACE(IMediaLogService, data, reply);
sp<IMemory> shared = interface_cast<IMemory>(data.readStrongBinder());
- size_t size = (size_t) data.readInt32();
+ size_t size = (size_t) data.readInt64();
const char *name = data.readCString();
registerWriter(shared, size, name);
return NO_ERROR;
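
Here, and in the IAudioTrack, IMediaMetadataRetriever and IOMX hunks, size_t values switch from writeInt32/readInt32 to writeInt64/readInt64 so that a 32-bit client and a 64-bit service (or the reverse) agree on the parcel layout. Sketch of the convention, with 'data' as the outgoing Parcel and 'reply' as the one being read:

    size_t size = 4096;                           // any size_t payload length
    data.writeInt64((int64_t) size);              // always 8 bytes on the wire
    // ... on the receiving side ...
    size_t received = (size_t) reply.readInt64();
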
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index bb066a0..aa2665a 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -15,9 +15,12 @@
** limitations under the License.
*/
+#include <inttypes.h>
#include <stdint.h>
#include <sys/types.h>
+
#include <binder/Parcel.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaMetadataRetriever.h>
#include <utils/String8.h>
#include <utils/KeyedVector.h>
@@ -84,17 +87,23 @@ public:
}
status_t setDataSource(
- const char *srcUrl, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *srcUrl,
+ const KeyedVector<String8, String8> *headers)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
+ data.writeInt32(httpService != NULL);
+ if (httpService != NULL) {
+ data.writeStrongBinder(IInterface::asBinder(httpService));
+ }
data.writeCString(srcUrl);
if (headers == NULL) {
data.writeInt32(0);
} else {
// serialize the headers
- data.writeInt32(headers->size());
+ data.writeInt64(headers->size());
for (size_t i = 0; i < headers->size(); ++i) {
data.writeString8(headers->keyAt(i));
data.writeString8(headers->valueAt(i));
@@ -118,7 +127,7 @@ public:
sp<IMemory> getFrameAtTime(int64_t timeUs, int option)
{
- ALOGV("getTimeAtTime: time(%lld us) and option(%d)", timeUs, option);
+ ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
data.writeInt64(timeUs);
@@ -195,18 +204,26 @@ status_t BnMediaMetadataRetriever::onTransact(
} break;
case SET_DATA_SOURCE_URL: {
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
+
+ sp<IMediaHTTPService> httpService;
+ if (data.readInt32()) {
+ httpService =
+ interface_cast<IMediaHTTPService>(data.readStrongBinder());
+ }
+
const char* srcUrl = data.readCString();
KeyedVector<String8, String8> headers;
- int32_t numHeaders = data.readInt32();
- for (int i = 0; i < numHeaders; ++i) {
+ size_t numHeaders = (size_t) data.readInt64();
+ for (size_t i = 0; i < numHeaders; ++i) {
String8 key = data.readString8();
String8 value = data.readString8();
headers.add(key, value);
}
reply->writeInt32(
- setDataSource(srcUrl, numHeaders > 0 ? &headers : NULL));
+ setDataSource(
+ httpService, srcUrl, numHeaders > 0 ? &headers : NULL));
return NO_ERROR;
} break;
@@ -222,14 +239,14 @@ status_t BnMediaMetadataRetriever::onTransact(
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
int64_t timeUs = data.readInt64();
int option = data.readInt32();
- ALOGV("getTimeAtTime: time(%lld us) and option(%d)", timeUs, option);
+ ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
setSchedPolicy(data);
#endif
sp<IMemory> bitmap = getFrameAtTime(timeUs, option);
if (bitmap != 0) { // Don't send NULL across the binder interface
reply->writeInt32(NO_ERROR);
- reply->writeStrongBinder(bitmap->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(bitmap));
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
@@ -246,7 +263,7 @@ status_t BnMediaMetadataRetriever::onTransact(
sp<IMemory> albumArt = extractAlbumArt();
if (albumArt != 0) { // Don't send NULL across the binder interface
reply->writeInt32(NO_ERROR);
- reply->writeStrongBinder(albumArt->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(albumArt));
} else {
reply->writeInt32(UNKNOWN_ERROR);
}
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index e79bcd2..7f3e5cc 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -21,6 +21,7 @@
#include <binder/Parcel.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayer.h>
#include <media/IStreamSource.h>
@@ -75,11 +76,17 @@ public:
remote()->transact(DISCONNECT, data, &reply);
}
- status_t setDataSource(const char* url,
+ status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
const KeyedVector<String8, String8>* headers)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+ data.writeInt32(httpService != NULL);
+ if (httpService != NULL) {
+ data.writeStrongBinder(IInterface::asBinder(httpService));
+ }
data.writeCString(url);
if (headers == NULL) {
data.writeInt32(0);
@@ -108,7 +115,7 @@ public:
status_t setDataSource(const sp<IStreamSource> &source) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
- data.writeStrongBinder(source->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(source));
remote()->transact(SET_DATA_SOURCE_STREAM, data, &reply);
return reply.readInt32();
}
@@ -118,7 +125,7 @@ public:
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
- sp<IBinder> b(bufferProducer->asBinder());
+ sp<IBinder> b(IInterface::asBinder(bufferProducer));
data.writeStrongBinder(b);
remote()->transact(SET_VIDEO_SURFACETEXTURE, data, &reply);
return reply.readInt32();
@@ -316,7 +323,7 @@ public:
status_t setNextPlayer(const sp<IMediaPlayer>& player) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
- sp<IBinder> b(player->asBinder());
+ sp<IBinder> b(IInterface::asBinder(player));
data.writeStrongBinder(b);
remote()->transact(SET_NEXT_PLAYER, data, &reply);
return reply.readInt32();
@@ -355,6 +362,13 @@ status_t BnMediaPlayer::onTransact(
} break;
case SET_DATA_SOURCE_URL: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
+
+ sp<IMediaHTTPService> httpService;
+ if (data.readInt32()) {
+ httpService =
+ interface_cast<IMediaHTTPService>(data.readStrongBinder());
+ }
+
const char* url = data.readCString();
KeyedVector<String8, String8> headers;
int32_t numHeaders = data.readInt32();
@@ -363,7 +377,8 @@ status_t BnMediaPlayer::onTransact(
String8 value = data.readString8();
headers.add(key, value);
}
- reply->writeInt32(setDataSource(url, numHeaders > 0 ? &headers : NULL));
+ reply->writeInt32(setDataSource(
+ httpService, url, numHeaders > 0 ? &headers : NULL));
return NO_ERROR;
} break;
case SET_DATA_SOURCE_FD: {
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 3c22b4c..feea267 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -23,6 +23,8 @@
#include <media/ICrypto.h>
#include <media/IDrm.h>
#include <media/IHDCP.h>
+#include <media/IMediaCodecList.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/IMediaRecorder.h>
#include <media/IOMX.h>
@@ -37,8 +39,6 @@ namespace android {
enum {
CREATE = IBinder::FIRST_CALL_TRANSACTION,
- DECODE_URL,
- DECODE_FD,
CREATE_MEDIA_RECORDER,
CREATE_METADATA_RETRIEVER,
GET_OMX,
@@ -48,7 +48,7 @@ enum {
ADD_BATTERY_DATA,
PULL_BATTERY_DATA,
LISTEN_FOR_REMOTE_DISPLAY,
- UPDATE_PROXY_CONFIG,
+ GET_CODEC_LIST,
};
class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@@ -71,7 +71,7 @@ public:
const sp<IMediaPlayerClient>& client, int audioSessionId) {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- data.writeStrongBinder(client->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(client));
data.writeInt32(audioSessionId);
remote()->transact(CREATE, data, &reply);
@@ -86,50 +86,6 @@ public:
return interface_cast<IMediaRecorder>(reply.readStrongBinder());
}
- virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- data.writeCString(url);
- data.writeStrongBinder(heap->asBinder());
- status_t status = remote()->transact(DECODE_URL, data, &reply);
- if (status == NO_ERROR) {
- status = (status_t)reply.readInt32();
- if (status == NO_ERROR) {
- *pSampleRate = uint32_t(reply.readInt32());
- *pNumChannels = reply.readInt32();
- *pFormat = (audio_format_t)reply.readInt32();
- *pSize = (size_t)reply.readInt32();
- }
- }
- return status;
- }
-
- virtual status_t decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate,
- int* pNumChannels, audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- data.writeFileDescriptor(fd);
- data.writeInt64(offset);
- data.writeInt64(length);
- data.writeStrongBinder(heap->asBinder());
- status_t status = remote()->transact(DECODE_FD, data, &reply);
- if (status == NO_ERROR) {
- status = (status_t)reply.readInt32();
- if (status == NO_ERROR) {
- *pSampleRate = uint32_t(reply.readInt32());
- *pNumChannels = reply.readInt32();
- *pFormat = (audio_format_t)reply.readInt32();
- *pSize = (size_t)reply.readInt32();
- }
- }
- return status;
- }
-
virtual sp<IOMX> getOMX() {
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
@@ -177,29 +133,17 @@ public:
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- data.writeStrongBinder(client->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(client));
data.writeString8(iface);
remote()->transact(LISTEN_FOR_REMOTE_DISPLAY, data, &reply);
return interface_cast<IRemoteDisplay>(reply.readStrongBinder());
}
- virtual status_t updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
+ virtual sp<IMediaCodecList> getCodecList() const {
Parcel data, reply;
-
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- if (host == NULL) {
- data.writeInt32(0);
- } else {
- data.writeInt32(1);
- data.writeCString(host);
- data.writeInt32(port);
- data.writeCString(exclusionList);
- }
-
- remote()->transact(UPDATE_PROXY_CONFIG, data, &reply);
-
- return reply.readInt32();
+ remote()->transact(GET_CODEC_LIST, data, &reply);
+ return interface_cast<IMediaCodecList>(reply.readStrongBinder());
}
};
@@ -217,83 +161,44 @@ status_t BnMediaPlayerService::onTransact(
interface_cast<IMediaPlayerClient>(data.readStrongBinder());
int audioSessionId = data.readInt32();
sp<IMediaPlayer> player = create(client, audioSessionId);
- reply->writeStrongBinder(player->asBinder());
- return NO_ERROR;
- } break;
- case DECODE_URL: {
- CHECK_INTERFACE(IMediaPlayerService, data, reply);
- const char* url = data.readCString();
- sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
- uint32_t sampleRate;
- int numChannels;
- audio_format_t format;
- size_t size;
- status_t status = decode(url, &sampleRate, &numChannels, &format, heap, &size);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->writeInt32(sampleRate);
- reply->writeInt32(numChannels);
- reply->writeInt32((int32_t)format);
- reply->writeInt32((int32_t)size);
- }
- return NO_ERROR;
- } break;
- case DECODE_FD: {
- CHECK_INTERFACE(IMediaPlayerService, data, reply);
- int fd = dup(data.readFileDescriptor());
- int64_t offset = data.readInt64();
- int64_t length = data.readInt64();
- sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
- uint32_t sampleRate;
- int numChannels;
- audio_format_t format;
- size_t size;
- status_t status = decode(fd, offset, length, &sampleRate, &numChannels, &format,
- heap, &size);
- reply->writeInt32(status);
- if (status == NO_ERROR) {
- reply->writeInt32(sampleRate);
- reply->writeInt32(numChannels);
- reply->writeInt32((int32_t)format);
- reply->writeInt32((int32_t)size);
- }
+ reply->writeStrongBinder(IInterface::asBinder(player));
return NO_ERROR;
} break;
case CREATE_MEDIA_RECORDER: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<IMediaRecorder> recorder = createMediaRecorder();
- reply->writeStrongBinder(recorder->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(recorder));
return NO_ERROR;
} break;
case CREATE_METADATA_RETRIEVER: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<IMediaMetadataRetriever> retriever = createMetadataRetriever();
- reply->writeStrongBinder(retriever->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(retriever));
return NO_ERROR;
} break;
case GET_OMX: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<IOMX> omx = getOMX();
- reply->writeStrongBinder(omx->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(omx));
return NO_ERROR;
} break;
case MAKE_CRYPTO: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<ICrypto> crypto = makeCrypto();
- reply->writeStrongBinder(crypto->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(crypto));
return NO_ERROR;
} break;
case MAKE_DRM: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
sp<IDrm> drm = makeDrm();
- reply->writeStrongBinder(drm->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(drm));
return NO_ERROR;
} break;
case MAKE_HDCP: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
bool createEncryptionModule = data.readInt32();
sp<IHDCP> hdcp = makeHDCP(createEncryptionModule);
- reply->writeStrongBinder(hdcp->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(hdcp));
return NO_ERROR;
} break;
case ADD_BATTERY_DATA: {
@@ -313,27 +218,15 @@ status_t BnMediaPlayerService::onTransact(
interface_cast<IRemoteDisplayClient>(data.readStrongBinder()));
String8 iface(data.readString8());
sp<IRemoteDisplay> display(listenForRemoteDisplay(client, iface));
- reply->writeStrongBinder(display->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(display));
return NO_ERROR;
} break;
- case UPDATE_PROXY_CONFIG:
- {
+ case GET_CODEC_LIST: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
-
- const char *host = NULL;
- int32_t port = 0;
- const char *exclusionList = NULL;
-
- if (data.readInt32()) {
- host = data.readCString();
- port = data.readInt32();
- exclusionList = data.readCString();
- }
-
- reply->writeInt32(updateProxyConfig(host, port, exclusionList));
-
- return OK;
- }
+ sp<IMediaCodecList> mcl = getCodecList();
+ reply->writeStrongBinder(IInterface::asBinder(mcl));
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 8e58162..a733b68 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -17,6 +17,10 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "IMediaRecorder"
+
+#include <inttypes.h>
+#include <unistd.h>
+
#include <utils/Log.h>
#include <binder/Parcel.h>
#include <camera/ICamera.h>
@@ -24,8 +28,6 @@
#include <media/IMediaRecorder.h>
#include <gui/Surface.h>
#include <gui/IGraphicBufferProducer.h>
-#include <unistd.h>
-
namespace android {
@@ -68,8 +70,8 @@ public:
ALOGV("setCamera(%p,%p)", camera.get(), proxy.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeStrongBinder(camera->asBinder());
- data.writeStrongBinder(proxy->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(camera));
+ data.writeStrongBinder(IInterface::asBinder(proxy));
remote()->transact(SET_CAMERA, data, &reply);
return reply.readInt32();
}
@@ -92,7 +94,7 @@ public:
ALOGV("setPreviewSurface(%p)", surface.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeStrongBinder(surface->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(surface));
remote()->transact(SET_PREVIEW_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -167,7 +169,7 @@ public:
}
status_t setOutputFile(int fd, int64_t offset, int64_t length) {
- ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
data.writeFileDescriptor(fd);
@@ -213,7 +215,7 @@ public:
ALOGV("setListener(%p)", listener.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeStrongBinder(listener->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(listener));
remote()->transact(SET_LISTENER, data, &reply);
return reply.readInt32();
}
@@ -466,7 +468,7 @@ status_t BnMediaRecorder::onTransact(
int returnedNull= (surfaceMediaSource == NULL) ? 1 : 0 ;
reply->writeInt32(returnedNull);
if (!returnedNull) {
- reply->writeStrongBinder(surfaceMediaSource->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(surfaceMediaSource));
}
return NO_ERROR;
} break;
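
The logging changes in this file (and in IMediaMetadataRetriever.cpp above) replace %lld with the <inttypes.h> PRId64 macro, which expands to the correct conversion specifier for int64_t on both 32-bit and 64-bit builds. Minimal sketch:

    #include <inttypes.h>

    int64_t offsetUs = 1234567;
    ALOGV("offset(%" PRId64 " us)", offsetUs);   // %lld or %ld, whichever matches int64_t
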
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index 71ce320..e208df9 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -54,6 +54,7 @@ enum {
GET_GRAPHIC_BUFFER_USAGE,
SET_INTERNAL_OPTION,
UPDATE_GRAPHIC_BUFFER_IN_META,
+ CONFIGURE_VIDEO_TUNNEL_MODE,
};
class BpOMX : public BpInterface<IOMX> {
@@ -65,7 +66,7 @@ public:
virtual bool livesLocally(node_id node, pid_t pid) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(pid);
remote()->transact(LIVES_LOCALLY, data, &reply);
@@ -99,12 +100,12 @@ public:
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeCString(name);
- data.writeStrongBinder(observer->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(observer));
remote()->transact(ALLOCATE_NODE, data, &reply);
status_t err = reply.readInt32();
if (err == OK) {
- *node = (void*)reply.readIntPtr();
+ *node = (node_id)reply.readInt32();
} else {
*node = 0;
}
@@ -115,7 +116,7 @@ public:
virtual status_t freeNode(node_id node) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
remote()->transact(FREE_NODE, data, &reply);
return reply.readInt32();
@@ -125,7 +126,7 @@ public:
node_id node, OMX_COMMANDTYPE cmd, OMX_S32 param) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(cmd);
data.writeInt32(param);
remote()->transact(SEND_COMMAND, data, &reply);
@@ -138,9 +139,9 @@ public:
void *params, size_t size) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(index);
- data.writeInt32(size);
+ data.writeInt64(size);
data.write(params, size);
remote()->transact(GET_PARAMETER, data, &reply);
@@ -159,9 +160,9 @@ public:
const void *params, size_t size) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(index);
- data.writeInt32(size);
+ data.writeInt64(size);
data.write(params, size);
remote()->transact(SET_PARAMETER, data, &reply);
@@ -173,9 +174,9 @@ public:
void *params, size_t size) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(index);
- data.writeInt32(size);
+ data.writeInt64(size);
data.write(params, size);
remote()->transact(GET_CONFIG, data, &reply);
@@ -194,9 +195,9 @@ public:
const void *params, size_t size) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(index);
- data.writeInt32(size);
+ data.writeInt64(size);
data.write(params, size);
remote()->transact(SET_CONFIG, data, &reply);
@@ -207,7 +208,7 @@ public:
node_id node, OMX_STATETYPE* state) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
remote()->transact(GET_STATE, data, &reply);
*state = static_cast<OMX_STATETYPE>(reply.readInt32());
@@ -218,7 +219,7 @@ public:
node_id node, OMX_U32 port_index, OMX_BOOL enable) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
data.writeInt32((uint32_t)enable);
remote()->transact(ENABLE_GRAPHIC_BUFFERS, data, &reply);
@@ -231,7 +232,7 @@ public:
node_id node, OMX_U32 port_index, OMX_U32* usage) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
remote()->transact(GET_GRAPHIC_BUFFER_USAGE, data, &reply);
@@ -245,9 +246,9 @@ public:
buffer_id *buffer) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
- data.writeStrongBinder(params->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(params));
remote()->transact(USE_BUFFER, data, &reply);
status_t err = reply.readInt32();
@@ -257,7 +258,7 @@ public:
return err;
}
- *buffer = (void*)reply.readIntPtr();
+ *buffer = (buffer_id)reply.readInt32();
return err;
}
@@ -268,7 +269,7 @@ public:
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
data.write(*graphicBuffer);
remote()->transact(USE_GRAPHIC_BUFFER, data, &reply);
@@ -280,7 +281,7 @@ public:
return err;
}
- *buffer = (void*)reply.readIntPtr();
+ *buffer = (buffer_id)reply.readInt32();
return err;
}
@@ -290,10 +291,10 @@ public:
const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
data.write(*graphicBuffer);
- data.writeIntPtr((intptr_t)buffer);
+ data.writeInt32((int32_t)buffer);
remote()->transact(UPDATE_GRAPHIC_BUFFER_IN_META, data, &reply);
status_t err = reply.readInt32();
@@ -306,7 +307,7 @@ public:
Parcel data, reply;
status_t err;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
err = remote()->transact(CREATE_INPUT_SURFACE, data, &reply);
if (err != OK) {
@@ -329,7 +330,7 @@ public:
Parcel data, reply;
status_t err;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
err = remote()->transact(SIGNAL_END_OF_INPUT_STREAM, data, &reply);
if (err != OK) {
ALOGW("binder transaction failed: %d", err);
@@ -343,7 +344,7 @@ public:
node_id node, OMX_U32 port_index, OMX_BOOL enable) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
data.writeInt32((uint32_t)enable);
remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
@@ -357,7 +358,7 @@ public:
OMX_U32 max_width, OMX_U32 max_height) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
data.writeInt32((int32_t)enable);
data.writeInt32(max_width);
@@ -368,14 +369,33 @@ public:
return err;
}
+ virtual status_t configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle ) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeInt32((int32_t)node);
+ data.writeInt32(portIndex);
+ data.writeInt32((int32_t)tunneled);
+ data.writeInt32(audioHwSync);
+ remote()->transact(CONFIGURE_VIDEO_TUNNEL_MODE, data, &reply);
+
+ status_t err = reply.readInt32();
+ if (sidebandHandle) {
+ *sidebandHandle = (native_handle_t *)reply.readNativeHandle();
+ }
+ return err;
+ }
+
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
- data.writeInt32(size);
+ data.writeInt64(size);
remote()->transact(ALLOC_BUFFER, data, &reply);
status_t err = reply.readInt32();
@@ -385,8 +405,8 @@ public:
return err;
}
- *buffer = (void *)reply.readIntPtr();
- *buffer_data = (void *)reply.readIntPtr();
+ *buffer = (buffer_id)reply.readInt32();
+ *buffer_data = (void *)reply.readInt64();
return err;
}
@@ -396,9 +416,9 @@ public:
buffer_id *buffer) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
- data.writeStrongBinder(params->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(params));
remote()->transact(ALLOC_BUFFER_WITH_BACKUP, data, &reply);
status_t err = reply.readInt32();
@@ -408,7 +428,7 @@ public:
return err;
}
- *buffer = (void*)reply.readIntPtr();
+ *buffer = (buffer_id)reply.readInt32();
return err;
}
@@ -417,9 +437,9 @@ public:
node_id node, OMX_U32 port_index, buffer_id buffer) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
- data.writeIntPtr((intptr_t)buffer);
+ data.writeInt32((int32_t)buffer);
remote()->transact(FREE_BUFFER, data, &reply);
return reply.readInt32();
@@ -428,8 +448,8 @@ public:
virtual status_t fillBuffer(node_id node, buffer_id buffer) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
- data.writeIntPtr((intptr_t)buffer);
+ data.writeInt32((int32_t)node);
+ data.writeInt32((int32_t)buffer);
remote()->transact(FILL_BUFFER, data, &reply);
return reply.readInt32();
@@ -442,8 +462,8 @@ public:
OMX_U32 flags, OMX_TICKS timestamp) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
- data.writeIntPtr((intptr_t)buffer);
+ data.writeInt32((int32_t)node);
+ data.writeInt32((int32_t)buffer);
data.writeInt32(range_offset);
data.writeInt32(range_length);
data.writeInt32(flags);
@@ -459,7 +479,7 @@ public:
OMX_INDEXTYPE *index) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeCString(parameter_name);
remote()->transact(GET_EXTENSION_INDEX, data, &reply);
@@ -482,9 +502,9 @@ public:
size_t size) {
Parcel data, reply;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
- data.writeIntPtr((intptr_t)node);
+ data.writeInt32((int32_t)node);
data.writeInt32(port_index);
- data.writeInt32(size);
+ data.writeInt64(size);
data.write(optionData, size);
data.writeInt32(type);
remote()->transact(SET_INTERNAL_OPTION, data, &reply);
@@ -509,7 +529,7 @@ status_t BnOMX::onTransact(
case LIVES_LOCALLY:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void *)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
pid_t pid = (pid_t)data.readInt32();
reply->writeInt32(livesLocally(node, pid));
@@ -553,7 +573,7 @@ status_t BnOMX::onTransact(
status_t err = allocateNode(name, observer, &node);
reply->writeInt32(err);
if (err == OK) {
- reply->writeIntPtr((intptr_t)node);
+ reply->writeInt32((int32_t)node);
}
return NO_ERROR;
@@ -563,7 +583,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
reply->writeInt32(freeNode(node));
@@ -574,7 +594,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_COMMANDTYPE cmd =
static_cast<OMX_COMMANDTYPE>(data.readInt32());
@@ -593,10 +613,10 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32());
- size_t size = data.readInt32();
+ size_t size = data.readInt64();
void *params = malloc(size);
data.read(params, size);
@@ -644,7 +664,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_STATETYPE state = OMX_StateInvalid;
status_t err = getState(node, &state);
@@ -658,7 +678,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
@@ -672,7 +692,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_U32 usage = 0;
@@ -687,7 +707,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
sp<IMemory> params =
interface_cast<IMemory>(data.readStrongBinder());
@@ -697,7 +717,7 @@ status_t BnOMX::onTransact(
reply->writeInt32(err);
if (err == OK) {
- reply->writeIntPtr((intptr_t)buffer);
+ reply->writeInt32((int32_t)buffer);
}
return NO_ERROR;
@@ -707,7 +727,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
data.read(*graphicBuffer);
@@ -718,7 +738,7 @@ status_t BnOMX::onTransact(
reply->writeInt32(err);
if (err == OK) {
- reply->writeIntPtr((intptr_t)buffer);
+ reply->writeInt32((int32_t)buffer);
}
return NO_ERROR;
@@ -728,11 +748,11 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
data.read(*graphicBuffer);
- buffer_id buffer = (void*)data.readIntPtr();
+ buffer_id buffer = (buffer_id)data.readInt32();
status_t err = updateGraphicBufferInMeta(
node, port_index, graphicBuffer, buffer);
@@ -745,7 +765,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
sp<IGraphicBufferProducer> bufferProducer;
@@ -755,7 +775,7 @@ status_t BnOMX::onTransact(
reply->writeInt32(err);
if (err == OK) {
- reply->writeStrongBinder(bufferProducer->asBinder());
+ reply->writeStrongBinder(IInterface::asBinder(bufferProducer));
}
return NO_ERROR;
@@ -765,7 +785,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
status_t err = signalEndOfInputStream(node);
reply->writeInt32(err);
@@ -777,7 +797,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
@@ -791,7 +811,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
OMX_BOOL enable = (OMX_BOOL)data.readInt32();
OMX_U32 max_width = data.readInt32();
@@ -804,13 +824,31 @@ status_t BnOMX::onTransact(
return NO_ERROR;
}
+ case CONFIGURE_VIDEO_TUNNEL_MODE:
+ {
+ CHECK_OMX_INTERFACE(IOMX, data, reply);
+
+ node_id node = (node_id)data.readInt32();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL tunneled = (OMX_BOOL)data.readInt32();
+ OMX_U32 audio_hw_sync = data.readInt32();
+
+ native_handle_t *sideband_handle;
+ status_t err = configureVideoTunnelMode(
+ node, port_index, tunneled, audio_hw_sync, &sideband_handle);
+ reply->writeInt32(err);
+ reply->writeNativeHandle(sideband_handle);
+
+ return NO_ERROR;
+ }
+
case ALLOC_BUFFER:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
- size_t size = data.readInt32();
+ size_t size = data.readInt64();
buffer_id buffer;
void *buffer_data;
@@ -819,8 +857,8 @@ status_t BnOMX::onTransact(
reply->writeInt32(err);
if (err == OK) {
- reply->writeIntPtr((intptr_t)buffer);
- reply->writeIntPtr((intptr_t)buffer_data);
+ reply->writeInt32((int32_t)buffer);
+ reply->writeInt64((uintptr_t)buffer_data);
}
return NO_ERROR;
@@ -830,7 +868,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
sp<IMemory> params =
interface_cast<IMemory>(data.readStrongBinder());
@@ -842,7 +880,7 @@ status_t BnOMX::onTransact(
reply->writeInt32(err);
if (err == OK) {
- reply->writeIntPtr((intptr_t)buffer);
+ reply->writeInt32((int32_t)buffer);
}
return NO_ERROR;
@@ -852,9 +890,9 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
- buffer_id buffer = (void*)data.readIntPtr();
+ buffer_id buffer = (buffer_id)data.readInt32();
reply->writeInt32(freeBuffer(node, port_index, buffer));
return NO_ERROR;
@@ -864,8 +902,8 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
- buffer_id buffer = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
+ buffer_id buffer = (buffer_id)data.readInt32();
reply->writeInt32(fillBuffer(node, buffer));
return NO_ERROR;
@@ -875,8 +913,8 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
- buffer_id buffer = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
+ buffer_id buffer = (buffer_id)data.readInt32();
OMX_U32 range_offset = data.readInt32();
OMX_U32 range_length = data.readInt32();
OMX_U32 flags = data.readInt32();
@@ -894,7 +932,7 @@ status_t BnOMX::onTransact(
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
- node_id node = (void*)data.readIntPtr();
+ node_id node = (node_id)data.readInt32();
const char *parameter_name = data.readCString();
OMX_INDEXTYPE index;
@@ -927,6 +965,8 @@ public:
data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor());
data.write(&msg, sizeof(msg));
+ ALOGV("onMessage writing message %d, size %zu", msg.type, sizeof(msg));
+
remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY);
}
};
@@ -943,6 +983,8 @@ status_t BnOMXObserver::onTransact(
omx_message msg;
data.read(&msg, sizeof(msg));
+ ALOGV("onTransact reading message %d, size %zu", msg.type, sizeof(msg));
+
// XXX Could use readInplace maybe?
onMessage(msg);
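
The IOMX.cpp hunks above all make the same wire-format fix: node_id and buffer_id used to cross Binder as pointer-sized integers (writeIntPtr/readIntPtr), which cannot work once a 32-bit client talks to a 64-bit mediaserver, so they are pinned to 32 bits, and size_t values are pinned to 64 bits. A minimal sketch of that convention, assuming the AOSP libbinder and IOMX headers; illustrative only, not part of this patch:

    #include <binder/Parcel.h>
    #include <media/IOMX.h>

    using namespace android;

    // Proxy and stub must agree on the width of every field; after this patch
    // the widths no longer depend on the bitness of either process.
    static void writeIds(Parcel &data, IOMX::node_id node,
                         IOMX::buffer_id buffer, size_t size) {
        data.writeInt32((int32_t)node);     // was: writeIntPtr((intptr_t)node)
        data.writeInt32((int32_t)buffer);   // was: writeIntPtr((intptr_t)buffer)
        data.writeInt64((int64_t)size);     // size_t always travels as 64-bit
    }

    static void readIds(const Parcel &data, IOMX::node_id *node,
                        IOMX::buffer_id *buffer, size_t *size) {
        *node = (IOMX::node_id)data.readInt32();
        *buffer = (IOMX::buffer_id)data.readInt32();
        *size = (size_t)data.readInt64();
    }
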
diff --git a/media/libmedia/IRemoteDisplayClient.cpp b/media/libmedia/IRemoteDisplayClient.cpp
index 7190879..9d63bc9 100644
--- a/media/libmedia/IRemoteDisplayClient.cpp
+++ b/media/libmedia/IRemoteDisplayClient.cpp
@@ -42,7 +42,7 @@ public:
{
Parcel data, reply;
data.writeInterfaceToken(IRemoteDisplayClient::getInterfaceDescriptor());
- data.writeStrongBinder(bufferProducer->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(bufferProducer));
data.writeInt32(width);
data.writeInt32(height);
data.writeInt32(flags);
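
The other recurring change, in IOMX.cpp above, in this file, and in IStreamSource.cpp below, replaces iface->asBinder() with the static IInterface::asBinder(iface). The static form tolerates a NULL interface, whereas the member call would dereference it. Illustrative sketch only, assuming AOSP libbinder; not part of this patch:

    #include <binder/IInterface.h>
    #include <binder/Parcel.h>
    #include <media/IRemoteDisplayClient.h>

    using namespace android;

    static void writeClient(Parcel &data, const sp<IRemoteDisplayClient> &client) {
        // Safe even when client == NULL: asBinder(NULL) yields a NULL binder.
        data.writeStrongBinder(IInterface::asBinder(client));
    }
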
diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp
index 68ffca8..d480aef 100644
--- a/media/libmedia/IStreamSource.cpp
+++ b/media/libmedia/IStreamSource.cpp
@@ -55,16 +55,16 @@ struct BpStreamSource : public BpInterface<IStreamSource> {
virtual void setListener(const sp<IStreamListener> &listener) {
Parcel data, reply;
data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
- data.writeStrongBinder(listener->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(listener));
remote()->transact(SET_LISTENER, data, &reply);
}
virtual void setBuffers(const Vector<sp<IMemory> > &buffers) {
Parcel data, reply;
data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
- data.writeInt32(static_cast<int32_t>(buffers.size()));
+ data.writeInt64(static_cast<int64_t>(buffers.size()));
for (size_t i = 0; i < buffers.size(); ++i) {
- data.writeStrongBinder(buffers.itemAt(i)->asBinder());
+ data.writeStrongBinder(IInterface::asBinder(buffers.itemAt(i)));
}
remote()->transact(SET_BUFFERS, data, &reply);
}
@@ -72,7 +72,7 @@ struct BpStreamSource : public BpInterface<IStreamSource> {
virtual void onBufferAvailable(size_t index) {
Parcel data, reply;
data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor());
- data.writeInt32(static_cast<int32_t>(index));
+ data.writeInt64(static_cast<int64_t>(index));
remote()->transact(
ON_BUFFER_AVAILABLE, data, &reply, IBinder::FLAG_ONEWAY);
}
@@ -102,7 +102,7 @@ status_t BnStreamSource::onTransact(
case SET_BUFFERS:
{
CHECK_INTERFACE(IStreamSource, data, reply);
- size_t n = static_cast<size_t>(data.readInt32());
+ size_t n = static_cast<size_t>(data.readInt64());
Vector<sp<IMemory> > buffers;
for (size_t i = 0; i < n; ++i) {
sp<IMemory> mem =
@@ -117,7 +117,7 @@ status_t BnStreamSource::onTransact(
case ON_BUFFER_AVAILABLE:
{
CHECK_INTERFACE(IStreamSource, data, reply);
- onBufferAvailable(static_cast<size_t>(data.readInt32()));
+ onBufferAvailable(static_cast<size_t>(data.readInt64()));
break;
}
@@ -145,8 +145,8 @@ struct BpStreamListener : public BpInterface<IStreamListener> {
virtual void queueBuffer(size_t index, size_t size) {
Parcel data, reply;
data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor());
- data.writeInt32(static_cast<int32_t>(index));
- data.writeInt32(static_cast<int32_t>(size));
+ data.writeInt64(static_cast<int64_t>(index));
+ data.writeInt64(static_cast<int64_t>(size));
remote()->transact(QUEUE_BUFFER, data, &reply, IBinder::FLAG_ONEWAY);
}
@@ -177,8 +177,8 @@ status_t BnStreamListener::onTransact(
case QUEUE_BUFFER:
{
CHECK_INTERFACE(IStreamListener, data, reply);
- size_t index = static_cast<size_t>(data.readInt32());
- size_t size = static_cast<size_t>(data.readInt32());
+ size_t index = static_cast<size_t>(data.readInt64());
+ size_t size = static_cast<size_t>(data.readInt64());
queueBuffer(index, size);
break;
diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp
index e914b34..721d8d7 100644
--- a/media/libmedia/JetPlayer.cpp
+++ b/media/libmedia/JetPlayer.cpp
@@ -36,7 +36,6 @@ JetPlayer::JetPlayer(void *javaJetPlayer, int maxTracks, int trackBufferSize) :
mPaused(false),
mMaxTracks(maxTracks),
mEasData(NULL),
- mEasJetFileLoc(NULL),
mTrackBufferSize(trackBufferSize)
{
ALOGV("JetPlayer constructor");
@@ -90,7 +89,7 @@ int JetPlayer::init()
pLibConfig->sampleRate,
AUDIO_FORMAT_PCM_16_BIT,
audio_channel_out_mask_from_count(pLibConfig->numChannels),
- mTrackBufferSize,
+ (size_t) mTrackBufferSize,
AUDIO_OUTPUT_FLAG_NONE);
// create render and playback thread
@@ -133,10 +132,7 @@ int JetPlayer::release()
JET_Shutdown(mEasData);
EAS_Shutdown(mEasData);
}
- if (mEasJetFileLoc) {
- free(mEasJetFileLoc);
- mEasJetFileLoc = NULL;
- }
+ mIoWrapper.clear();
if (mAudioTrack != 0) {
mAudioTrack->stop();
mAudioTrack->flush();
@@ -327,16 +323,9 @@ int JetPlayer::loadFromFile(const char* path)
Mutex::Autolock lock(mMutex);
- mEasJetFileLoc = (EAS_FILE_LOCATOR) malloc(sizeof(EAS_FILE));
- strncpy(mJetFilePath, path, sizeof(mJetFilePath));
- mJetFilePath[sizeof(mJetFilePath) - 1] = '\0';
- mEasJetFileLoc->path = mJetFilePath;
-
- mEasJetFileLoc->fd = 0;
- mEasJetFileLoc->length = 0;
- mEasJetFileLoc->offset = 0;
+ mIoWrapper = new MidiIoWrapper(path);
- EAS_RESULT result = JET_OpenFile(mEasData, mEasJetFileLoc);
+ EAS_RESULT result = JET_OpenFile(mEasData, mIoWrapper->getLocator());
if (result != EAS_SUCCESS)
mState = EAS_STATE_ERROR;
else
@@ -352,13 +341,9 @@ int JetPlayer::loadFromFD(const int fd, const long long offset, const long long
Mutex::Autolock lock(mMutex);
- mEasJetFileLoc = (EAS_FILE_LOCATOR) malloc(sizeof(EAS_FILE));
- mEasJetFileLoc->fd = fd;
- mEasJetFileLoc->offset = offset;
- mEasJetFileLoc->length = length;
- mEasJetFileLoc->path = NULL;
+ mIoWrapper = new MidiIoWrapper(fd, offset, length);
- EAS_RESULT result = JET_OpenFile(mEasData, mEasJetFileLoc);
+ EAS_RESULT result = JET_OpenFile(mEasData, mIoWrapper->getLocator());
if (result != EAS_SUCCESS)
mState = EAS_STATE_ERROR;
else
@@ -459,7 +444,6 @@ int JetPlayer::clearQueue()
//-------------------------------------------------------------------------------------------------
void JetPlayer::dump()
{
- ALOGE("JetPlayer dump: JET file=%s", mEasJetFileLoc->path);
}
void JetPlayer::dumpJetStatus(S_JET_STATUS* pJetStatus)
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
new file mode 100644
index 0000000..7b4c4e2
--- /dev/null
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -0,0 +1,266 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecInfo"
+#include <utils/Log.h>
+
+#include <media/IOMX.h>
+
+#include <media/MediaCodecInfo.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <binder/Parcel.h>
+
+#include <media/stagefright/OMXCodec.h>
+
+namespace android {
+
+void MediaCodecInfo::Capabilities::getSupportedProfileLevels(
+ Vector<ProfileLevel> *profileLevels) const {
+ profileLevels->clear();
+ profileLevels->appendVector(mProfileLevels);
+}
+
+void MediaCodecInfo::Capabilities::getSupportedColorFormats(
+ Vector<uint32_t> *colorFormats) const {
+ colorFormats->clear();
+ colorFormats->appendVector(mColorFormats);
+}
+
+uint32_t MediaCodecInfo::Capabilities::getFlags() const {
+ return mFlags;
+}
+
+const sp<AMessage> MediaCodecInfo::Capabilities::getDetails() const {
+ return mDetails;
+}
+
+MediaCodecInfo::Capabilities::Capabilities()
+ : mFlags(0) {
+ mDetails = new AMessage;
+}
+
+// static
+sp<MediaCodecInfo::Capabilities> MediaCodecInfo::Capabilities::FromParcel(
+ const Parcel &parcel) {
+ sp<MediaCodecInfo::Capabilities> caps = new Capabilities();
+ size_t size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ ProfileLevel profileLevel;
+ profileLevel.mProfile = static_cast<uint32_t>(parcel.readInt32());
+ profileLevel.mLevel = static_cast<uint32_t>(parcel.readInt32());
+ if (caps != NULL) {
+ caps->mProfileLevels.push_back(profileLevel);
+ }
+ }
+ size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ uint32_t color = static_cast<uint32_t>(parcel.readInt32());
+ if (caps != NULL) {
+ caps->mColorFormats.push_back(color);
+ }
+ }
+ uint32_t flags = static_cast<uint32_t>(parcel.readInt32());
+ sp<AMessage> details = AMessage::FromParcel(parcel);
+ if (caps != NULL) {
+ caps->mFlags = flags;
+ caps->mDetails = details;
+ }
+ return caps;
+}
+
+status_t MediaCodecInfo::Capabilities::writeToParcel(Parcel *parcel) const {
+ CHECK_LE(mProfileLevels.size(), INT32_MAX);
+ parcel->writeInt32(mProfileLevels.size());
+ for (size_t i = 0; i < mProfileLevels.size(); i++) {
+ parcel->writeInt32(mProfileLevels.itemAt(i).mProfile);
+ parcel->writeInt32(mProfileLevels.itemAt(i).mLevel);
+ }
+ CHECK_LE(mColorFormats.size(), INT32_MAX);
+ parcel->writeInt32(mColorFormats.size());
+ for (size_t i = 0; i < mColorFormats.size(); i++) {
+ parcel->writeInt32(mColorFormats.itemAt(i));
+ }
+ parcel->writeInt32(mFlags);
+ mDetails->writeToParcel(parcel);
+ return OK;
+}
+
+bool MediaCodecInfo::isEncoder() const {
+ return mIsEncoder;
+}
+
+bool MediaCodecInfo::hasQuirk(const char *name) const {
+ for (size_t ix = 0; ix < mQuirks.size(); ix++) {
+ if (mQuirks.itemAt(ix).equalsIgnoreCase(name)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void MediaCodecInfo::getSupportedMimes(Vector<AString> *mimes) const {
+ mimes->clear();
+ for (size_t ix = 0; ix < mCaps.size(); ix++) {
+ mimes->push_back(mCaps.keyAt(ix));
+ }
+}
+
+const sp<MediaCodecInfo::Capabilities>
+MediaCodecInfo::getCapabilitiesFor(const char *mime) const {
+ ssize_t ix = getCapabilityIndex(mime);
+ if (ix >= 0) {
+ return mCaps.valueAt(ix);
+ }
+ return NULL;
+}
+
+const char *MediaCodecInfo::getCodecName() const {
+ return mName.c_str();
+}
+
+// static
+sp<MediaCodecInfo> MediaCodecInfo::FromParcel(const Parcel &parcel) {
+ AString name = AString::FromParcel(parcel);
+ bool isEncoder = static_cast<bool>(parcel.readInt32());
+ sp<MediaCodecInfo> info = new MediaCodecInfo(name, isEncoder, NULL);
+ size_t size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ AString quirk = AString::FromParcel(parcel);
+ if (info != NULL) {
+ info->mQuirks.push_back(quirk);
+ }
+ }
+ size = static_cast<size_t>(parcel.readInt32());
+ for (size_t i = 0; i < size; i++) {
+ AString mime = AString::FromParcel(parcel);
+ sp<Capabilities> caps = Capabilities::FromParcel(parcel);
+ if (info != NULL) {
+ info->mCaps.add(mime, caps);
+ }
+ }
+ return info;
+}
+
+status_t MediaCodecInfo::writeToParcel(Parcel *parcel) const {
+ mName.writeToParcel(parcel);
+ parcel->writeInt32(mIsEncoder);
+ parcel->writeInt32(mQuirks.size());
+ for (size_t i = 0; i < mQuirks.size(); i++) {
+ mQuirks.itemAt(i).writeToParcel(parcel);
+ }
+ parcel->writeInt32(mCaps.size());
+ for (size_t i = 0; i < mCaps.size(); i++) {
+ mCaps.keyAt(i).writeToParcel(parcel);
+ mCaps.valueAt(i)->writeToParcel(parcel);
+ }
+ return OK;
+}
+
+ssize_t MediaCodecInfo::getCapabilityIndex(const char *mime) const {
+ for (size_t ix = 0; ix < mCaps.size(); ix++) {
+ if (mCaps.keyAt(ix).equalsIgnoreCase(mime)) {
+ return ix;
+ }
+ }
+ return -1;
+}
+
+MediaCodecInfo::MediaCodecInfo(AString name, bool encoder, const char *mime)
+ : mName(name),
+ mIsEncoder(encoder),
+ mHasSoleMime(false) {
+ if (mime != NULL) {
+ addMime(mime);
+ mHasSoleMime = true;
+ }
+}
+
+status_t MediaCodecInfo::addMime(const char *mime) {
+ if (mHasSoleMime) {
+ ALOGE("Codec '%s' already had its type specified", mName.c_str());
+ return -EINVAL;
+ }
+ ssize_t ix = getCapabilityIndex(mime);
+ if (ix >= 0) {
+ mCurrentCaps = mCaps.valueAt(ix);
+ } else {
+ mCurrentCaps = new Capabilities();
+ mCaps.add(AString(mime), mCurrentCaps);
+ }
+ return OK;
+}
+
+void MediaCodecInfo::removeMime(const char *mime) {
+ ssize_t ix = getCapabilityIndex(mime);
+ if (ix >= 0) {
+ mCaps.removeItemsAt(ix);
+ // mCurrentCaps will be removed when completed
+ }
+}
+
+status_t MediaCodecInfo::initializeCapabilities(const CodecCapabilities &caps) {
+ mCurrentCaps->mProfileLevels.clear();
+ mCurrentCaps->mColorFormats.clear();
+
+ for (size_t i = 0; i < caps.mProfileLevels.size(); ++i) {
+ const CodecProfileLevel &src = caps.mProfileLevels.itemAt(i);
+
+ ProfileLevel profileLevel;
+ profileLevel.mProfile = src.mProfile;
+ profileLevel.mLevel = src.mLevel;
+ mCurrentCaps->mProfileLevels.push_back(profileLevel);
+ }
+
+ for (size_t i = 0; i < caps.mColorFormats.size(); ++i) {
+ mCurrentCaps->mColorFormats.push_back(caps.mColorFormats.itemAt(i));
+ }
+
+ mCurrentCaps->mFlags = caps.mFlags;
+ mCurrentCaps->mDetails = new AMessage;
+
+ return OK;
+}
+
+void MediaCodecInfo::addQuirk(const char *name) {
+ if (!hasQuirk(name)) {
+ mQuirks.push(name);
+ }
+}
+
+void MediaCodecInfo::complete() {
+ mCurrentCaps = NULL;
+}
+
+void MediaCodecInfo::addDetail(const AString &key, const AString &value) {
+ mCurrentCaps->mDetails->setString(key.c_str(), value.c_str());
+}
+
+void MediaCodecInfo::addFeature(const AString &key, int32_t value) {
+ AString tag = "feature-";
+ tag.append(key);
+ mCurrentCaps->mDetails->setInt32(tag.c_str(), value);
+}
+
+void MediaCodecInfo::addFeature(const AString &key, const char *value) {
+ AString tag = "feature-";
+ tag.append(key);
+ mCurrentCaps->mDetails->setString(tag.c_str(), value);
+}
+
+} // namespace android
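
MediaCodecInfo.cpp above is a new parcelable container for a codec's name, encoder flag, quirks, and per-mime Capabilities (profile/level pairs, color formats, flags, and a detail AMessage). A short usage sketch of the surface defined above, assuming the matching media/MediaCodecInfo.h header; illustrative only, not part of this patch:

    #define LOG_TAG "MediaCodecInfoDemo"   // demo tag, not from this patch
    #include <utils/Log.h>
    #include <utils/Vector.h>
    #include <binder/Parcel.h>
    #include <media/MediaCodecInfo.h>
    #include <media/stagefright/foundation/AString.h>

    using namespace android;

    static void roundTrip(const sp<MediaCodecInfo> &info) {
        // Serialize and rebuild a copy, as a Binder client receiving it would.
        Parcel p;
        info->writeToParcel(&p);
        p.setDataPosition(0);
        sp<MediaCodecInfo> copy = MediaCodecInfo::FromParcel(p);

        Vector<AString> mimes;
        copy->getSupportedMimes(&mimes);
        for (size_t i = 0; i < mimes.size(); ++i) {
            const sp<MediaCodecInfo::Capabilities> caps =
                    copy->getCapabilitiesFor(mimes[i].c_str());
            ALOGI("%s: mime %s, flags 0x%x", copy->getCodecName(),
                    mimes[i].c_str(), caps != NULL ? caps->getFlags() : 0u);
        }
    }
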
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 8319cd7..e2e6042 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -69,6 +69,7 @@ const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
{"480p", CAMCORDER_QUALITY_480P},
{"720p", CAMCORDER_QUALITY_720P},
{"1080p", CAMCORDER_QUALITY_1080P},
+ {"2160p", CAMCORDER_QUALITY_2160P},
{"qvga", CAMCORDER_QUALITY_QVGA},
{"timelapselow", CAMCORDER_QUALITY_TIME_LAPSE_LOW},
@@ -78,11 +79,25 @@ const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
{"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P},
{"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P},
{"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P},
+ {"timelapse2160p", CAMCORDER_QUALITY_TIME_LAPSE_2160P},
{"timelapseqvga", CAMCORDER_QUALITY_TIME_LAPSE_QVGA},
+
+ {"highspeedlow", CAMCORDER_QUALITY_HIGH_SPEED_LOW},
+ {"highspeedhigh", CAMCORDER_QUALITY_HIGH_SPEED_HIGH},
+ {"highspeed480p", CAMCORDER_QUALITY_HIGH_SPEED_480P},
+ {"highspeed720p", CAMCORDER_QUALITY_HIGH_SPEED_720P},
+ {"highspeed1080p", CAMCORDER_QUALITY_HIGH_SPEED_1080P},
+ {"highspeed2160p", CAMCORDER_QUALITY_HIGH_SPEED_2160P},
};
+#if LOG_NDEBUG
+#define UNUSED __unused
+#else
+#define UNUSED
+#endif
+
/*static*/ void
-MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec)
+MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec UNUSED)
{
ALOGV("video codec:");
ALOGV("codec = %d", codec.mCodec);
@@ -93,7 +108,7 @@ MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec)
}
/*static*/ void
-MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec)
+MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec UNUSED)
{
ALOGV("audio codec:");
ALOGV("codec = %d", codec.mCodec);
@@ -103,7 +118,7 @@ MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec)
}
/*static*/ void
-MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap)
+MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap UNUSED)
{
ALOGV("video encoder cap:");
ALOGV("codec = %d", cap.mCodec);
@@ -114,7 +129,7 @@ MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap)
}
/*static*/ void
-MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap)
+MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap UNUSED)
{
ALOGV("audio encoder cap:");
ALOGV("codec = %d", cap.mCodec);
@@ -124,21 +139,21 @@ MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap)
}
/*static*/ void
-MediaProfiles::logVideoDecoderCap(const MediaProfiles::VideoDecoderCap& cap)
+MediaProfiles::logVideoDecoderCap(const MediaProfiles::VideoDecoderCap& cap UNUSED)
{
ALOGV("video decoder cap:");
ALOGV("codec = %d", cap.mCodec);
}
/*static*/ void
-MediaProfiles::logAudioDecoderCap(const MediaProfiles::AudioDecoderCap& cap)
+MediaProfiles::logAudioDecoderCap(const MediaProfiles::AudioDecoderCap& cap UNUSED)
{
ALOGV("audio codec cap:");
ALOGV("codec = %d", cap.mCodec);
}
/*static*/ void
-MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap)
+MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap UNUSED)
{
ALOGV("videoeditor cap:");
ALOGV("mMaxInputFrameWidth = %d", cap.mMaxInputFrameWidth);
@@ -466,8 +481,13 @@ static bool isTimelapseProfile(camcorder_quality quality) {
quality <= CAMCORDER_QUALITY_TIME_LAPSE_LIST_END;
}
+static bool isHighSpeedProfile(camcorder_quality quality) {
+ return quality >= CAMCORDER_QUALITY_HIGH_SPEED_LIST_START &&
+ quality <= CAMCORDER_QUALITY_HIGH_SPEED_LIST_END;
+}
+
void MediaProfiles::initRequiredProfileRefs(const Vector<int>& cameraIds) {
- ALOGV("Number of camera ids: %d", cameraIds.size());
+ ALOGV("Number of camera ids: %zu", cameraIds.size());
CHECK(cameraIds.size() > 0);
mRequiredProfileRefs = new RequiredProfiles[cameraIds.size()];
for (size_t i = 0, n = cameraIds.size(); i < n; ++i) {
@@ -513,14 +533,17 @@ void MediaProfiles::checkAndAddRequiredProfilesIfNecessary() {
camcorder_quality refQuality;
VideoCodec *codec = NULL;
- // Check high and low from either camcorder profile or timelapse profile
- // but not both. Default, check camcorder profile
+ // Check high and low from either camcorder profile, timelapse profile
+ // or high speed profile, but not all of them. Default, check camcorder profile
size_t j = 0;
size_t o = 2;
if (isTimelapseProfile(quality)) {
// Check timelapse profile instead.
j = 2;
o = kNumRequiredProfiles;
+ } else if (isHighSpeedProfile(quality)) {
+ // Skip the check for high speed profile.
+ continue;
} else {
// Must be camcorder profile.
CHECK(isCamcorderProfile(quality));
@@ -594,14 +617,14 @@ void MediaProfiles::checkAndAddRequiredProfilesIfNecessary() {
int index = getCamcorderProfileIndex(cameraId, profile->mQuality);
if (index != -1) {
- ALOGV("Profile quality %d for camera %d already exists",
+ ALOGV("Profile quality %d for camera %zu already exists",
profile->mQuality, cameraId);
CHECK(index == refIndex);
continue;
}
// Insert the new profile
- ALOGV("Add a profile: quality %d=>%d for camera %d",
+ ALOGV("Add a profile: quality %d=>%d for camera %zu",
mCamcorderProfiles[info->mRefProfileIndex]->mQuality,
profile->mQuality, cameraId);
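
The UNUSED macro added to MediaProfiles.cpp exists because ALOGV() expands to nothing when LOG_NDEBUG is set, which leaves every parameter of these log-only helpers unreferenced and would trip -Wunused-parameter under -Werror. A compressed sketch of the pattern, assuming an Android build where __unused is available; illustrative only, not part of this patch:

    #define LOG_TAG "MediaProfilesDemo"    // demo tag, not from this patch
    #include <utils/Log.h>

    #if LOG_NDEBUG
    #define UNUSED __unused   // parameter is only read by ALOGV, which is compiled out
    #else
    #define UNUSED
    #endif

    static void logFrameRate(int frameRate UNUSED) {
        ALOGV("frame rate: %d", frameRate);   // no-op when LOG_NDEBUG is set
    }
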
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index 28b5aa7..dcbb769 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -237,4 +237,24 @@ MediaScanResult MediaScanner::doProcessDirectoryEntry(
return MEDIA_SCAN_RESULT_OK;
}
+MediaAlbumArt *MediaAlbumArt::clone() {
+ size_t byte_size = this->size() + sizeof(MediaAlbumArt);
+ MediaAlbumArt *result = reinterpret_cast<MediaAlbumArt *>(malloc(byte_size));
+ result->mSize = this->size();
+ memcpy(&result->mData[0], &this->mData[0], this->size());
+ return result;
+}
+
+void MediaAlbumArt::init(MediaAlbumArt *instance, int32_t dataSize, const void *data) {
+ instance->mSize = dataSize;
+ memcpy(&instance->mData[0], data, dataSize);
+}
+
+MediaAlbumArt *MediaAlbumArt::fromData(int32_t dataSize, const void* data) {
+ size_t byte_size = sizeof(MediaAlbumArt) + dataSize;
+ MediaAlbumArt *result = reinterpret_cast<MediaAlbumArt *>(malloc(byte_size));
+ init(result, dataSize, data);
+ return result;
+}
+
} // namespace android
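
The MediaAlbumArt helpers added above use an over-allocation layout: a single malloc'ed block holds the object header followed immediately by the image bytes (mData is the trailing storage), so one free() releases both. A usage sketch, assuming the MediaAlbumArt declaration in media/mediascanner.h; illustrative only, not part of this patch:

    #include <stdlib.h>
    #include <media/mediascanner.h>

    using namespace android;

    static MediaAlbumArt *copyThumbnail(const void *bytes, int32_t numBytes) {
        // One contiguous block: sizeof(MediaAlbumArt) header + numBytes payload.
        MediaAlbumArt *art = MediaAlbumArt::fromData(numBytes, bytes);
        MediaAlbumArt *dup = art->clone();    // same layout, independent block
        free(art);                            // plain free(); no destructor runs
        return dup;                           // caller eventually free()s this too
    }
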
diff --git a/media/libmedia/MediaScannerClient.cpp b/media/libmedia/MediaScannerClient.cpp
index 93a4a4c..9f803cb 100644
--- a/media/libmedia/MediaScannerClient.cpp
+++ b/media/libmedia/MediaScannerClient.cpp
@@ -14,217 +14,38 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaScannerClient"
+#include <utils/Log.h>
+
#include <media/mediascanner.h>
+#include "CharacterEncodingDetector.h"
#include "StringArray.h"
-#include "autodetect.h"
-#include "unicode/ucnv.h"
-#include "unicode/ustring.h"
-
namespace android {
-MediaScannerClient::MediaScannerClient()
- : mNames(NULL),
- mValues(NULL),
- mLocaleEncoding(kEncodingNone)
-{
+MediaScannerClient::MediaScannerClient() {
}
-MediaScannerClient::~MediaScannerClient()
-{
- delete mNames;
- delete mValues;
+MediaScannerClient::~MediaScannerClient() {
}
void MediaScannerClient::setLocale(const char* locale)
{
- if (!locale) return;
-
- if (!strncmp(locale, "ja", 2))
- mLocaleEncoding = kEncodingShiftJIS;
- else if (!strncmp(locale, "ko", 2))
- mLocaleEncoding = kEncodingEUCKR;
- else if (!strncmp(locale, "zh", 2)) {
- if (!strcmp(locale, "zh_CN")) {
- // simplified chinese for mainland China
- mLocaleEncoding = kEncodingGBK;
- } else {
- // assume traditional for non-mainland Chinese locales (Taiwan, Hong Kong, Singapore)
- mLocaleEncoding = kEncodingBig5;
- }
- }
+ mLocale = locale; // not currently used
}
-void MediaScannerClient::beginFile()
-{
- mNames = new StringArray;
- mValues = new StringArray;
+void MediaScannerClient::beginFile() {
}
status_t MediaScannerClient::addStringTag(const char* name, const char* value)
{
- if (mLocaleEncoding != kEncodingNone) {
- // don't bother caching strings that are all ASCII.
- // call handleStringTag directly instead.
- // check to see if value (which should be utf8) has any non-ASCII characters
- bool nonAscii = false;
- const char* chp = value;
- char ch;
- while ((ch = *chp++)) {
- if (ch & 0x80) {
- nonAscii = true;
- break;
- }
- }
-
- if (nonAscii) {
- // save the strings for later so they can be used for native encoding detection
- mNames->push_back(name);
- mValues->push_back(value);
- return OK;
- }
- // else fall through
- }
-
- // autodetection is not necessary, so no need to cache the values
- // pass directly to the client instead
- return handleStringTag(name, value);
-}
-
-static uint32_t possibleEncodings(const char* s)
-{
- uint32_t result = kEncodingAll;
- // if s contains a native encoding, then it was mistakenly encoded in utf8 as if it were latin-1
- // so we need to reverse the latin-1 -> utf8 conversion to get the native chars back
- uint8_t ch1, ch2;
- uint8_t* chp = (uint8_t *)s;
-
- while ((ch1 = *chp++)) {
- if (ch1 & 0x80) {
- ch2 = *chp++;
- ch1 = ((ch1 << 6) & 0xC0) | (ch2 & 0x3F);
- // ch1 is now the first byte of the potential native char
-
- ch2 = *chp++;
- if (ch2 & 0x80)
- ch2 = ((ch2 << 6) & 0xC0) | (*chp++ & 0x3F);
- // ch2 is now the second byte of the potential native char
- int ch = (int)ch1 << 8 | (int)ch2;
- result &= findPossibleEncodings(ch);
- }
- // else ASCII character, which could be anything
- }
-
- return result;
-}
-
-void MediaScannerClient::convertValues(uint32_t encoding)
-{
- const char* enc = NULL;
- switch (encoding) {
- case kEncodingShiftJIS:
- enc = "shift-jis";
- break;
- case kEncodingGBK:
- enc = "gbk";
- break;
- case kEncodingBig5:
- enc = "Big5";
- break;
- case kEncodingEUCKR:
- enc = "EUC-KR";
- break;
- }
-
- if (enc) {
- UErrorCode status = U_ZERO_ERROR;
-
- UConverter *conv = ucnv_open(enc, &status);
- if (U_FAILURE(status)) {
- ALOGE("could not create UConverter for %s", enc);
- return;
- }
- UConverter *utf8Conv = ucnv_open("UTF-8", &status);
- if (U_FAILURE(status)) {
- ALOGE("could not create UConverter for UTF-8");
- ucnv_close(conv);
- return;
- }
-
- // for each value string, convert from native encoding to UTF-8
- for (int i = 0; i < mNames->size(); i++) {
- // first we need to untangle the utf8 and convert it back to the original bytes
- // since we are reducing the length of the string, we can do this in place
- uint8_t* src = (uint8_t *)mValues->getEntry(i);
- int len = strlen((char *)src);
- uint8_t* dest = src;
-
- uint8_t uch;
- while ((uch = *src++)) {
- if (uch & 0x80)
- *dest++ = ((uch << 6) & 0xC0) | (*src++ & 0x3F);
- else
- *dest++ = uch;
- }
- *dest = 0;
-
- // now convert from native encoding to UTF-8
- const char* source = mValues->getEntry(i);
- int targetLength = len * 3 + 1;
- char* buffer = new char[targetLength];
- // don't normally check for NULL, but in this case targetLength may be large
- if (!buffer)
- break;
- char* target = buffer;
-
- ucnv_convertEx(utf8Conv, conv, &target, target + targetLength,
- &source, (const char *)dest, NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
- if (U_FAILURE(status)) {
- ALOGE("ucnv_convertEx failed: %d", status);
- mValues->setEntry(i, "???");
- } else {
- // zero terminate
- *target = 0;
- mValues->setEntry(i, buffer);
- }
-
- delete[] buffer;
- }
-
- ucnv_close(conv);
- ucnv_close(utf8Conv);
- }
+ handleStringTag(name, value);
+ return OK;
}
-void MediaScannerClient::endFile()
-{
- if (mLocaleEncoding != kEncodingNone) {
- int size = mNames->size();
- uint32_t encoding = kEncodingAll;
-
- // compute a bit mask containing all possible encodings
- for (int i = 0; i < mNames->size(); i++)
- encoding &= possibleEncodings(mValues->getEntry(i));
-
- // if the locale encoding matches, then assume we have a native encoding.
- if (encoding & mLocaleEncoding)
- convertValues(mLocaleEncoding);
-
- // finally, push all name/value pairs to the client
- for (int i = 0; i < mNames->size(); i++) {
- status_t status = handleStringTag(mNames->getEntry(i), mValues->getEntry(i));
- if (status) {
- break;
- }
- }
- }
- // else addStringTag() has done all the work so we have nothing to do
-
- delete mNames;
- delete mValues;
- mNames = NULL;
- mValues = NULL;
+void MediaScannerClient::endFile() {
}
} // namespace android
diff --git a/media/libmedia/MemoryLeakTrackUtil.cpp b/media/libmedia/MemoryLeakTrackUtil.cpp
index f004ca4..d31f721 100644
--- a/media/libmedia/MemoryLeakTrackUtil.cpp
+++ b/media/libmedia/MemoryLeakTrackUtil.cpp
@@ -17,6 +17,8 @@
#include <media/MemoryLeakTrackUtil.h>
#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
#include <sys/types.h>
#include <unistd.h>
diff --git a/media/libmedia/MidiIoWrapper.cpp b/media/libmedia/MidiIoWrapper.cpp
new file mode 100644
index 0000000..5197ce2
--- /dev/null
+++ b/media/libmedia/MidiIoWrapper.cpp
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MidiIoWrapper"
+#include <utils/Log.h>
+#include <utils/RefBase.h>
+
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#include "media/MidiIoWrapper.h"
+
+static int readAt(void *handle, void *buffer, int pos, int size) {
+ return ((android::MidiIoWrapper*)handle)->readAt(buffer, pos, size);
+}
+static int size(void *handle) {
+ return ((android::MidiIoWrapper*)handle)->size();
+}
+
+namespace android {
+
+MidiIoWrapper::MidiIoWrapper(const char *path) {
+ ALOGV("MidiIoWrapper(%s)", path);
+ mFd = open(path, O_RDONLY | O_LARGEFILE);
+ mBase = 0;
+ mLength = lseek(mFd, 0, SEEK_END);
+}
+
+MidiIoWrapper::MidiIoWrapper(int fd, off64_t offset, int64_t size) {
+ ALOGV("MidiIoWrapper(fd=%d)", fd);
+ mFd = dup(fd);
+ mBase = offset;
+ mLength = size;
+}
+
+MidiIoWrapper::MidiIoWrapper(const sp<DataSource> &source) {
+ ALOGV("MidiIoWrapper(DataSource)");
+ mFd = -1;
+ mDataSource = source;
+ off64_t l;
+ if (mDataSource->getSize(&l) == OK) {
+ mLength = l;
+ } else {
+ mLength = 0;
+ }
+}
+
+MidiIoWrapper::~MidiIoWrapper() {
+ ALOGV("~MidiIoWrapper");
+ close(mFd);
+}
+
+int MidiIoWrapper::readAt(void *buffer, int offset, int size) {
+ ALOGV("readAt(%p, %d, %d)", buffer, offset, size);
+
+ if (mDataSource != NULL) {
+ return mDataSource->readAt(offset, buffer, size);
+ }
+ lseek(mFd, mBase + offset, SEEK_SET);
+ if (offset + size > mLength) {
+ size = mLength - offset;
+ }
+ return read(mFd, buffer, size);
+}
+
+int MidiIoWrapper::size() {
+ ALOGV("size() = %d", int(mLength));
+ return mLength;
+}
+
+EAS_FILE_LOCATOR MidiIoWrapper::getLocator() {
+ mEasFile.handle = this;
+ mEasFile.readAt = ::readAt;
+ mEasFile.size = ::size;
+ return &mEasFile;
+}
+
+} // namespace android
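
MidiIoWrapper.cpp above centralizes what JetPlayer.cpp used to do by hand: it owns the fd (or DataSource) and hands the EAS engine an EAS_FILE_LOCATOR whose C callbacks are the file-static ::readAt/::size trampolines, which recover the C++ object from the opaque handle pointer. A usage sketch, assuming MidiIoWrapper derives from RefBase (as its sp<>-style use in JetPlayer.cpp suggests) and is declared in media/MidiIoWrapper.h; illustrative only, not part of this patch:

    #include <media/MidiIoWrapper.h>

    using namespace android;

    static bool readFileHeader(const char *path, unsigned char header[4]) {
        sp<MidiIoWrapper> io = new MidiIoWrapper(path);
        if (io->size() < 4) {
            return false;
        }
        // The EAS/JET engine normally drives these reads through the callbacks in
        // io->getLocator(); calling readAt()/size() directly just shows the contract.
        return io->readAt(header, 0, 4) == 4;
    }
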
diff --git a/media/libmedia/SingleStateQueue.cpp b/media/libmedia/SingleStateQueue.cpp
index 3503baa..c241184 100644
--- a/media/libmedia/SingleStateQueue.cpp
+++ b/media/libmedia/SingleStateQueue.cpp
@@ -16,7 +16,6 @@
#include <new>
#include <cutils/atomic.h>
-#include <cutils/atomic-inline.h> // for android_memory_barrier()
#include <media/SingleStateQueue.h>
namespace android {
diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp
deleted file mode 100644
index 22e9fad..0000000
--- a/media/libmedia/SoundPool.cpp
+++ /dev/null
@@ -1,909 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoundPool"
-#include <utils/Log.h>
-
-#define USE_SHARED_MEM_BUFFER
-
-#include <media/AudioTrack.h>
-#include <media/mediaplayer.h>
-#include <media/SoundPool.h>
-#include "SoundPoolThread.h"
-
-namespace android
-{
-
-int kDefaultBufferCount = 4;
-uint32_t kMaxSampleRate = 48000;
-uint32_t kDefaultSampleRate = 44100;
-uint32_t kDefaultFrameCount = 1200;
-size_t kDefaultHeapSize = 1024 * 1024; // 1MB
-
-
-SoundPool::SoundPool(int maxChannels, audio_stream_type_t streamType, int srcQuality)
-{
- ALOGV("SoundPool constructor: maxChannels=%d, streamType=%d, srcQuality=%d",
- maxChannels, streamType, srcQuality);
-
- // check limits
- mMaxChannels = maxChannels;
- if (mMaxChannels < 1) {
- mMaxChannels = 1;
- }
- else if (mMaxChannels > 32) {
- mMaxChannels = 32;
- }
- ALOGW_IF(maxChannels != mMaxChannels, "App requested %d channels", maxChannels);
-
- mQuit = false;
- mDecodeThread = 0;
- mStreamType = streamType;
- mSrcQuality = srcQuality;
- mAllocated = 0;
- mNextSampleID = 0;
- mNextChannelID = 0;
-
- mCallback = 0;
- mUserData = 0;
-
- mChannelPool = new SoundChannel[mMaxChannels];
- for (int i = 0; i < mMaxChannels; ++i) {
- mChannelPool[i].init(this);
- mChannels.push_back(&mChannelPool[i]);
- }
-
- // start decode thread
- startThreads();
-}
-
-SoundPool::~SoundPool()
-{
- ALOGV("SoundPool destructor");
- mDecodeThread->quit();
- quit();
-
- Mutex::Autolock lock(&mLock);
-
- mChannels.clear();
- if (mChannelPool)
- delete [] mChannelPool;
- // clean up samples
- ALOGV("clear samples");
- mSamples.clear();
-
- if (mDecodeThread)
- delete mDecodeThread;
-}
-
-void SoundPool::addToRestartList(SoundChannel* channel)
-{
- Mutex::Autolock lock(&mRestartLock);
- if (!mQuit) {
- mRestart.push_back(channel);
- mCondition.signal();
- }
-}
-
-void SoundPool::addToStopList(SoundChannel* channel)
-{
- Mutex::Autolock lock(&mRestartLock);
- if (!mQuit) {
- mStop.push_back(channel);
- mCondition.signal();
- }
-}
-
-int SoundPool::beginThread(void* arg)
-{
- SoundPool* p = (SoundPool*)arg;
- return p->run();
-}
-
-int SoundPool::run()
-{
- mRestartLock.lock();
- while (!mQuit) {
- mCondition.wait(mRestartLock);
- ALOGV("awake");
- if (mQuit) break;
-
- while (!mStop.empty()) {
- SoundChannel* channel;
- ALOGV("Getting channel from stop list");
- List<SoundChannel* >::iterator iter = mStop.begin();
- channel = *iter;
- mStop.erase(iter);
- mRestartLock.unlock();
- if (channel != 0) {
- Mutex::Autolock lock(&mLock);
- channel->stop();
- }
- mRestartLock.lock();
- if (mQuit) break;
- }
-
- while (!mRestart.empty()) {
- SoundChannel* channel;
- ALOGV("Getting channel from list");
- List<SoundChannel*>::iterator iter = mRestart.begin();
- channel = *iter;
- mRestart.erase(iter);
- mRestartLock.unlock();
- if (channel != 0) {
- Mutex::Autolock lock(&mLock);
- channel->nextEvent();
- }
- mRestartLock.lock();
- if (mQuit) break;
- }
- }
-
- mStop.clear();
- mRestart.clear();
- mCondition.signal();
- mRestartLock.unlock();
- ALOGV("goodbye");
- return 0;
-}
-
-void SoundPool::quit()
-{
- mRestartLock.lock();
- mQuit = true;
- mCondition.signal();
- mCondition.wait(mRestartLock);
- ALOGV("return from quit");
- mRestartLock.unlock();
-}
-
-bool SoundPool::startThreads()
-{
- createThreadEtc(beginThread, this, "SoundPool");
- if (mDecodeThread == NULL)
- mDecodeThread = new SoundPoolThread(this);
- return mDecodeThread != NULL;
-}
-
-SoundChannel* SoundPool::findChannel(int channelID)
-{
- for (int i = 0; i < mMaxChannels; ++i) {
- if (mChannelPool[i].channelID() == channelID) {
- return &mChannelPool[i];
- }
- }
- return NULL;
-}
-
-SoundChannel* SoundPool::findNextChannel(int channelID)
-{
- for (int i = 0; i < mMaxChannels; ++i) {
- if (mChannelPool[i].nextChannelID() == channelID) {
- return &mChannelPool[i];
- }
- }
- return NULL;
-}
-
-int SoundPool::load(const char* path, int priority)
-{
- ALOGV("load: path=%s, priority=%d", path, priority);
- Mutex::Autolock lock(&mLock);
- sp<Sample> sample = new Sample(++mNextSampleID, path);
- mSamples.add(sample->sampleID(), sample);
- doLoad(sample);
- return sample->sampleID();
-}
-
-int SoundPool::load(int fd, int64_t offset, int64_t length, int priority)
-{
- ALOGV("load: fd=%d, offset=%lld, length=%lld, priority=%d",
- fd, offset, length, priority);
- Mutex::Autolock lock(&mLock);
- sp<Sample> sample = new Sample(++mNextSampleID, fd, offset, length);
- mSamples.add(sample->sampleID(), sample);
- doLoad(sample);
- return sample->sampleID();
-}
-
-void SoundPool::doLoad(sp<Sample>& sample)
-{
- ALOGV("doLoad: loading sample sampleID=%d", sample->sampleID());
- sample->startLoad();
- mDecodeThread->loadSample(sample->sampleID());
-}
-
-bool SoundPool::unload(int sampleID)
-{
- ALOGV("unload: sampleID=%d", sampleID);
- Mutex::Autolock lock(&mLock);
- return mSamples.removeItem(sampleID);
-}
-
-int SoundPool::play(int sampleID, float leftVolume, float rightVolume,
- int priority, int loop, float rate)
-{
- ALOGV("play sampleID=%d, leftVolume=%f, rightVolume=%f, priority=%d, loop=%d, rate=%f",
- sampleID, leftVolume, rightVolume, priority, loop, rate);
- sp<Sample> sample;
- SoundChannel* channel;
- int channelID;
-
- Mutex::Autolock lock(&mLock);
-
- if (mQuit) {
- return 0;
- }
- // is sample ready?
- sample = findSample(sampleID);
- if ((sample == 0) || (sample->state() != Sample::READY)) {
- ALOGW(" sample %d not READY", sampleID);
- return 0;
- }
-
- dump();
-
- // allocate a channel
- channel = allocateChannel_l(priority);
-
- // no channel allocated - return 0
- if (!channel) {
- ALOGV("No channel allocated");
- return 0;
- }
-
- channelID = ++mNextChannelID;
-
- ALOGV("play channel %p state = %d", channel, channel->state());
- channel->play(sample, channelID, leftVolume, rightVolume, priority, loop, rate);
- return channelID;
-}
-
-SoundChannel* SoundPool::allocateChannel_l(int priority)
-{
- List<SoundChannel*>::iterator iter;
- SoundChannel* channel = NULL;
-
- // allocate a channel
- if (!mChannels.empty()) {
- iter = mChannels.begin();
- if (priority >= (*iter)->priority()) {
- channel = *iter;
- mChannels.erase(iter);
- ALOGV("Allocated active channel");
- }
- }
-
- // update priority and put it back in the list
- if (channel) {
- channel->setPriority(priority);
- for (iter = mChannels.begin(); iter != mChannels.end(); ++iter) {
- if (priority < (*iter)->priority()) {
- break;
- }
- }
- mChannels.insert(iter, channel);
- }
- return channel;
-}
-
-// move a channel from its current position to the front of the list
-void SoundPool::moveToFront_l(SoundChannel* channel)
-{
- for (List<SoundChannel*>::iterator iter = mChannels.begin(); iter != mChannels.end(); ++iter) {
- if (*iter == channel) {
- mChannels.erase(iter);
- mChannels.push_front(channel);
- break;
- }
- }
-}
-
-void SoundPool::pause(int channelID)
-{
- ALOGV("pause(%d)", channelID);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->pause();
- }
-}
-
-void SoundPool::autoPause()
-{
- ALOGV("autoPause()");
- Mutex::Autolock lock(&mLock);
- for (int i = 0; i < mMaxChannels; ++i) {
- SoundChannel* channel = &mChannelPool[i];
- channel->autoPause();
- }
-}
-
-void SoundPool::resume(int channelID)
-{
- ALOGV("resume(%d)", channelID);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->resume();
- }
-}
-
-void SoundPool::autoResume()
-{
- ALOGV("autoResume()");
- Mutex::Autolock lock(&mLock);
- for (int i = 0; i < mMaxChannels; ++i) {
- SoundChannel* channel = &mChannelPool[i];
- channel->autoResume();
- }
-}
-
-void SoundPool::stop(int channelID)
-{
- ALOGV("stop(%d)", channelID);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->stop();
- } else {
- channel = findNextChannel(channelID);
- if (channel)
- channel->clearNextEvent();
- }
-}
-
-void SoundPool::setVolume(int channelID, float leftVolume, float rightVolume)
-{
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setVolume(leftVolume, rightVolume);
- }
-}
-
-void SoundPool::setPriority(int channelID, int priority)
-{
- ALOGV("setPriority(%d, %d)", channelID, priority);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setPriority(priority);
- }
-}
-
-void SoundPool::setLoop(int channelID, int loop)
-{
- ALOGV("setLoop(%d, %d)", channelID, loop);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setLoop(loop);
- }
-}
-
-void SoundPool::setRate(int channelID, float rate)
-{
- ALOGV("setRate(%d, %f)", channelID, rate);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setRate(rate);
- }
-}
-
-// call with lock held
-void SoundPool::done_l(SoundChannel* channel)
-{
- ALOGV("done_l(%d)", channel->channelID());
- // if "stolen", play next event
- if (channel->nextChannelID() != 0) {
- ALOGV("add to restart list");
- addToRestartList(channel);
- }
-
- // return to idle state
- else {
- ALOGV("move to front");
- moveToFront_l(channel);
- }
-}
-
-void SoundPool::setCallback(SoundPoolCallback* callback, void* user)
-{
- Mutex::Autolock lock(&mCallbackLock);
- mCallback = callback;
- mUserData = user;
-}
-
-void SoundPool::notify(SoundPoolEvent event)
-{
- Mutex::Autolock lock(&mCallbackLock);
- if (mCallback != NULL) {
- mCallback(event, this, mUserData);
- }
-}
-
-void SoundPool::dump()
-{
- for (int i = 0; i < mMaxChannels; ++i) {
- mChannelPool[i].dump();
- }
-}
-
-
-Sample::Sample(int sampleID, const char* url)
-{
- init();
- mSampleID = sampleID;
- mUrl = strdup(url);
- ALOGV("create sampleID=%d, url=%s", mSampleID, mUrl);
-}
-
-Sample::Sample(int sampleID, int fd, int64_t offset, int64_t length)
-{
- init();
- mSampleID = sampleID;
- mFd = dup(fd);
- mOffset = offset;
- mLength = length;
- ALOGV("create sampleID=%d, fd=%d, offset=%lld, length=%lld", mSampleID, mFd, mLength, mOffset);
-}
-
-void Sample::init()
-{
- mSize = 0;
- mRefCount = 0;
- mSampleID = 0;
- mState = UNLOADED;
- mFd = -1;
- mOffset = 0;
- mLength = 0;
- mUrl = 0;
-}
-
-Sample::~Sample()
-{
- ALOGV("Sample::destructor sampleID=%d, fd=%d", mSampleID, mFd);
- if (mFd > 0) {
- ALOGV("close(%d)", mFd);
- ::close(mFd);
- }
- free(mUrl);
-}
-
-status_t Sample::doLoad()
-{
- uint32_t sampleRate;
- int numChannels;
- audio_format_t format;
- status_t status;
- mHeap = new MemoryHeapBase(kDefaultHeapSize);
-
- ALOGV("Start decode");
- if (mUrl) {
- status = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format, mHeap, &mSize);
- } else {
- status = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format,
- mHeap, &mSize);
- ALOGV("close(%d)", mFd);
- ::close(mFd);
- mFd = -1;
- }
- if (status != NO_ERROR) {
- ALOGE("Unable to load sample: %s", mUrl);
- goto error;
- }
- ALOGV("pointer = %p, size = %u, sampleRate = %u, numChannels = %d",
- mHeap->getBase(), mSize, sampleRate, numChannels);
-
- if (sampleRate > kMaxSampleRate) {
- ALOGE("Sample rate (%u) out of range", sampleRate);
- status = BAD_VALUE;
- goto error;
- }
-
- if ((numChannels < 1) || (numChannels > 2)) {
- ALOGE("Sample channel count (%d) out of range", numChannels);
- status = BAD_VALUE;
- goto error;
- }
-
- mData = new MemoryBase(mHeap, 0, mSize);
- mSampleRate = sampleRate;
- mNumChannels = numChannels;
- mFormat = format;
- mState = READY;
- return NO_ERROR;
-
-error:
- mHeap.clear();
- return status;
-}
-
-
-void SoundChannel::init(SoundPool* soundPool)
-{
- mSoundPool = soundPool;
-}
-
-// call with sound pool lock held
-void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftVolume,
- float rightVolume, int priority, int loop, float rate)
-{
- sp<AudioTrack> oldTrack;
- sp<AudioTrack> newTrack;
- status_t status;
-
- { // scope for the lock
- Mutex::Autolock lock(&mLock);
-
- ALOGV("SoundChannel::play %p: sampleID=%d, channelID=%d, leftVolume=%f, rightVolume=%f,"
- " priority=%d, loop=%d, rate=%f",
- this, sample->sampleID(), nextChannelID, leftVolume, rightVolume,
- priority, loop, rate);
-
- // if not idle, this voice is being stolen
- if (mState != IDLE) {
- ALOGV("channel %d stolen - event queued for channel %d", channelID(), nextChannelID);
- mNextEvent.set(sample, nextChannelID, leftVolume, rightVolume, priority, loop, rate);
- stop_l();
- return;
- }
-
- // initialize track
- size_t afFrameCount;
- uint32_t afSampleRate;
- audio_stream_type_t streamType = mSoundPool->streamType();
- if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) {
- afFrameCount = kDefaultFrameCount;
- }
- if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
- afSampleRate = kDefaultSampleRate;
- }
- int numChannels = sample->numChannels();
- uint32_t sampleRate = uint32_t(float(sample->sampleRate()) * rate + 0.5);
- uint32_t totalFrames = (kDefaultBufferCount * afFrameCount * sampleRate) / afSampleRate;
- uint32_t bufferFrames = (totalFrames + (kDefaultBufferCount - 1)) / kDefaultBufferCount;
- uint32_t frameCount = 0;
-
- if (loop) {
- frameCount = sample->size()/numChannels/
- ((sample->format() == AUDIO_FORMAT_PCM_16_BIT) ? sizeof(int16_t) : sizeof(uint8_t));
- }
-
-#ifndef USE_SHARED_MEM_BUFFER
- // Ensure minimum audio buffer size in case of short looped sample
- if(frameCount < totalFrames) {
- frameCount = totalFrames;
- }
-#endif
-
- // mToggle toggles each time a track is started on a given channel.
- // The toggle is concatenated with the SoundChannel address and passed to AudioTrack
- // as callback user data. This enables the detection of callbacks received from the old
- // audio track while the new one is being started and avoids processing them with
- // wrong audio audio buffer size (mAudioBufferSize)
- unsigned long toggle = mToggle ^ 1;
- void *userData = (void *)((unsigned long)this | toggle);
- uint32_t channels = (numChannels == 2) ?
- AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO;
-
- // do not create a new audio track if current track is compatible with sample parameters
-#ifdef USE_SHARED_MEM_BUFFER
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
-#else
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channels, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
- bufferFrames);
-#endif
- oldTrack = mAudioTrack;
- status = newTrack->initCheck();
- if (status != NO_ERROR) {
- ALOGE("Error creating AudioTrack");
- goto exit;
- }
- ALOGV("setVolume %p", newTrack.get());
- newTrack->setVolume(leftVolume, rightVolume);
- newTrack->setLoop(0, frameCount, loop);
-
- // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
- mToggle = toggle;
- mAudioTrack = newTrack;
- mPos = 0;
- mSample = sample;
- mChannelID = nextChannelID;
- mPriority = priority;
- mLoop = loop;
- mLeftVolume = leftVolume;
- mRightVolume = rightVolume;
- mNumChannels = numChannels;
- mRate = rate;
- clearNextEvent();
- mState = PLAYING;
- mAudioTrack->start();
- mAudioBufferSize = newTrack->frameCount()*newTrack->frameSize();
- }
-
-exit:
- ALOGV("delete oldTrack %p", oldTrack.get());
- if (status != NO_ERROR) {
- mAudioTrack.clear();
- }
-}
-
-void SoundChannel::nextEvent()
-{
- sp<Sample> sample;
- int nextChannelID;
- float leftVolume;
- float rightVolume;
- int priority;
- int loop;
- float rate;
-
- // check for valid event
- {
- Mutex::Autolock lock(&mLock);
- nextChannelID = mNextEvent.channelID();
- if (nextChannelID == 0) {
- ALOGV("stolen channel has no event");
- return;
- }
-
- sample = mNextEvent.sample();
- leftVolume = mNextEvent.leftVolume();
- rightVolume = mNextEvent.rightVolume();
- priority = mNextEvent.priority();
- loop = mNextEvent.loop();
- rate = mNextEvent.rate();
- }
-
- ALOGV("Starting stolen channel %d -> %d", channelID(), nextChannelID);
- play(sample, nextChannelID, leftVolume, rightVolume, priority, loop, rate);
-}
-
-void SoundChannel::callback(int event, void* user, void *info)
-{
- SoundChannel* channel = static_cast<SoundChannel*>((void *)((unsigned long)user & ~1));
-
- channel->process(event, info, (unsigned long)user & 1);
-}
-
-void SoundChannel::process(int event, void *info, unsigned long toggle)
-{
- //ALOGV("process(%d)", mChannelID);
-
- Mutex::Autolock lock(&mLock);
-
- AudioTrack::Buffer* b = NULL;
- if (event == AudioTrack::EVENT_MORE_DATA) {
- b = static_cast<AudioTrack::Buffer *>(info);
- }
-
- if (mToggle != toggle) {
- ALOGV("process wrong toggle %p channel %d", this, mChannelID);
- if (b != NULL) {
- b->size = 0;
- }
- return;
- }
-
- sp<Sample> sample = mSample;
-
-// ALOGV("SoundChannel::process event %d", event);
-
- if (event == AudioTrack::EVENT_MORE_DATA) {
-
- // check for stop state
- if (b->size == 0) return;
-
- if (mState == IDLE) {
- b->size = 0;
- return;
- }
-
- if (sample != 0) {
- // fill buffer
- uint8_t* q = (uint8_t*) b->i8;
- size_t count = 0;
-
- if (mPos < (int)sample->size()) {
- uint8_t* p = sample->data() + mPos;
- count = sample->size() - mPos;
- if (count > b->size) {
- count = b->size;
- }
- memcpy(q, p, count);
-// ALOGV("fill: q=%p, p=%p, mPos=%u, b->size=%u, count=%d", q, p, mPos, b->size, count);
- } else if (mPos < mAudioBufferSize) {
- count = mAudioBufferSize - mPos;
- if (count > b->size) {
- count = b->size;
- }
- memset(q, 0, count);
-// ALOGV("fill extra: q=%p, mPos=%u, b->size=%u, count=%d", q, mPos, b->size, count);
- }
-
- mPos += count;
- b->size = count;
- //ALOGV("buffer=%p, [0]=%d", b->i16, b->i16[0]);
- }
- } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END ||
- event == AudioTrack::EVENT_NEW_IAUDIOTRACK) {
- ALOGV("process %p channel %d event %s",
- this, mChannelID, (event == AudioTrack::EVENT_UNDERRUN) ? "UNDERRUN" :
- (event == AudioTrack::EVENT_BUFFER_END) ? "BUFFER_END" : "NEW_IAUDIOTRACK");
- mSoundPool->addToStopList(this);
- } else if (event == AudioTrack::EVENT_LOOP_END) {
- ALOGV("End loop %p channel %d", this, mChannelID);
- } else {
- ALOGW("SoundChannel::process unexpected event %d", event);
- }
-}
-
-
-// call with lock held
-bool SoundChannel::doStop_l()
-{
- if (mState != IDLE) {
- setVolume_l(0, 0);
- ALOGV("stop");
- mAudioTrack->stop();
- mSample.clear();
- mState = IDLE;
- mPriority = IDLE_PRIORITY;
- return true;
- }
- return false;
-}
-
-// call with lock held and sound pool lock held
-void SoundChannel::stop_l()
-{
- if (doStop_l()) {
- mSoundPool->done_l(this);
- }
-}
-
-// call with sound pool lock held
-void SoundChannel::stop()
-{
- bool stopped;
- {
- Mutex::Autolock lock(&mLock);
- stopped = doStop_l();
- }
-
- if (stopped) {
- mSoundPool->done_l(this);
- }
-}
-
-//FIXME: Pause is a little broken right now
-void SoundChannel::pause()
-{
- Mutex::Autolock lock(&mLock);
- if (mState == PLAYING) {
- ALOGV("pause track");
- mState = PAUSED;
- mAudioTrack->pause();
- }
-}
-
-void SoundChannel::autoPause()
-{
- Mutex::Autolock lock(&mLock);
- if (mState == PLAYING) {
- ALOGV("pause track");
- mState = PAUSED;
- mAutoPaused = true;
- mAudioTrack->pause();
- }
-}
-
-void SoundChannel::resume()
-{
- Mutex::Autolock lock(&mLock);
- if (mState == PAUSED) {
- ALOGV("resume track");
- mState = PLAYING;
- mAutoPaused = false;
- mAudioTrack->start();
- }
-}
-
-void SoundChannel::autoResume()
-{
- Mutex::Autolock lock(&mLock);
- if (mAutoPaused && (mState == PAUSED)) {
- ALOGV("resume track");
- mState = PLAYING;
- mAutoPaused = false;
- mAudioTrack->start();
- }
-}
-
-void SoundChannel::setRate(float rate)
-{
- Mutex::Autolock lock(&mLock);
- if (mAudioTrack != NULL && mSample != 0) {
- uint32_t sampleRate = uint32_t(float(mSample->sampleRate()) * rate + 0.5);
- mAudioTrack->setSampleRate(sampleRate);
- mRate = rate;
- }
-}
-
-// call with lock held
-void SoundChannel::setVolume_l(float leftVolume, float rightVolume)
-{
- mLeftVolume = leftVolume;
- mRightVolume = rightVolume;
- if (mAudioTrack != NULL)
- mAudioTrack->setVolume(leftVolume, rightVolume);
-}
-
-void SoundChannel::setVolume(float leftVolume, float rightVolume)
-{
- Mutex::Autolock lock(&mLock);
- setVolume_l(leftVolume, rightVolume);
-}
-
-void SoundChannel::setLoop(int loop)
-{
- Mutex::Autolock lock(&mLock);
- if (mAudioTrack != NULL && mSample != 0) {
- uint32_t loopEnd = mSample->size()/mNumChannels/
- ((mSample->format() == AUDIO_FORMAT_PCM_16_BIT) ? sizeof(int16_t) : sizeof(uint8_t));
- mAudioTrack->setLoop(0, loopEnd, loop);
- mLoop = loop;
- }
-}
-
-SoundChannel::~SoundChannel()
-{
- ALOGV("SoundChannel destructor %p", this);
- {
- Mutex::Autolock lock(&mLock);
- clearNextEvent();
- doStop_l();
- }
- // do not call AudioTrack destructor with mLock held as it will wait for the AudioTrack
- // callback thread to exit which may need to execute process() and acquire the mLock.
- mAudioTrack.clear();
-}
-
-void SoundChannel::dump()
-{
- ALOGV("mState = %d mChannelID=%d, mNumChannels=%d, mPos = %d, mPriority=%d, mLoop=%d",
- mState, mChannelID, mNumChannels, mPos, mPriority, mLoop);
-}
-
-void SoundEvent::set(const sp<Sample>& sample, int channelID, float leftVolume,
- float rightVolume, int priority, int loop, float rate)
-{
- mSample = sample;
- mChannelID = channelID;
- mLeftVolume = leftVolume;
- mRightVolume = rightVolume;
- mPriority = priority;
- mLoop = loop;
-    mRate = rate;
-}
-
-} // end namespace android
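
Editor's note: the comments in the deleted SoundChannel::play()/callback() above describe a low-bit "toggle" packed into the callback cookie so that callbacks still arriving from the superseded AudioTrack can be recognized and dropped. The following is a minimal, self-contained sketch of that idea only; it assumes at-least-2-byte-aligned objects, and Channel, makeCookie and dispatch are hypothetical names, not part of the SoundPool API.

    #include <cstdint>
    #include <cstdio>

    // Sketch (hypothetical names) of the cookie/toggle scheme: the object
    // pointer is at least 2-byte aligned, so its lowest bit is free to carry
    // a generation toggle that flips each time a new track is started.
    struct Channel {
        unsigned long mToggle = 0;   // current generation

        // Build the cookie that would be registered with the track callback.
        void* makeCookie() {
            mToggle ^= 1;            // start a new generation
            return reinterpret_cast<void*>(
                    reinterpret_cast<std::uintptr_t>(this) | mToggle);
        }

        // Called with the toggle bit recovered from the cookie.
        void process(unsigned long cookieToggle) {
            if (cookieToggle != mToggle) {
                std::printf("stale callback ignored\n");   // from the old track
                return;
            }
            std::printf("callback for the current track handled\n");
        }
    };

    // Unpack a cookie into object pointer + toggle bit, as callback() did.
    static void dispatch(void* cookie) {
        const std::uintptr_t bits = reinterpret_cast<std::uintptr_t>(cookie);
        Channel* channel =
                reinterpret_cast<Channel*>(bits & ~static_cast<std::uintptr_t>(1));
        channel->process(bits & 1);
    }

    int main() {
        Channel channel;
        void* oldCookie = channel.makeCookie();   // first start
        void* newCookie = channel.makeCookie();   // restart: generation flips
        dispatch(oldCookie);                      // detected as stale, ignored
        dispatch(newCookie);                      // processed normally
        return 0;
    }
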
diff --git a/media/libmedia/SoundPoolThread.cpp b/media/libmedia/SoundPoolThread.cpp
deleted file mode 100644
index ba3b482..0000000
--- a/media/libmedia/SoundPoolThread.cpp
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoundPoolThread"
-#include "utils/Log.h"
-
-#include "SoundPoolThread.h"
-
-namespace android {
-
-void SoundPoolThread::write(SoundPoolMsg msg) {
- Mutex::Autolock lock(&mLock);
- while (mMsgQueue.size() >= maxMessages) {
- mCondition.wait(mLock);
- }
-
- // if thread is quitting, don't add to queue
- if (mRunning) {
- mMsgQueue.push(msg);
- mCondition.signal();
- }
-}
-
-const SoundPoolMsg SoundPoolThread::read() {
- Mutex::Autolock lock(&mLock);
- while (mMsgQueue.size() == 0) {
- mCondition.wait(mLock);
- }
- SoundPoolMsg msg = mMsgQueue[0];
- mMsgQueue.removeAt(0);
- mCondition.signal();
- return msg;
-}
-
-void SoundPoolThread::quit() {
- Mutex::Autolock lock(&mLock);
- if (mRunning) {
- mRunning = false;
- mMsgQueue.clear();
- mMsgQueue.push(SoundPoolMsg(SoundPoolMsg::KILL, 0));
- mCondition.signal();
- mCondition.wait(mLock);
- }
- ALOGV("return from quit");
-}
-
-SoundPoolThread::SoundPoolThread(SoundPool* soundPool) :
- mSoundPool(soundPool)
-{
- mMsgQueue.setCapacity(maxMessages);
- if (createThreadEtc(beginThread, this, "SoundPoolThread")) {
- mRunning = true;
- }
-}
-
-SoundPoolThread::~SoundPoolThread()
-{
- quit();
-}
-
-int SoundPoolThread::beginThread(void* arg) {
- ALOGV("beginThread");
- SoundPoolThread* soundPoolThread = (SoundPoolThread*)arg;
- return soundPoolThread->run();
-}
-
-int SoundPoolThread::run() {
- ALOGV("run");
- for (;;) {
- SoundPoolMsg msg = read();
- ALOGV("Got message m=%d, mData=%d", msg.mMessageType, msg.mData);
- switch (msg.mMessageType) {
- case SoundPoolMsg::KILL:
- ALOGV("goodbye");
- return NO_ERROR;
- case SoundPoolMsg::LOAD_SAMPLE:
- doLoadSample(msg.mData);
- break;
- default:
- ALOGW("run: Unrecognized message %d\n",
- msg.mMessageType);
- break;
- }
- }
-}
-
-void SoundPoolThread::loadSample(int sampleID) {
- write(SoundPoolMsg(SoundPoolMsg::LOAD_SAMPLE, sampleID));
-}
-
-void SoundPoolThread::doLoadSample(int sampleID) {
- sp <Sample> sample = mSoundPool->findSample(sampleID);
- status_t status = -1;
- if (sample != 0) {
- status = sample->doLoad();
- }
- mSoundPool->notify(SoundPoolEvent(SoundPoolEvent::SAMPLE_LOADED, sampleID, status));
-}
-
-} // end namespace android
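
Editor's note: the SoundPoolThread removed above is a small bounded producer/consumer queue. write() blocks while the queue is full, read() blocks while it is empty, and quit() clears the queue, posts a KILL message and then waits until the worker has consumed it. Below is a rough standalone sketch of the same pattern using std::mutex and std::condition_variable; Worker, Msg and post() are illustrative names, not the original classes.

    #include <condition_variable>
    #include <cstddef>
    #include <cstdio>
    #include <deque>
    #include <mutex>
    #include <thread>

    // Illustrative message type, loosely mirroring SoundPoolMsg.
    struct Msg { enum Type { KILL, LOAD_SAMPLE } type; int data; };

    class Worker {
    public:
        Worker() : mThread([this] { run(); }) {}
        ~Worker() { quit(); mThread.join(); }

        // Producer side: block while the bounded queue is full.
        void post(Msg msg) {
            std::unique_lock<std::mutex> lock(mLock);
            mCond.wait(lock, [this] {
                return mQueue.size() < kMaxMessages || !mRunning;
            });
            if (mRunning) {
                mQueue.push_back(msg);
                mCond.notify_all();
            }
        }

        // Ask the worker to stop and wait until it has seen the KILL message.
        void quit() {
            std::unique_lock<std::mutex> lock(mLock);
            if (!mRunning) return;
            mRunning = false;
            mQueue.clear();
            mQueue.push_back({Msg::KILL, 0});
            mCond.notify_all();
            mCond.wait(lock, [this] { return mQueue.empty(); });
        }

    private:
        // Consumer side: block while the queue is empty.
        Msg read() {
            std::unique_lock<std::mutex> lock(mLock);
            mCond.wait(lock, [this] { return !mQueue.empty(); });
            Msg msg = mQueue.front();
            mQueue.pop_front();
            mCond.notify_all();   // wake a blocked producer or quit()
            return msg;
        }

        void run() {
            for (;;) {
                Msg msg = read();
                switch (msg.type) {
                case Msg::KILL:        return;   // quit() is now unblocked
                case Msg::LOAD_SAMPLE: std::printf("load sample %d\n", msg.data); break;
                }
            }
        }

        static constexpr std::size_t kMaxMessages = 5;
        std::mutex mLock;
        std::condition_variable mCond;
        std::deque<Msg> mQueue;
        bool mRunning = true;
        std::thread mThread;   // declared last so it starts after the other members
    };

    int main() {
        Worker w;
        w.post({Msg::LOAD_SAMPLE, 42});
        return 0;                // ~Worker() runs the quit handshake
    }
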
diff --git a/media/libmedia/SoundPoolThread.h b/media/libmedia/SoundPoolThread.h
deleted file mode 100644
index 7e96900..0000000
--- a/media/libmedia/SoundPoolThread.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOUNDPOOLTHREAD_H_
-#define SOUNDPOOLTHREAD_H_
-
-#include <utils/threads.h>
-#include <utils/Vector.h>
-#include <media/AudioTrack.h>
-
-#include <media/SoundPool.h>
-
-namespace android {
-
-class SoundPoolMsg {
-public:
- enum MessageType { INVALID, KILL, LOAD_SAMPLE };
- SoundPoolMsg() : mMessageType(INVALID), mData(0) {}
- SoundPoolMsg(MessageType MessageType, int data) :
- mMessageType(MessageType), mData(data) {}
- uint16_t mMessageType;
- uint16_t mData;
-};
-
-/*
- * This class handles background requests from the SoundPool
- */
-class SoundPoolThread {
-public:
- SoundPoolThread(SoundPool* SoundPool);
- ~SoundPoolThread();
- void loadSample(int sampleID);
- void quit();
- void write(SoundPoolMsg msg);
-
-private:
- static const size_t maxMessages = 5;
-
- static int beginThread(void* arg);
- int run();
- void doLoadSample(int sampleID);
- const SoundPoolMsg read();
-
- Mutex mLock;
- Condition mCondition;
- Vector<SoundPoolMsg> mMsgQueue;
- SoundPool* mSoundPool;
- bool mRunning;
-};
-
-} // end namespace android
-
-#endif /*SOUNDPOOLTHREAD_H_*/
diff --git a/media/libmedia/StringArray.h b/media/libmedia/StringArray.h
deleted file mode 100644
index ae47085..0000000
--- a/media/libmedia/StringArray.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//
-// Sortable array of strings. STL-ish, but STL-free.
-//
-#ifndef _LIBS_MEDIA_STRING_ARRAY_H
-#define _LIBS_MEDIA_STRING_ARRAY_H
-
-#include <stdlib.h>
-#include <string.h>
-
-namespace android {
-
-//
-// An expanding array of strings. Add, get, sort, delete.
-//
-class StringArray {
-public:
- StringArray();
- virtual ~StringArray();
-
- //
- // Add a string. A copy of the string is made.
- //
- bool push_back(const char* str);
-
- //
- // Delete an entry.
- //
- void erase(int idx);
-
- //
- // Sort the array.
- //
- void sort(int (*compare)(const void*, const void*));
-
- //
- // Pass this to the sort routine to do an ascending alphabetical sort.
- //
- static int cmpAscendingAlpha(const void* pstr1, const void* pstr2);
-
- //
- // Get the #of items in the array.
- //
- inline int size(void) const { return mCurrent; }
-
- //
- // Return entry N.
- // [should use operator[] here]
- //
- const char* getEntry(int idx) const {
- return (unsigned(idx) >= unsigned(mCurrent)) ? NULL : mArray[idx];
- }
-
- //
- // Set entry N to specified string.
- // [should use operator[] here]
- //
- void setEntry(int idx, const char* str);
-
-private:
- int mMax;
- int mCurrent;
- char** mArray;
-};
-
-}; // namespace android
-
-#endif // _LIBS_MEDIA_STRING_ARRAY_H
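
Editor's note: the ToneGenerator.cpp hunk below changes only initializer spelling. The tone descriptor table moves from the old GNU-specific `field: value` extension to the standard designated-initializer form `.field = value` (valid C99, and C++20 for C++; GCC and clang also accept it as an extension in earlier dialects, presumably the motivation for the cleanup), while the tone data itself is unchanged. A tiny illustration of the two spellings, using a hypothetical Segment struct rather than the real ToneDescriptor:

    #include <cstdio>

    // Hypothetical stand-in for one descriptor entry (not the real ToneDescriptor).
    struct Segment { int durationMs; int freqHz; };

    // Old GNU extension spelling, as used by the removed lines (non-standard,
    // compilers warn about or reject it):
    //     static const Segment kBusySegment = { durationMs: 500, freqHz: 425 };

    // Standard designated-initializer spelling, as used by the added lines:
    static const Segment kBusySegment = { .durationMs = 500, .freqHz = 425 };

    int main() {
        std::printf("%d ms at %d Hz\n", kBusySegment.durationMs, kBusySegment.freqHz);
        return 0;
    }
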
diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp
index adef3be..2cc4685 100644
--- a/media/libmedia/ToneGenerator.cpp
+++ b/media/libmedia/ToneGenerator.cpp
@@ -28,718 +28,718 @@ namespace android {
// Descriptors for all available tones (See ToneGenerator::ToneDescriptor class declaration for details)
const ToneGenerator::ToneDescriptor ToneGenerator::sToneDescriptors[] = {
- { segments: {{ duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1336, 941, 0 }, 0, 0},
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_0
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1209, 697, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_1
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1336, 697, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_2
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1477, 697, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_3
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1209, 770, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_4
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1336, 770, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_5
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1477, 770, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_6
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1209, 852, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_7
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1336, 852, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_8
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1477, 852, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_9
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1209, 941, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_S
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1477, 941, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_P
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1633, 697, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_A
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1633, 770, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_B
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1633, 852, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_C
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 1633, 941, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_DTMF_D
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 425, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_SUP_DIAL
- { segments: { { duration: 500 , waveFreq: { 425, 0 }, 0, 0},
- { duration: 500, waveFreq: { 0 }, 0, 0},
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_SUP_BUSY
- { segments: { { duration: 200, waveFreq: { 425, 0 }, 0, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_SUP_CONGESTION
- { segments: { { duration: 200, waveFreq: { 425, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_SUP_RADIO_ACK
- { segments: { { duration: 200, waveFreq: { 425, 0 }, 0, 0},
- { duration: 200, waveFreq: { 0 }, 0, 0},
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 2,
- repeatSegment: 0 }, // TONE_SUP_RADIO_NOTAVAIL
- { segments: { { duration: 330, waveFreq: { 950, 1400, 1800, 0 }, 0, 0},
- { duration: 1000, waveFreq: { 0 }, 0, 0},
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_SUP_ERROR
- { segments: { { duration: 200, waveFreq: { 425, 0 }, 0, 0 },
- { duration: 600, waveFreq: { 0 }, 0, 0 },
- { duration: 200, waveFreq: { 425, 0 }, 0, 0 },
- { duration: 3000, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_SUP_CALL_WAITING
- { segments: { { duration: 1000, waveFreq: { 425, 0 }, 0, 0 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_SUP_RINGTONE
- { segments: { { duration: 40, waveFreq: { 400, 1200, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_PROP_BEEP
- { segments: { { duration: 100, waveFreq: { 1200, 0 }, 0, 0 },
- { duration: 100, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 1,
- repeatSegment: 0 }, // TONE_PROP_ACK
- { segments: { { duration: 400, waveFreq: { 300, 400, 500, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_PROP_NACK
- { segments: { { duration: 200, waveFreq: { 400, 1200, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_PROP_PROMPT
- { segments: { { duration: 40, waveFreq: { 400, 1200, 0 }, 0, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 40, waveFreq: { 400, 1200, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_PROP_BEEP2
- { segments: { { duration: 250, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 620, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_SUP_INTERCEPT
- { segments: { { duration: 250, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 620, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 7,
- repeatSegment: 0 }, // TONE_SUP_INTERCEPT_ABBREV
- { segments: { { duration: 250, waveFreq: { 480, 620, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 7,
- repeatSegment: 0 }, // TONE_SUP_CONGESTION_ABBREV
- { segments: { { duration: 100, waveFreq: { 350, 440, 0 }, 0, 0 },
- { duration: 100, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 2,
- repeatSegment: 0 }, // TONE_SUP_CONFIRM
- { segments: { { duration: 100, waveFreq: { 480, 0 }, 0, 0 },
- { duration: 100, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 3,
- repeatSegment: 0 }, // TONE_SUP_PIP
- { segments: {{ duration: ToneGenerator::TONEGEN_INF, waveFreq: { 425, 0 }, 0, 0},
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_DIAL_TONE_LITE
- { segments: { { duration: 2000, waveFreq: { 440, 480, 0 }, 0, 0 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_NETWORK_USA_RINGBACK
- { segments: { { duration: 250, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 620, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_INTERCEPT
- { segments: { { duration: 250, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 620, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_ABBR_INTERCEPT
- { segments: { { duration: 250, waveFreq: { 480, 620, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_REORDER
- { segments: { { duration: 250, waveFreq: { 480, 620, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 7,
- repeatSegment: 0 }, // TONE_CDMA_ABBR_REORDER
- { segments: { { duration: 500, waveFreq: { 480, 620, 0 }, 0, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_NETWORK_BUSY
- { segments: { { duration: 100, waveFreq: { 350, 440, 0 }, 0, 0 },
- { duration: 100, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 2,
- repeatSegment: 0 }, // TONE_CDMA_CONFIRM
- { segments: { { duration: 500, waveFreq: { 660, 1000, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_ANSWER
- { segments: { { duration: 300, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_NETWORK_CALLWAITING
- { segments: { { duration: 100, waveFreq: { 480, 0 }, 0, 0 },
- { duration: 100, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: 3,
- repeatSegment: 0 }, // TONE_CDMA_PIP
-
- { segments: { { duration: 32, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 64, waveFreq: { 2556, 0}, 19, 0},
- { duration: 32, waveFreq: { 2091, 0}, 0, 0},
- { duration: 48, waveFreq: { 2556, 0}, 0, 0},
- { duration: 4000, waveFreq: { 0 }, 0, 0},
- { duration: 0, waveFreq: { 0 }, 0, 0}},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_NORMAL
- { segments: { { duration: 32, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 64, waveFreq: { 2556, 0}, 7, 0 },
- { duration: 32, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 400, waveFreq: { 0 }, 0, 0 },
- { duration: 32, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 64, waveFreq: { 2556, 0}, 7, 4 },
- { duration: 32, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_INTERGROUP
- { segments: { { duration: 32, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 64, waveFreq: { 2556, 0}, 3, 0 },
- { duration: 16, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 32, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 64, waveFreq: { 2556, 0}, 3, 4 },
- { duration: 16, waveFreq: { 2091, 0}, 0, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_SP_PRI
- { segments: { { duration: 0, waveFreq: { 0 }, 0, 0} },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT3
- { segments: { { duration: 32, waveFreq: { 2091, 0 }, 0, 0 },
- { duration: 64, waveFreq: { 2556, 0 }, 4, 0 },
- { duration: 20, waveFreq: { 2091, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 } , 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PING_RING
- { segments: { { duration: 0, waveFreq: { 0 }, 0, 0} },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT5
- { segments: { { duration: 0, waveFreq: { 0 }, 0, 0} },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT6
- { segments: { { duration: 0, waveFreq: { 0 }, 0, 0} },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT7
-
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 39, 0 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_L
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 39, 0 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_L
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 39, 0 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_L
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 15, 0 },
- { duration: 400, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_SS
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 15, 0 },
- { duration: 400, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_SS
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 15, 0 },
- { duration: 400, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_SS
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 15, 6 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_SSL
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 15, 6 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_SSL
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 15, 6 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_SSL
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 19, 0 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 19, 3 },
- { duration: 3000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_SS_2
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 19, 0 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 19, 3 },
- { duration: 3000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_SS_2
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 19, 0 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 19, 3 },
- { duration: 3000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_SS_2
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 9, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 19, 3 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 9, 6 },
- { duration: 3000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_SLS
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 9, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 19, 3 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 9, 6 },
- { duration: 3000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_SLS
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 9, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 19, 3 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 9, 6 },
- { duration: 3000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_SLS
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 9, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 9, 3 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 9, 6 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 9, 9 },
- { duration: 2500, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_S_X4
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 9, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 9, 3 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 9, 6 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 9, 9 },
- { duration: 2500, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_S_X4
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 9, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 9, 3 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 9, 6 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 9, 9 },
- { duration: 2500, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_S_X4
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 19, 0 },
- { duration: 2000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_PBX_L
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 19, 0 },
- { duration: 2000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_PBX_L
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 19, 0 },
- { duration: 2000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_PBX_L
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 3 },
- { duration: 2000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_PBX_SS
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 3 },
- { duration: 2000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_PBX_SS
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 3 },
- { duration: 2000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_PBX_SS
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 15, 6 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_PBX_SSL
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 15, 6 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_PBX_SSL
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 15, 6 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_PBX_SSL
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 15, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 6 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_PBX_SLS
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 15, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 6 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_PBX_SLS
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 15, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 6 },
- { duration: 1000, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_PBX_SLS
- { segments: { { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 6 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 3700, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 4000, 0 }, 7, 9 },
- { duration: 800, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_HIGH_PBX_S_X4
- { segments: { { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 6 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2600, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 2900, 0 }, 7, 9 },
- { duration: 800, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_MED_PBX_S_X4
- { segments: { { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 0 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 3 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 6 },
- { duration: 200, waveFreq: { 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1300, 0 }, 0, 0 },
- { duration: 25, waveFreq: { 1450, 0 }, 7, 9 },
- { duration: 800, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_LOW_PBX_S_X4
-
- { segments: { { duration: 62, waveFreq: { 1109, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 784, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 740, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 622, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 1109, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_ALERT_NETWORK_LITE
- { segments: { { duration: 62, waveFreq: { 1245, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 659, 0 }, 2, 0 },
- { duration: 62, waveFreq: { 1245, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_ALERT_AUTOREDIAL_LITE
- { segments: { { duration: 400, waveFreq: { 1150, 770, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_ONE_MIN_BEEP
- { segments: { { duration: 120, waveFreq: { 941, 1477, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_KEYPAD_VOLUME_KEY_LITE
- { segments: { { duration: 375, waveFreq: { 587, 0 }, 0, 0 },
- { duration: 125, waveFreq: { 1175, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_PRESSHOLDKEY_LITE
- { segments: { { duration: 62, waveFreq: { 587, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 784, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 831, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 784, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 1109, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 784, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 831, 0 }, 0, 0 },
- { duration: 62, waveFreq: { 784, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_ALERT_INCALL_LITE
- { segments: { { duration: 125, waveFreq: { 941, 0 }, 0, 0 },
- { duration: 10, waveFreq: { 0 }, 2, 0 },
- { duration: 4990, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_CDMA_EMERGENCY_RINGBACK
- { segments: { { duration: 125, waveFreq: { 1319, 0 }, 0, 0 },
- { duration: 125, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 2,
- repeatSegment: 0 }, // TONE_CDMA_ALERT_CALL_GUARD
- { segments: { { duration: 125, waveFreq: { 1047, 0 }, 0, 0 },
- { duration: 125, waveFreq: { 370, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_SOFT_ERROR_LITE
- { segments: { { duration: 125, waveFreq: { 1480, 0 }, 0, 0 },
- { duration: 125, waveFreq: { 1397, 0 }, 0, 0 },
- { duration: 125, waveFreq: { 784, 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 } },
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_CALLDROP_LITE
-
- { segments: { { duration: 500, waveFreq: { 425, 0 }, 0, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_NETWORK_BUSY_ONE_SHOT
- { segments: { { duration: 400, waveFreq: { 1150, 770 }, 0, 0 },
- { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_ABBR_ALERT
- { segments: { { duration: 0, waveFreq: { 0 }, 0, 0 }},
- repeatCnt: 0,
- repeatSegment: 0 }, // TONE_CDMA_SIGNAL_OFF
-
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 350, 440, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_ANSI_DIAL
- { segments: { { duration: 500, waveFreq: { 480, 620, 0 }, 0, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_ANSI_BUSY
- { segments: { { duration: 250, waveFreq: { 480, 620, 0 }, 0, 0 },
- { duration: 250, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_ANSI_CONGESTION
- { segments: { { duration: 300, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 9700, waveFreq: { 0 }, 0, 0 },
- { duration: 100, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 100, waveFreq: { 0 }, 0, 0 },
- { duration: 100, waveFreq: { 440, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 1 }, // TONE_ANSI_CALL_WAITING
- { segments: { { duration: 2000, waveFreq: { 440, 480, 0 }, 0, 0 },
- { duration: 4000, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_ANSI_RINGTONE
- { segments: { { duration: ToneGenerator::TONEGEN_INF, waveFreq: { 400, 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_JAPAN_DIAL
- { segments: { { duration: 500, waveFreq: { 400, 0 }, 0, 0 },
- { duration: 500, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_JAPAN_BUSY
- { segments: { { duration: 1000, waveFreq: { 400, 0 }, 0, 0 },
- { duration: 2000, waveFreq: { 0 }, 0, 0 },
- { duration: 0 , waveFreq: { 0 }, 0, 0}},
- repeatCnt: ToneGenerator::TONEGEN_INF,
- repeatSegment: 0 }, // TONE_JAPAN_RADIO_ACK
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1336, 941, 0 }, 0, 0},
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_0
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1209, 697, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_1
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1336, 697, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_2
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1477, 697, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_3
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1209, 770, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_4
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1336, 770, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_5
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1477, 770, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_6
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1209, 852, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_7
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1336, 852, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_8
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1477, 852, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_9
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1209, 941, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_S
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1477, 941, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_P
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1633, 697, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_A
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1633, 770, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_B
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1633, 852, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_C
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 1633, 941, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_DTMF_D
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_SUP_DIAL
+ { .segments = { { .duration = 500 , .waveFreq = { 425, 0 }, 0, 0},
+ { .duration = 500, .waveFreq = { 0 }, 0, 0},
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_SUP_BUSY
+ { .segments = { { .duration = 200, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_SUP_CONGESTION
+ { .segments = { { .duration = 200, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_SUP_RADIO_ACK
+ { .segments = { { .duration = 200, .waveFreq = { 425, 0 }, 0, 0},
+ { .duration = 200, .waveFreq = { 0 }, 0, 0},
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 2,
+ .repeatSegment = 0 }, // TONE_SUP_RADIO_NOTAVAIL
+ { .segments = { { .duration = 330, .waveFreq = { 950, 1400, 1800, 0 }, 0, 0},
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0},
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_SUP_ERROR
+ { .segments = { { .duration = 200, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 600, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 200, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_SUP_CALL_WAITING
+ { .segments = { { .duration = 1000, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_SUP_RINGTONE
+ { .segments = { { .duration = 40, .waveFreq = { 400, 1200, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_PROP_BEEP
+ { .segments = { { .duration = 100, .waveFreq = { 1200, 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 1,
+ .repeatSegment = 0 }, // TONE_PROP_ACK
+ { .segments = { { .duration = 400, .waveFreq = { 300, 400, 500, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_PROP_NACK
+ { .segments = { { .duration = 200, .waveFreq = { 400, 1200, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_PROP_PROMPT
+ { .segments = { { .duration = 40, .waveFreq = { 400, 1200, 0 }, 0, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 40, .waveFreq = { 400, 1200, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_PROP_BEEP2
+ { .segments = { { .duration = 250, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 620, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_SUP_INTERCEPT
+ { .segments = { { .duration = 250, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 620, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 7,
+ .repeatSegment = 0 }, // TONE_SUP_INTERCEPT_ABBREV
+ { .segments = { { .duration = 250, .waveFreq = { 480, 620, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 7,
+ .repeatSegment = 0 }, // TONE_SUP_CONGESTION_ABBREV
+ { .segments = { { .duration = 100, .waveFreq = { 350, 440, 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 2,
+ .repeatSegment = 0 }, // TONE_SUP_CONFIRM
+ { .segments = { { .duration = 100, .waveFreq = { 480, 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 3,
+ .repeatSegment = 0 }, // TONE_SUP_PIP
+ { .segments = {{ .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 425, 0 }, 0, 0},
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_DIAL_TONE_LITE
+ { .segments = { { .duration = 2000, .waveFreq = { 440, 480, 0 }, 0, 0 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_NETWORK_USA_RINGBACK
+ { .segments = { { .duration = 250, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 620, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_INTERCEPT
+ { .segments = { { .duration = 250, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 620, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_ABBR_INTERCEPT
+ { .segments = { { .duration = 250, .waveFreq = { 480, 620, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_REORDER
+ { .segments = { { .duration = 250, .waveFreq = { 480, 620, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 7,
+ .repeatSegment = 0 }, // TONE_CDMA_ABBR_REORDER
+ { .segments = { { .duration = 500, .waveFreq = { 480, 620, 0 }, 0, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_NETWORK_BUSY
+ { .segments = { { .duration = 100, .waveFreq = { 350, 440, 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 2,
+ .repeatSegment = 0 }, // TONE_CDMA_CONFIRM
+ { .segments = { { .duration = 500, .waveFreq = { 660, 1000, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_ANSWER
+ { .segments = { { .duration = 300, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_NETWORK_CALLWAITING
+ { .segments = { { .duration = 100, .waveFreq = { 480, 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 3,
+ .repeatSegment = 0 }, // TONE_CDMA_PIP
+
+ { .segments = { { .duration = 32, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 64, .waveFreq = { 2556, 0}, 19, 0},
+ { .duration = 32, .waveFreq = { 2091, 0}, 0, 0},
+ { .duration = 48, .waveFreq = { 2556, 0}, 0, 0},
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0},
+ { .duration = 0, .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_NORMAL
+ { .segments = { { .duration = 32, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 64, .waveFreq = { 2556, 0}, 7, 0 },
+ { .duration = 32, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 400, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 32, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 64, .waveFreq = { 2556, 0}, 7, 4 },
+ { .duration = 32, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_INTERGROUP
+ { .segments = { { .duration = 32, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 64, .waveFreq = { 2556, 0}, 3, 0 },
+ { .duration = 16, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 32, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 64, .waveFreq = { 2556, 0}, 3, 4 },
+ { .duration = 16, .waveFreq = { 2091, 0}, 0, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_SP_PRI
+ { .segments = { { .duration = 0, .waveFreq = { 0 }, 0, 0} },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT3
+ { .segments = { { .duration = 32, .waveFreq = { 2091, 0 }, 0, 0 },
+ { .duration = 64, .waveFreq = { 2556, 0 }, 4, 0 },
+ { .duration = 20, .waveFreq = { 2091, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 } , 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PING_RING
+ { .segments = { { .duration = 0, .waveFreq = { 0 }, 0, 0} },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT5
+ { .segments = { { .duration = 0, .waveFreq = { 0 }, 0, 0} },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT6
+ { .segments = { { .duration = 0, .waveFreq = { 0 }, 0, 0} },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALL_SIGNAL_ISDN_PAT7
+
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 39, 0 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_L
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 39, 0 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_L
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 39, 0 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_L
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 15, 0 },
+ { .duration = 400, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_SS
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 15, 0 },
+ { .duration = 400, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_SS
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 15, 0 },
+ { .duration = 400, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_SS
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 15, 6 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_SSL
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 15, 6 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_SSL
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 15, 6 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_SSL
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 19, 0 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 19, 3 },
+ { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_SS_2
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 19, 0 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 19, 3 },
+ { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_SS_2
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 19, 0 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 19, 3 },
+ { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_SS_2
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 9, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 19, 3 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 9, 6 },
+ { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_SLS
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 9, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 19, 3 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 9, 6 },
+ { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_SLS
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 9, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 19, 3 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 9, 6 },
+ { .duration = 3000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_SLS
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 9, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 9, 3 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 9, 6 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 9, 9 },
+ { .duration = 2500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_S_X4
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 9, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 9, 3 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 9, 6 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 9, 9 },
+ { .duration = 2500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_S_X4
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 9, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 9, 3 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 9, 6 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 9, 9 },
+ { .duration = 2500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_S_X4
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 19, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_PBX_L
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 19, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_PBX_L
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 19, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_PBX_L
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 3 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_PBX_SS
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 3 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_PBX_SS
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 3 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_PBX_SS
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 15, 6 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_PBX_SSL
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 15, 6 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_PBX_SSL
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 15, 6 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_PBX_SSL
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 15, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 6 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_PBX_SLS
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 15, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 6 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_PBX_SLS
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 15, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 6 },
+ { .duration = 1000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_PBX_SLS
+ { .segments = { { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 6 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 3700, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 4000, 0 }, 7, 9 },
+ { .duration = 800, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_HIGH_PBX_S_X4
+ { .segments = { { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 6 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2600, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 2900, 0 }, 7, 9 },
+ { .duration = 800, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_MED_PBX_S_X4
+ { .segments = { { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 0 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 3 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 6 },
+ { .duration = 200, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1300, 0 }, 0, 0 },
+ { .duration = 25, .waveFreq = { 1450, 0 }, 7, 9 },
+ { .duration = 800, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_LOW_PBX_S_X4
+
+ { .segments = { { .duration = 62, .waveFreq = { 1109, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 784, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 740, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 622, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 1109, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_ALERT_NETWORK_LITE
+ { .segments = { { .duration = 62, .waveFreq = { 1245, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 659, 0 }, 2, 0 },
+ { .duration = 62, .waveFreq = { 1245, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_ALERT_AUTOREDIAL_LITE
+ { .segments = { { .duration = 400, .waveFreq = { 1150, 770, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_ONE_MIN_BEEP
+ { .segments = { { .duration = 120, .waveFreq = { 941, 1477, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_KEYPAD_VOLUME_KEY_LITE
+ { .segments = { { .duration = 375, .waveFreq = { 587, 0 }, 0, 0 },
+ { .duration = 125, .waveFreq = { 1175, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_PRESSHOLDKEY_LITE
+ { .segments = { { .duration = 62, .waveFreq = { 587, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 784, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 831, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 784, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 1109, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 784, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 831, 0 }, 0, 0 },
+ { .duration = 62, .waveFreq = { 784, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_ALERT_INCALL_LITE
+ { .segments = { { .duration = 125, .waveFreq = { 941, 0 }, 0, 0 },
+ { .duration = 10, .waveFreq = { 0 }, 2, 0 },
+ { .duration = 4990, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_CDMA_EMERGENCY_RINGBACK
+ { .segments = { { .duration = 125, .waveFreq = { 1319, 0 }, 0, 0 },
+ { .duration = 125, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 2,
+ .repeatSegment = 0 }, // TONE_CDMA_ALERT_CALL_GUARD
+ { .segments = { { .duration = 125, .waveFreq = { 1047, 0 }, 0, 0 },
+ { .duration = 125, .waveFreq = { 370, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_SOFT_ERROR_LITE
+ { .segments = { { .duration = 125, .waveFreq = { 1480, 0 }, 0, 0 },
+ { .duration = 125, .waveFreq = { 1397, 0 }, 0, 0 },
+ { .duration = 125, .waveFreq = { 784, 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 } },
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_CALLDROP_LITE
+
+ { .segments = { { .duration = 500, .waveFreq = { 425, 0 }, 0, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_NETWORK_BUSY_ONE_SHOT
+ { .segments = { { .duration = 400, .waveFreq = { 1150, 770 }, 0, 0 },
+ { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_ABBR_ALERT
+ { .segments = { { .duration = 0, .waveFreq = { 0 }, 0, 0 }},
+ .repeatCnt = 0,
+ .repeatSegment = 0 }, // TONE_CDMA_SIGNAL_OFF
+
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 350, 440, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_ANSI_DIAL
+ { .segments = { { .duration = 500, .waveFreq = { 480, 620, 0 }, 0, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_ANSI_BUSY
+ { .segments = { { .duration = 250, .waveFreq = { 480, 620, 0 }, 0, 0 },
+ { .duration = 250, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_ANSI_CONGESTION
+ { .segments = { { .duration = 300, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 9700, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 100, .waveFreq = { 440, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 1 }, // TONE_ANSI_CALL_WAITING
+ { .segments = { { .duration = 2000, .waveFreq = { 440, 480, 0 }, 0, 0 },
+ { .duration = 4000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_ANSI_RINGTONE
+ { .segments = { { .duration = ToneGenerator::TONEGEN_INF, .waveFreq = { 400, 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_JAPAN_DIAL
+ { .segments = { { .duration = 500, .waveFreq = { 400, 0 }, 0, 0 },
+ { .duration = 500, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_JAPAN_BUSY
+ { .segments = { { .duration = 1000, .waveFreq = { 400, 0 }, 0, 0 },
+ { .duration = 2000, .waveFreq = { 0 }, 0, 0 },
+ { .duration = 0 , .waveFreq = { 0 }, 0, 0}},
+ .repeatCnt = ToneGenerator::TONEGEN_INF,
+ .repeatSegment = 0 }, // TONE_JAPAN_RADIO_ACK
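
The designated initializers above each fill one tone descriptor: a segment lists a duration in milliseconds, a zero-terminated set of waveform frequencies in Hz, and two trailing positional values for a segment-loop count and loop start index, while repeatCnt and repeatSegment say how often, and from which segment, the whole sequence repeats (ToneGenerator::TONEGEN_INF meaning forever). The self-contained sketch below mirrors that layout with illustrative type and constant names; the real definitions live in ToneGenerator.h and may differ in detail, and the sketch uses C++20 all-designated initializers where the table above relies on the older GNU extension of mixing designated and positional fields.

    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-ins for the ToneGenerator descriptor types; names and
    // limits here are assumptions, not the framework definitions.
    static const unsigned kMaxWaves = 3;
    static const unsigned kMaxSegments = 12;
    static const uint32_t kInf = 0xFFFFFFFFu;      // plays the role of TONEGEN_INF

    struct ToneSegmentSketch {
        uint32_t duration;                         // ms; 0 terminates the segment list
        uint16_t waveFreq[kMaxWaves + 1];          // Hz; 0 terminates the frequency list
        uint16_t loopCnt;                          // extra repeats of a sub-sequence
        uint16_t loopIndx;                         // segment the loop jumps back to
    };

    struct ToneDescriptorSketch {
        ToneSegmentSketch segments[kMaxSegments + 1];
        uint32_t repeatCnt;                        // repeats of the whole sequence
        uint32_t repeatSegment;                    // first segment of the repeated part
    };

    // A busy-tone-like entry: 500 ms of 480+620 Hz, 500 ms of silence, repeated forever.
    static const ToneDescriptorSketch kBusyLike = {
        .segments = { { .duration = 500, .waveFreq = { 480, 620, 0 }, .loopCnt = 0, .loopIndx = 0 },
                      { .duration = 500, .waveFreq = { 0 },           .loopCnt = 0, .loopIndx = 0 },
                      { .duration = 0,   .waveFreq = { 0 },           .loopCnt = 0, .loopIndx = 0 } },
        .repeatCnt = kInf,
        .repeatSegment = 0 };

    int main() {
        std::printf("first segment: %u ms at %u Hz\n",
                    (unsigned)kBusyLike.segments[0].duration,
                    (unsigned)kBusyLike.segments[0].waveFreq[0]);
        return 0;
    }
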
@@ -1057,7 +1057,7 @@ bool ToneGenerator::initAudioTrack() {
0, // notificationFrames
0, // sharedBuffer
mThreadCanCallJava,
- 0, // sessionId
+ AUDIO_SESSION_ALLOCATE,
AudioTrack::TRANSFER_CALLBACK);
if (mpAudioTrack->initCheck() != NO_ERROR) {
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index c146b8d..f91e3e4 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -52,6 +52,13 @@ Visualizer::Visualizer (int32_t priority,
Visualizer::~Visualizer()
{
+ ALOGV("Visualizer::~Visualizer()");
+ if (mCaptureThread != NULL) {
+ mCaptureThread->requestExitAndWait();
+ mCaptureThread.clear();
+ }
+ mCaptureCallBack = NULL;
+ mCaptureFlags = 0;
}
status_t Visualizer::setEnabled(bool enabled)
@@ -102,20 +109,18 @@ status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t
return INVALID_OPERATION;
}
- sp<CaptureThread> t = mCaptureThread;
- if (t != 0) {
- t->mLock.lock();
+ if (mCaptureThread != 0) {
+ mCaptureLock.unlock();
+ mCaptureThread->requestExitAndWait();
+ mCaptureLock.lock();
}
+
mCaptureThread.clear();
mCaptureCallBack = cbk;
mCaptureCbkUser = user;
mCaptureFlags = flags;
mCaptureRate = rate;
- if (t != 0) {
- t->mLock.unlock();
- }
-
if (cbk != NULL) {
mCaptureThread = new CaptureThread(*this, rate, ((flags & CAPTURE_CALL_JAVA) != 0));
}
diff --git a/media/libmedia/autodetect.cpp b/media/libmedia/autodetect.cpp
deleted file mode 100644
index be5c3b2..0000000
--- a/media/libmedia/autodetect.cpp
+++ /dev/null
@@ -1,885 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "autodetect.h"
-
-struct CharRange {
- uint16_t first;
- uint16_t last;
-};
-
-#define ARRAY_SIZE(x) (sizeof(x) / sizeof(*x))
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP932.TXT
-static const CharRange kShiftJISRanges[] = {
- { 0x8140, 0x817E },
- { 0x8180, 0x81AC },
- { 0x81B8, 0x81BF },
- { 0x81C8, 0x81CE },
- { 0x81DA, 0x81E8 },
- { 0x81F0, 0x81F7 },
- { 0x81FC, 0x81FC },
- { 0x824F, 0x8258 },
- { 0x8260, 0x8279 },
- { 0x8281, 0x829A },
- { 0x829F, 0x82F1 },
- { 0x8340, 0x837E },
- { 0x8380, 0x8396 },
- { 0x839F, 0x83B6 },
- { 0x83BF, 0x83D6 },
- { 0x8440, 0x8460 },
- { 0x8470, 0x847E },
- { 0x8480, 0x8491 },
- { 0x849F, 0x84BE },
- { 0x8740, 0x875D },
- { 0x875F, 0x8775 },
- { 0x877E, 0x877E },
- { 0x8780, 0x879C },
- { 0x889F, 0x88FC },
- { 0x8940, 0x897E },
- { 0x8980, 0x89FC },
- { 0x8A40, 0x8A7E },
- { 0x8A80, 0x8AFC },
- { 0x8B40, 0x8B7E },
- { 0x8B80, 0x8BFC },
- { 0x8C40, 0x8C7E },
- { 0x8C80, 0x8CFC },
- { 0x8D40, 0x8D7E },
- { 0x8D80, 0x8DFC },
- { 0x8E40, 0x8E7E },
- { 0x8E80, 0x8EFC },
- { 0x8F40, 0x8F7E },
- { 0x8F80, 0x8FFC },
- { 0x9040, 0x907E },
- { 0x9080, 0x90FC },
- { 0x9140, 0x917E },
- { 0x9180, 0x91FC },
- { 0x9240, 0x927E },
- { 0x9280, 0x92FC },
- { 0x9340, 0x937E },
- { 0x9380, 0x93FC },
- { 0x9440, 0x947E },
- { 0x9480, 0x94FC },
- { 0x9540, 0x957E },
- { 0x9580, 0x95FC },
- { 0x9640, 0x967E },
- { 0x9680, 0x96FC },
- { 0x9740, 0x977E },
- { 0x9780, 0x97FC },
- { 0x9840, 0x9872 },
- { 0x989F, 0x98FC },
- { 0x9940, 0x997E },
- { 0x9980, 0x99FC },
- { 0x9A40, 0x9A7E },
- { 0x9A80, 0x9AFC },
- { 0x9B40, 0x9B7E },
- { 0x9B80, 0x9BFC },
- { 0x9C40, 0x9C7E },
- { 0x9C80, 0x9CFC },
- { 0x9D40, 0x9D7E },
- { 0x9D80, 0x9DFC },
- { 0x9E40, 0x9E7E },
- { 0x9E80, 0x9EFC },
- { 0x9F40, 0x9F7E },
- { 0x9F80, 0x9FFC },
- { 0xE040, 0xE07E },
- { 0xE080, 0xE0FC },
- { 0xE140, 0xE17E },
- { 0xE180, 0xE1FC },
- { 0xE240, 0xE27E },
- { 0xE280, 0xE2FC },
- { 0xE340, 0xE37E },
- { 0xE380, 0xE3FC },
- { 0xE440, 0xE47E },
- { 0xE480, 0xE4FC },
- { 0xE540, 0xE57E },
- { 0xE580, 0xE5FC },
- { 0xE640, 0xE67E },
- { 0xE680, 0xE6FC },
- { 0xE740, 0xE77E },
- { 0xE780, 0xE7FC },
- { 0xE840, 0xE87E },
- { 0xE880, 0xE8FC },
- { 0xE940, 0xE97E },
- { 0xE980, 0xE9FC },
- { 0xEA40, 0xEA7E },
- { 0xEA80, 0xEAA4 },
- { 0xED40, 0xED7E },
- { 0xED80, 0xEDFC },
- { 0xEE40, 0xEE7E },
- { 0xEE80, 0xEEEC },
- { 0xEEEF, 0xEEFC },
- { 0xFA40, 0xFA7E },
- { 0xFA80, 0xFAFC },
- { 0xFB40, 0xFB7E },
- { 0xFB80, 0xFBFC },
- { 0xFC40, 0xFC4B },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP936.TXT
-static const CharRange kGBKRanges[] = {
- { 0x8140, 0x817E },
- { 0x8180, 0x81FE },
- { 0x8240, 0x827E },
- { 0x8280, 0x82FE },
- { 0x8340, 0x837E },
- { 0x8380, 0x83FE },
- { 0x8440, 0x847E },
- { 0x8480, 0x84FE },
- { 0x8540, 0x857E },
- { 0x8580, 0x85FE },
- { 0x8640, 0x867E },
- { 0x8680, 0x86FE },
- { 0x8740, 0x877E },
- { 0x8780, 0x87FE },
- { 0x8840, 0x887E },
- { 0x8880, 0x88FE },
- { 0x8940, 0x897E },
- { 0x8980, 0x89FE },
- { 0x8A40, 0x8A7E },
- { 0x8A80, 0x8AFE },
- { 0x8B40, 0x8B7E },
- { 0x8B80, 0x8BFE },
- { 0x8C40, 0x8C7E },
- { 0x8C80, 0x8CFE },
- { 0x8D40, 0x8D7E },
- { 0x8D80, 0x8DFE },
- { 0x8E40, 0x8E7E },
- { 0x8E80, 0x8EFE },
- { 0x8F40, 0x8F7E },
- { 0x8F80, 0x8FFE },
- { 0x9040, 0x907E },
- { 0x9080, 0x90FE },
- { 0x9140, 0x917E },
- { 0x9180, 0x91FE },
- { 0x9240, 0x927E },
- { 0x9280, 0x92FE },
- { 0x9340, 0x937E },
- { 0x9380, 0x93FE },
- { 0x9440, 0x947E },
- { 0x9480, 0x94FE },
- { 0x9540, 0x957E },
- { 0x9580, 0x95FE },
- { 0x9640, 0x967E },
- { 0x9680, 0x96FE },
- { 0x9740, 0x977E },
- { 0x9780, 0x97FE },
- { 0x9840, 0x987E },
- { 0x9880, 0x98FE },
- { 0x9940, 0x997E },
- { 0x9980, 0x99FE },
- { 0x9A40, 0x9A7E },
- { 0x9A80, 0x9AFE },
- { 0x9B40, 0x9B7E },
- { 0x9B80, 0x9BFE },
- { 0x9C40, 0x9C7E },
- { 0x9C80, 0x9CFE },
- { 0x9D40, 0x9D7E },
- { 0x9D80, 0x9DFE },
- { 0x9E40, 0x9E7E },
- { 0x9E80, 0x9EFE },
- { 0x9F40, 0x9F7E },
- { 0x9F80, 0x9FFE },
- { 0xA040, 0xA07E },
- { 0xA080, 0xA0FE },
- { 0xA1A1, 0xA1FE },
- { 0xA2A1, 0xA2AA },
- { 0xA2B1, 0xA2E2 },
- { 0xA2E5, 0xA2EE },
- { 0xA2F1, 0xA2FC },
- { 0xA3A1, 0xA3FE },
- { 0xA4A1, 0xA4F3 },
- { 0xA5A1, 0xA5F6 },
- { 0xA6A1, 0xA6B8 },
- { 0xA6C1, 0xA6D8 },
- { 0xA6E0, 0xA6EB },
- { 0xA6EE, 0xA6F2 },
- { 0xA6F4, 0xA6F5 },
- { 0xA7A1, 0xA7C1 },
- { 0xA7D1, 0xA7F1 },
- { 0xA840, 0xA87E },
- { 0xA880, 0xA895 },
- { 0xA8A1, 0xA8BB },
- { 0xA8BD, 0xA8BE },
- { 0xA8C0, 0xA8C0 },
- { 0xA8C5, 0xA8E9 },
- { 0xA940, 0xA957 },
- { 0xA959, 0xA95A },
- { 0xA95C, 0xA95C },
- { 0xA960, 0xA97E },
- { 0xA980, 0xA988 },
- { 0xA996, 0xA996 },
- { 0xA9A4, 0xA9EF },
- { 0xAA40, 0xAA7E },
- { 0xAA80, 0xAAA0 },
- { 0xAB40, 0xAB7E },
- { 0xAB80, 0xABA0 },
- { 0xAC40, 0xAC7E },
- { 0xAC80, 0xACA0 },
- { 0xAD40, 0xAD7E },
- { 0xAD80, 0xADA0 },
- { 0xAE40, 0xAE7E },
- { 0xAE80, 0xAEA0 },
- { 0xAF40, 0xAF7E },
- { 0xAF80, 0xAFA0 },
- { 0xB040, 0xB07E },
- { 0xB080, 0xB0FE },
- { 0xB140, 0xB17E },
- { 0xB180, 0xB1FE },
- { 0xB240, 0xB27E },
- { 0xB280, 0xB2FE },
- { 0xB340, 0xB37E },
- { 0xB380, 0xB3FE },
- { 0xB440, 0xB47E },
- { 0xB480, 0xB4FE },
- { 0xB540, 0xB57E },
- { 0xB580, 0xB5FE },
- { 0xB640, 0xB67E },
- { 0xB680, 0xB6FE },
- { 0xB740, 0xB77E },
- { 0xB780, 0xB7FE },
- { 0xB840, 0xB87E },
- { 0xB880, 0xB8FE },
- { 0xB940, 0xB97E },
- { 0xB980, 0xB9FE },
- { 0xBA40, 0xBA7E },
- { 0xBA80, 0xBAFE },
- { 0xBB40, 0xBB7E },
- { 0xBB80, 0xBBFE },
- { 0xBC40, 0xBC7E },
- { 0xBC80, 0xBCFE },
- { 0xBD40, 0xBD7E },
- { 0xBD80, 0xBDFE },
- { 0xBE40, 0xBE7E },
- { 0xBE80, 0xBEFE },
- { 0xBF40, 0xBF7E },
- { 0xBF80, 0xBFFE },
- { 0xC040, 0xC07E },
- { 0xC080, 0xC0FE },
- { 0xC140, 0xC17E },
- { 0xC180, 0xC1FE },
- { 0xC240, 0xC27E },
- { 0xC280, 0xC2FE },
- { 0xC340, 0xC37E },
- { 0xC380, 0xC3FE },
- { 0xC440, 0xC47E },
- { 0xC480, 0xC4FE },
- { 0xC540, 0xC57E },
- { 0xC580, 0xC5FE },
- { 0xC640, 0xC67E },
- { 0xC680, 0xC6FE },
- { 0xC740, 0xC77E },
- { 0xC780, 0xC7FE },
- { 0xC840, 0xC87E },
- { 0xC880, 0xC8FE },
- { 0xC940, 0xC97E },
- { 0xC980, 0xC9FE },
- { 0xCA40, 0xCA7E },
- { 0xCA80, 0xCAFE },
- { 0xCB40, 0xCB7E },
- { 0xCB80, 0xCBFE },
- { 0xCC40, 0xCC7E },
- { 0xCC80, 0xCCFE },
- { 0xCD40, 0xCD7E },
- { 0xCD80, 0xCDFE },
- { 0xCE40, 0xCE7E },
- { 0xCE80, 0xCEFE },
- { 0xCF40, 0xCF7E },
- { 0xCF80, 0xCFFE },
- { 0xD040, 0xD07E },
- { 0xD080, 0xD0FE },
- { 0xD140, 0xD17E },
- { 0xD180, 0xD1FE },
- { 0xD240, 0xD27E },
- { 0xD280, 0xD2FE },
- { 0xD340, 0xD37E },
- { 0xD380, 0xD3FE },
- { 0xD440, 0xD47E },
- { 0xD480, 0xD4FE },
- { 0xD540, 0xD57E },
- { 0xD580, 0xD5FE },
- { 0xD640, 0xD67E },
- { 0xD680, 0xD6FE },
- { 0xD740, 0xD77E },
- { 0xD780, 0xD7F9 },
- { 0xD840, 0xD87E },
- { 0xD880, 0xD8FE },
- { 0xD940, 0xD97E },
- { 0xD980, 0xD9FE },
- { 0xDA40, 0xDA7E },
- { 0xDA80, 0xDAFE },
- { 0xDB40, 0xDB7E },
- { 0xDB80, 0xDBFE },
- { 0xDC40, 0xDC7E },
- { 0xDC80, 0xDCFE },
- { 0xDD40, 0xDD7E },
- { 0xDD80, 0xDDFE },
- { 0xDE40, 0xDE7E },
- { 0xDE80, 0xDEFE },
- { 0xDF40, 0xDF7E },
- { 0xDF80, 0xDFFE },
- { 0xE040, 0xE07E },
- { 0xE080, 0xE0FE },
- { 0xE140, 0xE17E },
- { 0xE180, 0xE1FE },
- { 0xE240, 0xE27E },
- { 0xE280, 0xE2FE },
- { 0xE340, 0xE37E },
- { 0xE380, 0xE3FE },
- { 0xE440, 0xE47E },
- { 0xE480, 0xE4FE },
- { 0xE540, 0xE57E },
- { 0xE580, 0xE5FE },
- { 0xE640, 0xE67E },
- { 0xE680, 0xE6FE },
- { 0xE740, 0xE77E },
- { 0xE780, 0xE7FE },
- { 0xE840, 0xE87E },
- { 0xE880, 0xE8FE },
- { 0xE940, 0xE97E },
- { 0xE980, 0xE9FE },
- { 0xEA40, 0xEA7E },
- { 0xEA80, 0xEAFE },
- { 0xEB40, 0xEB7E },
- { 0xEB80, 0xEBFE },
- { 0xEC40, 0xEC7E },
- { 0xEC80, 0xECFE },
- { 0xED40, 0xED7E },
- { 0xED80, 0xEDFE },
- { 0xEE40, 0xEE7E },
- { 0xEE80, 0xEEFE },
- { 0xEF40, 0xEF7E },
- { 0xEF80, 0xEFFE },
- { 0xF040, 0xF07E },
- { 0xF080, 0xF0FE },
- { 0xF140, 0xF17E },
- { 0xF180, 0xF1FE },
- { 0xF240, 0xF27E },
- { 0xF280, 0xF2FE },
- { 0xF340, 0xF37E },
- { 0xF380, 0xF3FE },
- { 0xF440, 0xF47E },
- { 0xF480, 0xF4FE },
- { 0xF540, 0xF57E },
- { 0xF580, 0xF5FE },
- { 0xF640, 0xF67E },
- { 0xF680, 0xF6FE },
- { 0xF740, 0xF77E },
- { 0xF780, 0xF7FE },
- { 0xF840, 0xF87E },
- { 0xF880, 0xF8A0 },
- { 0xF940, 0xF97E },
- { 0xF980, 0xF9A0 },
- { 0xFA40, 0xFA7E },
- { 0xFA80, 0xFAA0 },
- { 0xFB40, 0xFB7E },
- { 0xFB80, 0xFBA0 },
- { 0xFC40, 0xFC7E },
- { 0xFC80, 0xFCA0 },
- { 0xFD40, 0xFD7E },
- { 0xFD80, 0xFDA0 },
- { 0xFE40, 0xFE4F },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP949.TXT
-static const CharRange kEUCKRRanges[] = {
- { 0x8141, 0x815A },
- { 0x8161, 0x817A },
- { 0x8181, 0x81FE },
- { 0x8241, 0x825A },
- { 0x8261, 0x827A },
- { 0x8281, 0x82FE },
- { 0x8341, 0x835A },
- { 0x8361, 0x837A },
- { 0x8381, 0x83FE },
- { 0x8441, 0x845A },
- { 0x8461, 0x847A },
- { 0x8481, 0x84FE },
- { 0x8541, 0x855A },
- { 0x8561, 0x857A },
- { 0x8581, 0x85FE },
- { 0x8641, 0x865A },
- { 0x8661, 0x867A },
- { 0x8681, 0x86FE },
- { 0x8741, 0x875A },
- { 0x8761, 0x877A },
- { 0x8781, 0x87FE },
- { 0x8841, 0x885A },
- { 0x8861, 0x887A },
- { 0x8881, 0x88FE },
- { 0x8941, 0x895A },
- { 0x8961, 0x897A },
- { 0x8981, 0x89FE },
- { 0x8A41, 0x8A5A },
- { 0x8A61, 0x8A7A },
- { 0x8A81, 0x8AFE },
- { 0x8B41, 0x8B5A },
- { 0x8B61, 0x8B7A },
- { 0x8B81, 0x8BFE },
- { 0x8C41, 0x8C5A },
- { 0x8C61, 0x8C7A },
- { 0x8C81, 0x8CFE },
- { 0x8D41, 0x8D5A },
- { 0x8D61, 0x8D7A },
- { 0x8D81, 0x8DFE },
- { 0x8E41, 0x8E5A },
- { 0x8E61, 0x8E7A },
- { 0x8E81, 0x8EFE },
- { 0x8F41, 0x8F5A },
- { 0x8F61, 0x8F7A },
- { 0x8F81, 0x8FFE },
- { 0x9041, 0x905A },
- { 0x9061, 0x907A },
- { 0x9081, 0x90FE },
- { 0x9141, 0x915A },
- { 0x9161, 0x917A },
- { 0x9181, 0x91FE },
- { 0x9241, 0x925A },
- { 0x9261, 0x927A },
- { 0x9281, 0x92FE },
- { 0x9341, 0x935A },
- { 0x9361, 0x937A },
- { 0x9381, 0x93FE },
- { 0x9441, 0x945A },
- { 0x9461, 0x947A },
- { 0x9481, 0x94FE },
- { 0x9541, 0x955A },
- { 0x9561, 0x957A },
- { 0x9581, 0x95FE },
- { 0x9641, 0x965A },
- { 0x9661, 0x967A },
- { 0x9681, 0x96FE },
- { 0x9741, 0x975A },
- { 0x9761, 0x977A },
- { 0x9781, 0x97FE },
- { 0x9841, 0x985A },
- { 0x9861, 0x987A },
- { 0x9881, 0x98FE },
- { 0x9941, 0x995A },
- { 0x9961, 0x997A },
- { 0x9981, 0x99FE },
- { 0x9A41, 0x9A5A },
- { 0x9A61, 0x9A7A },
- { 0x9A81, 0x9AFE },
- { 0x9B41, 0x9B5A },
- { 0x9B61, 0x9B7A },
- { 0x9B81, 0x9BFE },
- { 0x9C41, 0x9C5A },
- { 0x9C61, 0x9C7A },
- { 0x9C81, 0x9CFE },
- { 0x9D41, 0x9D5A },
- { 0x9D61, 0x9D7A },
- { 0x9D81, 0x9DFE },
- { 0x9E41, 0x9E5A },
- { 0x9E61, 0x9E7A },
- { 0x9E81, 0x9EFE },
- { 0x9F41, 0x9F5A },
- { 0x9F61, 0x9F7A },
- { 0x9F81, 0x9FFE },
- { 0xA041, 0xA05A },
- { 0xA061, 0xA07A },
- { 0xA081, 0xA0FE },
- { 0xA141, 0xA15A },
- { 0xA161, 0xA17A },
- { 0xA181, 0xA1FE },
- { 0xA241, 0xA25A },
- { 0xA261, 0xA27A },
- { 0xA281, 0xA2E7 },
- { 0xA341, 0xA35A },
- { 0xA361, 0xA37A },
- { 0xA381, 0xA3FE },
- { 0xA441, 0xA45A },
- { 0xA461, 0xA47A },
- { 0xA481, 0xA4FE },
- { 0xA541, 0xA55A },
- { 0xA561, 0xA57A },
- { 0xA581, 0xA5AA },
- { 0xA5B0, 0xA5B9 },
- { 0xA5C1, 0xA5D8 },
- { 0xA5E1, 0xA5F8 },
- { 0xA641, 0xA65A },
- { 0xA661, 0xA67A },
- { 0xA681, 0xA6E4 },
- { 0xA741, 0xA75A },
- { 0xA761, 0xA77A },
- { 0xA781, 0xA7EF },
- { 0xA841, 0xA85A },
- { 0xA861, 0xA87A },
- { 0xA881, 0xA8A4 },
- { 0xA8A6, 0xA8A6 },
- { 0xA8A8, 0xA8AF },
- { 0xA8B1, 0xA8FE },
- { 0xA941, 0xA95A },
- { 0xA961, 0xA97A },
- { 0xA981, 0xA9FE },
- { 0xAA41, 0xAA5A },
- { 0xAA61, 0xAA7A },
- { 0xAA81, 0xAAF3 },
- { 0xAB41, 0xAB5A },
- { 0xAB61, 0xAB7A },
- { 0xAB81, 0xABF6 },
- { 0xAC41, 0xAC5A },
- { 0xAC61, 0xAC7A },
- { 0xAC81, 0xACC1 },
- { 0xACD1, 0xACF1 },
- { 0xAD41, 0xAD5A },
- { 0xAD61, 0xAD7A },
- { 0xAD81, 0xADA0 },
- { 0xAE41, 0xAE5A },
- { 0xAE61, 0xAE7A },
- { 0xAE81, 0xAEA0 },
- { 0xAF41, 0xAF5A },
- { 0xAF61, 0xAF7A },
- { 0xAF81, 0xAFA0 },
- { 0xB041, 0xB05A },
- { 0xB061, 0xB07A },
- { 0xB081, 0xB0FE },
- { 0xB141, 0xB15A },
- { 0xB161, 0xB17A },
- { 0xB181, 0xB1FE },
- { 0xB241, 0xB25A },
- { 0xB261, 0xB27A },
- { 0xB281, 0xB2FE },
- { 0xB341, 0xB35A },
- { 0xB361, 0xB37A },
- { 0xB381, 0xB3FE },
- { 0xB441, 0xB45A },
- { 0xB461, 0xB47A },
- { 0xB481, 0xB4FE },
- { 0xB541, 0xB55A },
- { 0xB561, 0xB57A },
- { 0xB581, 0xB5FE },
- { 0xB641, 0xB65A },
- { 0xB661, 0xB67A },
- { 0xB681, 0xB6FE },
- { 0xB741, 0xB75A },
- { 0xB761, 0xB77A },
- { 0xB781, 0xB7FE },
- { 0xB841, 0xB85A },
- { 0xB861, 0xB87A },
- { 0xB881, 0xB8FE },
- { 0xB941, 0xB95A },
- { 0xB961, 0xB97A },
- { 0xB981, 0xB9FE },
- { 0xBA41, 0xBA5A },
- { 0xBA61, 0xBA7A },
- { 0xBA81, 0xBAFE },
- { 0xBB41, 0xBB5A },
- { 0xBB61, 0xBB7A },
- { 0xBB81, 0xBBFE },
- { 0xBC41, 0xBC5A },
- { 0xBC61, 0xBC7A },
- { 0xBC81, 0xBCFE },
- { 0xBD41, 0xBD5A },
- { 0xBD61, 0xBD7A },
- { 0xBD81, 0xBDFE },
- { 0xBE41, 0xBE5A },
- { 0xBE61, 0xBE7A },
- { 0xBE81, 0xBEFE },
- { 0xBF41, 0xBF5A },
- { 0xBF61, 0xBF7A },
- { 0xBF81, 0xBFFE },
- { 0xC041, 0xC05A },
- { 0xC061, 0xC07A },
- { 0xC081, 0xC0FE },
- { 0xC141, 0xC15A },
- { 0xC161, 0xC17A },
- { 0xC181, 0xC1FE },
- { 0xC241, 0xC25A },
- { 0xC261, 0xC27A },
- { 0xC281, 0xC2FE },
- { 0xC341, 0xC35A },
- { 0xC361, 0xC37A },
- { 0xC381, 0xC3FE },
- { 0xC441, 0xC45A },
- { 0xC461, 0xC47A },
- { 0xC481, 0xC4FE },
- { 0xC541, 0xC55A },
- { 0xC561, 0xC57A },
- { 0xC581, 0xC5FE },
- { 0xC641, 0xC652 },
- { 0xC6A1, 0xC6FE },
- { 0xC7A1, 0xC7FE },
- { 0xC8A1, 0xC8FE },
- { 0xCAA1, 0xCAFE },
- { 0xCBA1, 0xCBFE },
- { 0xCCA1, 0xCCFE },
- { 0xCDA1, 0xCDFE },
- { 0xCEA1, 0xCEFE },
- { 0xCFA1, 0xCFFE },
- { 0xD0A1, 0xD0FE },
- { 0xD1A1, 0xD1FE },
- { 0xD2A1, 0xD2FE },
- { 0xD3A1, 0xD3FE },
- { 0xD4A1, 0xD4FE },
- { 0xD5A1, 0xD5FE },
- { 0xD6A1, 0xD6FE },
- { 0xD7A1, 0xD7FE },
- { 0xD8A1, 0xD8FE },
- { 0xD9A1, 0xD9FE },
- { 0xDAA1, 0xDAFE },
- { 0xDBA1, 0xDBFE },
- { 0xDCA1, 0xDCFE },
- { 0xDDA1, 0xDDFE },
- { 0xDEA1, 0xDEFE },
- { 0xDFA1, 0xDFFE },
- { 0xE0A1, 0xE0FE },
- { 0xE1A1, 0xE1FE },
- { 0xE2A1, 0xE2FE },
- { 0xE3A1, 0xE3FE },
- { 0xE4A1, 0xE4FE },
- { 0xE5A1, 0xE5FE },
- { 0xE6A1, 0xE6FE },
- { 0xE7A1, 0xE7FE },
- { 0xE8A1, 0xE8FE },
- { 0xE9A1, 0xE9FE },
- { 0xEAA1, 0xEAFE },
- { 0xEBA1, 0xEBFE },
- { 0xECA1, 0xECFE },
- { 0xEDA1, 0xEDFE },
- { 0xEEA1, 0xEEFE },
- { 0xEFA1, 0xEFFE },
- { 0xF0A1, 0xF0FE },
- { 0xF1A1, 0xF1FE },
- { 0xF2A1, 0xF2FE },
- { 0xF3A1, 0xF3FE },
- { 0xF4A1, 0xF4FE },
- { 0xF5A1, 0xF5FE },
- { 0xF6A1, 0xF6FE },
- { 0xF7A1, 0xF7FE },
- { 0xF8A1, 0xF8FE },
- { 0xF9A1, 0xF9FE },
- { 0xFAA1, 0xFAFE },
- { 0xFBA1, 0xFBFE },
- { 0xFCA1, 0xFCFE },
- { 0xFDA1, 0xFDFE },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
-static const CharRange kBig5Ranges[] = {
- { 0xA140, 0xA17E },
- { 0xA1A1, 0xA1FE },
- { 0xA240, 0xA27E },
- { 0xA2A1, 0xA2FE },
- { 0xA340, 0xA37E },
- { 0xA3A1, 0xA3BF },
- { 0xA3E1, 0xA3E1 },
- { 0xA440, 0xA47E },
- { 0xA4A1, 0xA4FE },
- { 0xA540, 0xA57E },
- { 0xA5A1, 0xA5FE },
- { 0xA640, 0xA67E },
- { 0xA6A1, 0xA6FE },
- { 0xA740, 0xA77E },
- { 0xA7A1, 0xA7FE },
- { 0xA840, 0xA87E },
- { 0xA8A1, 0xA8FE },
- { 0xA940, 0xA97E },
- { 0xA9A1, 0xA9FE },
- { 0xAA40, 0xAA7E },
- { 0xAAA1, 0xAAFE },
- { 0xAB40, 0xAB7E },
- { 0xABA1, 0xABFE },
- { 0xAC40, 0xAC7E },
- { 0xACA1, 0xACFE },
- { 0xAD40, 0xAD7E },
- { 0xADA1, 0xADFE },
- { 0xAE40, 0xAE7E },
- { 0xAEA1, 0xAEFE },
- { 0xAF40, 0xAF7E },
- { 0xAFA1, 0xAFFE },
- { 0xB040, 0xB07E },
- { 0xB0A1, 0xB0FE },
- { 0xB140, 0xB17E },
- { 0xB1A1, 0xB1FE },
- { 0xB240, 0xB27E },
- { 0xB2A1, 0xB2FE },
- { 0xB340, 0xB37E },
- { 0xB3A1, 0xB3FE },
- { 0xB440, 0xB47E },
- { 0xB4A1, 0xB4FE },
- { 0xB540, 0xB57E },
- { 0xB5A1, 0xB5FE },
- { 0xB640, 0xB67E },
- { 0xB6A1, 0xB6FE },
- { 0xB740, 0xB77E },
- { 0xB7A1, 0xB7FE },
- { 0xB840, 0xB87E },
- { 0xB8A1, 0xB8FE },
- { 0xB940, 0xB97E },
- { 0xB9A1, 0xB9FE },
- { 0xBA40, 0xBA7E },
- { 0xBAA1, 0xBAFE },
- { 0xBB40, 0xBB7E },
- { 0xBBA1, 0xBBFE },
- { 0xBC40, 0xBC7E },
- { 0xBCA1, 0xBCFE },
- { 0xBD40, 0xBD7E },
- { 0xBDA1, 0xBDFE },
- { 0xBE40, 0xBE7E },
- { 0xBEA1, 0xBEFE },
- { 0xBF40, 0xBF7E },
- { 0xBFA1, 0xBFFE },
- { 0xC040, 0xC07E },
- { 0xC0A1, 0xC0FE },
- { 0xC140, 0xC17E },
- { 0xC1A1, 0xC1FE },
- { 0xC240, 0xC27E },
- { 0xC2A1, 0xC2FE },
- { 0xC340, 0xC37E },
- { 0xC3A1, 0xC3FE },
- { 0xC440, 0xC47E },
- { 0xC4A1, 0xC4FE },
- { 0xC540, 0xC57E },
- { 0xC5A1, 0xC5FE },
- { 0xC640, 0xC67E },
- { 0xC940, 0xC97E },
- { 0xC9A1, 0xC9FE },
- { 0xCA40, 0xCA7E },
- { 0xCAA1, 0xCAFE },
- { 0xCB40, 0xCB7E },
- { 0xCBA1, 0xCBFE },
- { 0xCC40, 0xCC7E },
- { 0xCCA1, 0xCCFE },
- { 0xCD40, 0xCD7E },
- { 0xCDA1, 0xCDFE },
- { 0xCE40, 0xCE7E },
- { 0xCEA1, 0xCEFE },
- { 0xCF40, 0xCF7E },
- { 0xCFA1, 0xCFFE },
- { 0xD040, 0xD07E },
- { 0xD0A1, 0xD0FE },
- { 0xD140, 0xD17E },
- { 0xD1A1, 0xD1FE },
- { 0xD240, 0xD27E },
- { 0xD2A1, 0xD2FE },
- { 0xD340, 0xD37E },
- { 0xD3A1, 0xD3FE },
- { 0xD440, 0xD47E },
- { 0xD4A1, 0xD4FE },
- { 0xD540, 0xD57E },
- { 0xD5A1, 0xD5FE },
- { 0xD640, 0xD67E },
- { 0xD6A1, 0xD6FE },
- { 0xD740, 0xD77E },
- { 0xD7A1, 0xD7FE },
- { 0xD840, 0xD87E },
- { 0xD8A1, 0xD8FE },
- { 0xD940, 0xD97E },
- { 0xD9A1, 0xD9FE },
- { 0xDA40, 0xDA7E },
- { 0xDAA1, 0xDAFE },
- { 0xDB40, 0xDB7E },
- { 0xDBA1, 0xDBFE },
- { 0xDC40, 0xDC7E },
- { 0xDCA1, 0xDCFE },
- { 0xDD40, 0xDD7E },
- { 0xDDA1, 0xDDFE },
- { 0xDE40, 0xDE7E },
- { 0xDEA1, 0xDEFE },
- { 0xDF40, 0xDF7E },
- { 0xDFA1, 0xDFFE },
- { 0xE040, 0xE07E },
- { 0xE0A1, 0xE0FE },
- { 0xE140, 0xE17E },
- { 0xE1A1, 0xE1FE },
- { 0xE240, 0xE27E },
- { 0xE2A1, 0xE2FE },
- { 0xE340, 0xE37E },
- { 0xE3A1, 0xE3FE },
- { 0xE440, 0xE47E },
- { 0xE4A1, 0xE4FE },
- { 0xE540, 0xE57E },
- { 0xE5A1, 0xE5FE },
- { 0xE640, 0xE67E },
- { 0xE6A1, 0xE6FE },
- { 0xE740, 0xE77E },
- { 0xE7A1, 0xE7FE },
- { 0xE840, 0xE87E },
- { 0xE8A1, 0xE8FE },
- { 0xE940, 0xE97E },
- { 0xE9A1, 0xE9FE },
- { 0xEA40, 0xEA7E },
- { 0xEAA1, 0xEAFE },
- { 0xEB40, 0xEB7E },
- { 0xEBA1, 0xEBFE },
- { 0xEC40, 0xEC7E },
- { 0xECA1, 0xECFE },
- { 0xED40, 0xED7E },
- { 0xEDA1, 0xEDFE },
- { 0xEE40, 0xEE7E },
- { 0xEEA1, 0xEEFE },
- { 0xEF40, 0xEF7E },
- { 0xEFA1, 0xEFFE },
- { 0xF040, 0xF07E },
- { 0xF0A1, 0xF0FE },
- { 0xF140, 0xF17E },
- { 0xF1A1, 0xF1FE },
- { 0xF240, 0xF27E },
- { 0xF2A1, 0xF2FE },
- { 0xF340, 0xF37E },
- { 0xF3A1, 0xF3FE },
- { 0xF440, 0xF47E },
- { 0xF4A1, 0xF4FE },
- { 0xF540, 0xF57E },
- { 0xF5A1, 0xF5FE },
- { 0xF640, 0xF67E },
- { 0xF6A1, 0xF6FE },
- { 0xF740, 0xF77E },
- { 0xF7A1, 0xF7FE },
- { 0xF840, 0xF87E },
- { 0xF8A1, 0xF8FE },
- { 0xF940, 0xF97E },
- { 0xF9A1, 0xF9FE },
-};
-
-static bool charMatchesEncoding(int ch, const CharRange* encodingRanges, int rangeCount) {
- // Use binary search to see if the character is contained in the encoding
- int low = 0;
- int high = rangeCount;
-
- while (low < high) {
- int i = (low + high) / 2;
- const CharRange* range = &encodingRanges[i];
- if (ch >= range->first && ch <= range->last)
- return true;
- if (ch > range->last)
- low = i + 1;
- else
- high = i;
- }
-
- return false;
-}
-
-extern uint32_t findPossibleEncodings(int ch)
-{
- // ASCII matches everything
- if (ch < 256) return kEncodingAll;
-
- int result = kEncodingNone;
-
- if (charMatchesEncoding(ch, kShiftJISRanges, ARRAY_SIZE(kShiftJISRanges)))
- result |= kEncodingShiftJIS;
- if (charMatchesEncoding(ch, kGBKRanges, ARRAY_SIZE(kGBKRanges)))
- result |= kEncodingGBK;
- if (charMatchesEncoding(ch, kBig5Ranges, ARRAY_SIZE(kBig5Ranges)))
- result |= kEncodingBig5;
- if (charMatchesEncoding(ch, kEUCKRRanges, ARRAY_SIZE(kEUCKRRanges)))
- result |= kEncodingEUCKR;
-
- return result;
-}
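
For reference, the detector being deleted here classifies a multi-byte code unit by binary-searching per-encoding tables of sorted {first, last} ranges and OR-ing a flag bit for every table that contains it. A small self-contained sketch of that range lookup, with a toy three-entry table standing in for the full CP932/CP936/CP949/CP950 tables:

    #include <cstdint>
    #include <cstdio>

    struct Range { uint16_t first, last; };

    // Toy stand-in for one of the deleted per-encoding tables (must stay sorted).
    static const Range kToyRanges[] = {
        { 0x8140, 0x817E }, { 0x8180, 0x81AC }, { 0xE040, 0xE0FC },
    };

    // Binary search: does ch fall inside any [first, last] range?
    static bool inRanges(uint16_t ch, const Range* ranges, int count) {
        int low = 0, high = count;
        while (low < high) {
            int mid = (low + high) / 2;
            if (ch >= ranges[mid].first && ch <= ranges[mid].last) return true;
            if (ch > ranges[mid].last) low = mid + 1; else high = mid;
        }
        return false;
    }

    int main() {
        std::printf("0x8170 -> %d, 0x9000 -> %d\n",
                    inRanges(0x8170, kToyRanges, 3),
                    inRanges(0x9000, kToyRanges, 3));
        return 0;
    }
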
diff --git a/media/libmedia/autodetect.h b/media/libmedia/autodetect.h
deleted file mode 100644
index 9675db3..0000000
--- a/media/libmedia/autodetect.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef AUTODETECT_H
-#define AUTODETECT_H
-
-#include <inttypes.h>
-
-// flags used for native encoding detection
-enum {
- kEncodingNone = 0,
- kEncodingShiftJIS = (1 << 0),
- kEncodingGBK = (1 << 1),
- kEncodingBig5 = (1 << 2),
- kEncodingEUCKR = (1 << 3),
-
- kEncodingAll = (kEncodingShiftJIS | kEncodingGBK | kEncodingBig5 | kEncodingEUCKR),
-};
-
-
-// returns a bitfield containing the possible native encodings for the given character
-extern uint32_t findPossibleEncodings(int ch);
-
-#endif // AUTODETECT_H
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 110b94c..8e8a1ed 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -18,9 +18,12 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaMetadataRetriever"
+#include <inttypes.h>
+
#include <binder/IServiceManager.h>
#include <binder/IPCThreadState.h>
#include <media/mediametadataretriever.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <utils/Log.h>
#include <dlfcn.h>
@@ -93,7 +96,9 @@ void MediaMetadataRetriever::disconnect()
}
status_t MediaMetadataRetriever::setDataSource(
- const char *srcUrl, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *srcUrl,
+ const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource");
Mutex::Autolock _l(mLock);
@@ -106,12 +111,12 @@ status_t MediaMetadataRetriever::setDataSource(
return UNKNOWN_ERROR;
}
ALOGV("data source (%s)", srcUrl);
- return mRetriever->setDataSource(srcUrl, headers);
+ return mRetriever->setDataSource(httpService, srcUrl, headers);
}
status_t MediaMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
{
- ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
Mutex::Autolock _l(mLock);
if (mRetriever == 0) {
ALOGE("retriever is not initialized");
@@ -126,7 +131,7 @@ status_t MediaMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t l
sp<IMemory> MediaMetadataRetriever::getFrameAtTime(int64_t timeUs, int option)
{
- ALOGV("getFrameAtTime: time(%lld us) option(%d)", timeUs, option);
+ ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d)", timeUs, option);
Mutex::Autolock _l(mLock);
if (mRetriever == 0) {
ALOGE("retriever is not initialized");
@@ -157,7 +162,7 @@ sp<IMemory> MediaMetadataRetriever::extractAlbumArt()
return mRetriever->extractAlbumArt();
}
-void MediaMetadataRetriever::DeathNotifier::binderDied(const wp<IBinder>& who) {
+void MediaMetadataRetriever::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
Mutex::Autolock lock(MediaMetadataRetriever::sServiceLock);
MediaMetadataRetriever::sService.clear();
ALOGW("MediaMetadataRetriever server died!");
@@ -167,7 +172,7 @@ MediaMetadataRetriever::DeathNotifier::~DeathNotifier()
{
Mutex::Autolock lock(sServiceLock);
if (sService != 0) {
- sService->asBinder()->unlinkToDeath(this);
+ IInterface::asBinder(sService)->unlinkToDeath(this);
}
}
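
Besides threading the new IMediaHTTPService argument through setDataSource, the hunks above (and several below in mediaplayer.cpp) replace "%lld" with the <inttypes.h> PRId64 macro, which expands to whatever conversion specifier matches int64_t on the current ABI instead of assuming long long. A minimal illustration:

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t offset = 1234567890123LL;
        // "%lld" presumes int64_t is long long; PRId64 always matches its real width.
        std::printf("setDataSource offset=%" PRId64 "\n", offset);
        return 0;
    }
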
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 0f6d897..05c89ed 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -17,12 +17,14 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaPlayer"
-#include <utils/Log.h>
-#include <sys/types.h>
+#include <fcntl.h>
+#include <inttypes.h>
#include <sys/stat.h>
+#include <sys/types.h>
#include <unistd.h>
-#include <fcntl.h>
+
+#include <utils/Log.h>
#include <binder/IServiceManager.h>
#include <binder/IPCThreadState.h>
@@ -48,6 +50,7 @@ MediaPlayer::MediaPlayer()
mListener = NULL;
mCookie = NULL;
mStreamType = AUDIO_STREAM_MUSIC;
+ mAudioAttributesParcel = NULL;
mCurrentPosition = -1;
mSeekPosition = -1;
mCurrentState = MEDIA_PLAYER_IDLE;
@@ -57,8 +60,8 @@ MediaPlayer::MediaPlayer()
mLeftVolume = mRightVolume = 1.0;
mVideoWidth = mVideoHeight = 0;
mLockThreadId = 0;
- mAudioSessionId = AudioSystem::newAudioSessionId();
- AudioSystem::acquireAudioSessionId(mAudioSessionId);
+ mAudioSessionId = AudioSystem::newAudioUniqueId();
+ AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
mSendLevel = 0;
mRetransmitEndpointValid = false;
}
@@ -66,7 +69,11 @@ MediaPlayer::MediaPlayer()
MediaPlayer::~MediaPlayer()
{
ALOGV("destructor");
- AudioSystem::releaseAudioSessionId(mAudioSessionId);
+ if (mAudioAttributesParcel != NULL) {
+ delete mAudioAttributesParcel;
+ mAudioAttributesParcel = NULL;
+ }
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
disconnect();
IPCThreadState::self()->flushCommands();
}
@@ -136,6 +143,7 @@ status_t MediaPlayer::attachNewPlayer(const sp<IMediaPlayer>& player)
}
status_t MediaPlayer::setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url, const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
@@ -145,7 +153,7 @@ status_t MediaPlayer::setDataSource(
if (service != 0) {
sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
- (NO_ERROR != player->setDataSource(url, headers))) {
+ (NO_ERROR != player->setDataSource(httpService, url, headers))) {
player.clear();
}
err = attachNewPlayer(player);
@@ -156,7 +164,7 @@ status_t MediaPlayer::setDataSource(
status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length)
{
- ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
status_t err = UNKNOWN_ERROR;
const sp<IMediaPlayerService>& service(getMediaPlayerService());
if (service != 0) {
@@ -193,7 +201,7 @@ status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)
(mCurrentState != MEDIA_PLAYER_STATE_ERROR) &&
((mCurrentState & MEDIA_PLAYER_IDLE) != MEDIA_PLAYER_IDLE);
if ((mPlayer != NULL) && hasBeenInitialized) {
- ALOGV("invoke %d", request.dataSize());
+ ALOGV("invoke %zu", request.dataSize());
return mPlayer->invoke(request, reply);
}
ALOGE("invoke failed: wrong state %X", mCurrentState);
@@ -234,6 +242,9 @@ status_t MediaPlayer::prepareAsync_l()
{
if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) {
mPlayer->setAudioStreamType(mStreamType);
+ if (mAudioAttributesParcel != NULL) {
+ mPlayer->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel);
+ }
mCurrentState = MEDIA_PLAYER_PREPARING;
return mPlayer->prepareAsync();
}
@@ -280,16 +291,21 @@ status_t MediaPlayer::prepareAsync()
status_t MediaPlayer::start()
{
ALOGV("start");
+
+ status_t ret = NO_ERROR;
Mutex::Autolock _l(mLock);
- if (mCurrentState & MEDIA_PLAYER_STARTED)
- return NO_ERROR;
- if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_PREPARED |
+
+ mLockThreadId = getThreadId();
+
+ if (mCurrentState & MEDIA_PLAYER_STARTED) {
+ ret = NO_ERROR;
+ } else if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_PREPARED |
MEDIA_PLAYER_PLAYBACK_COMPLETE | MEDIA_PLAYER_PAUSED ) ) ) {
mPlayer->setLooping(mLoop);
mPlayer->setVolume(mLeftVolume, mRightVolume);
mPlayer->setAuxEffectSendLevel(mSendLevel);
mCurrentState = MEDIA_PLAYER_STARTED;
- status_t ret = mPlayer->start();
+ ret = mPlayer->start();
if (ret != NO_ERROR) {
mCurrentState = MEDIA_PLAYER_STATE_ERROR;
} else {
@@ -297,10 +313,14 @@ status_t MediaPlayer::start()
ALOGV("playback completed immediately following start()");
}
}
- return ret;
+ } else {
+ ALOGE("start called in state %d", mCurrentState);
+ ret = INVALID_OPERATION;
}
- ALOGE("start called in state %d", mCurrentState);
- return INVALID_OPERATION;
+
+ mLockThreadId = 0;
+
+ return ret;
}
status_t MediaPlayer::stop()
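
The start() refactor above sets mLockThreadId around the call into the player for the same reason the comment later in this file gives for prepare and seekTo: when the media server runs in the same process, mPlayer->start() can call notify() back synchronously on this thread while mLock is still held, so notify() must recognise the owning thread and skip re-locking. A generic sketch of that reentrancy guard, with hypothetical names and std synchronisation primitives in place of the Android ones:

    #include <mutex>
    #include <thread>

    // Sketch of the "threadId hack": remember which thread holds mLock so a
    // synchronous callback on that same thread does not try to lock it again.
    class PlayerSketch {
    public:
        void start() {
            std::unique_lock<std::mutex> lk(mLock);
            mLockOwner = std::this_thread::get_id();
            startPlayback();                 // may re-enter notify() on this thread
            mLockOwner = std::thread::id();  // clear the owner before unlocking
        }

        void notify() {
            if (mLockOwner == std::this_thread::get_id()) {
                handleEvent();               // caller already holds mLock
            } else {
                std::lock_guard<std::mutex> lk(mLock);
                handleEvent();
            }
        }

    private:
        void startPlayback() { notify(); }   // stand-in for the in-process callback path
        void handleEvent() {}

        std::mutex mLock;
        std::thread::id mLockOwner;          // like the original, checked without extra synchronisation
    };

    int main() {
        PlayerSketch p;
        p.start();
        return 0;
    }
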
@@ -530,6 +550,14 @@ status_t MediaPlayer::setAudioStreamType(audio_stream_type_t type)
return OK;
}
+status_t MediaPlayer::getAudioStreamType(audio_stream_type_t *type)
+{
+ ALOGV("getAudioStreamType");
+ Mutex::Autolock _l(mLock);
+ *type = mStreamType;
+ return OK;
+}
+
status_t MediaPlayer::setLooping(int loop)
{
ALOGV("MediaPlayer::setLooping");
@@ -575,8 +603,8 @@ status_t MediaPlayer::setAudioSessionId(int sessionId)
return BAD_VALUE;
}
if (sessionId != mAudioSessionId) {
- AudioSystem::acquireAudioSessionId(sessionId);
- AudioSystem::releaseAudioSessionId(mAudioSessionId);
+ AudioSystem::acquireAudioSessionId(sessionId, -1);
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
mAudioSessionId = sessionId;
}
return NO_ERROR;
@@ -613,15 +641,46 @@ status_t MediaPlayer::attachAuxEffect(int effectId)
return mPlayer->attachAuxEffect(effectId);
}
+// always call with lock held
+status_t MediaPlayer::checkStateForKeySet_l(int key)
+{
+ switch(key) {
+ case KEY_PARAMETER_AUDIO_ATTRIBUTES:
+ if (mCurrentState & ( MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED |
+ MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE) ) {
+ // Can't change the audio attributes after prepare
+ ALOGE("trying to set audio attributes called in state %d", mCurrentState);
+ return INVALID_OPERATION;
+ }
+ break;
+ default:
+ // parameter doesn't require player state check
+ break;
+ }
+ return OK;
+}
+
status_t MediaPlayer::setParameter(int key, const Parcel& request)
{
ALOGV("MediaPlayer::setParameter(%d)", key);
Mutex::Autolock _l(mLock);
+ if (checkStateForKeySet_l(key) != OK) {
+ return INVALID_OPERATION;
+ }
if (mPlayer != NULL) {
return mPlayer->setParameter(key, request);
}
- ALOGV("setParameter: no active player");
- return INVALID_OPERATION;
+ switch (key) {
+ case KEY_PARAMETER_AUDIO_ATTRIBUTES:
+ // no player, save the marshalled audio attributes
+ if (mAudioAttributesParcel != NULL) { delete mAudioAttributesParcel; };
+ mAudioAttributesParcel = new Parcel();
+ mAudioAttributesParcel->appendFrom(&request, 0, request.dataSize());
+ return OK;
+ default:
+ ALOGV("setParameter: no active player");
+ return INVALID_OPERATION;
+ }
}
status_t MediaPlayer::getParameter(int key, Parcel *reply)
@@ -654,7 +713,7 @@ status_t MediaPlayer::setRetransmitEndpoint(const char* addrString,
return BAD_VALUE;
}
- memset(&mRetransmitEndpoint, 0, sizeof(&mRetransmitEndpoint));
+ memset(&mRetransmitEndpoint, 0, sizeof(mRetransmitEndpoint));
mRetransmitEndpoint.sin_family = AF_INET;
mRetransmitEndpoint.sin_addr = saddr;
mRetransmitEndpoint.sin_port = htons(port);
@@ -673,8 +732,8 @@ void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj)
// running in the same process as the media server. In that case,
// this will deadlock.
//
- // The threadId hack below works around this for the care of prepare
- // and seekTo within the same process.
+ // The threadId hack below works around this for the case of prepare,
+ // seekTo and start within the same process.
// FIXME: Remember, this is a hack, it's not even a hack that is applied
// consistently for all use-cases, this needs to be revisited.
if (mLockThreadId != getThreadId()) {
@@ -776,48 +835,12 @@ void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj)
}
}
-/*static*/ status_t MediaPlayer::decode(const char* url, uint32_t *pSampleRate,
- int* pNumChannels, audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
-{
- ALOGV("decode(%s)", url);
- status_t status;
- const sp<IMediaPlayerService>& service = getMediaPlayerService();
- if (service != 0) {
- status = service->decode(url, pSampleRate, pNumChannels, pFormat, heap, pSize);
- } else {
- ALOGE("Unable to locate media service");
- status = DEAD_OBJECT;
- }
- return status;
-
-}
-
void MediaPlayer::died()
{
ALOGV("died");
notify(MEDIA_ERROR, MEDIA_ERROR_SERVER_DIED, 0);
}
-/*static*/ status_t MediaPlayer::decode(int fd, int64_t offset, int64_t length,
- uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
-{
- ALOGV("decode(%d, %lld, %lld)", fd, offset, length);
- status_t status;
- const sp<IMediaPlayerService>& service = getMediaPlayerService();
- if (service != 0) {
- status = service->decode(fd, offset, length, pSampleRate,
- pNumChannels, pFormat, heap, pSize);
- } else {
- ALOGE("Unable to locate media service");
- status = DEAD_OBJECT;
- }
- return status;
-
-}
-
status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) {
if (mPlayer == NULL) {
return NO_INIT;
@@ -832,15 +855,4 @@ status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) {
return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
}
-status_t MediaPlayer::updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- const sp<IMediaPlayerService>& service = getMediaPlayerService();
-
- if (service != NULL) {
- return service->updateProxyConfig(host, port, exclusionList);
- }
-
- return INVALID_OPERATION;
-}
-
}; // namespace android
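
The audio attributes accepted above through setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, ...) are cached in mAudioAttributesParcel while no native player exists and replayed from prepareAsync_l(); once the player is prepared, checkStateForKeySet_l() refuses the key. A minimal client-side sketch of filling that parcel, assuming the KEY_PARAMETER_AUDIO_ATTRIBUTES constant from media/mediaplayer.h and the usage/content-type constants from system/audio.h; the field order follows the layout documented in MediaPlayerService.cpp further down in this change:

    #include <binder/Parcel.h>
    #include <media/mediaplayer.h>
    #include <system/audio.h>
    #include <utils/String16.h>

    // Sketch only: hand audio attributes to a MediaPlayer before prepare().
    static android::status_t applyMusicAttributes(
            const android::sp<android::MediaPlayer>& player) {
        using namespace android;
        Parcel request;
        request.writeInt32(AUDIO_USAGE_MEDIA);         // usage
        request.writeInt32(AUDIO_CONTENT_TYPE_MUSIC);  // content_type
        request.writeInt32(AUDIO_SOURCE_DEFAULT);      // source
        request.writeInt32(0);                         // flags (none)
        request.writeInt32(1);                         // kAudioAttributesMarshallTagFlattenTags
        request.writeString16(String16("demo-tag"));   // flattened UTF-16 tags
        // Must run before prepare()/prepareAsync(); later calls are rejected
        // by checkStateForKeySet_l().
        return player->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, request);
    }
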
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 3710e46..1952b86 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -17,6 +17,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaRecorder"
+
+#include <inttypes.h>
+
#include <utils/Log.h>
#include <media/mediarecorder.h>
#include <binder/IServiceManager.h>
@@ -183,8 +186,11 @@ status_t MediaRecorder::setOutputFormat(int of)
ALOGE("setOutputFormat called in an invalid state: %d", mCurrentState);
return INVALID_OPERATION;
}
- if (mIsVideoSourceSet && of >= OUTPUT_FORMAT_AUDIO_ONLY_START && of != OUTPUT_FORMAT_RTP_AVP && of != OUTPUT_FORMAT_MPEG2TS) { //first non-video output format
- ALOGE("output format (%d) is meant for audio recording only and incompatible with video recording", of);
+ if (mIsVideoSourceSet
+ && of >= OUTPUT_FORMAT_AUDIO_ONLY_START //first non-video output format
+ && of < OUTPUT_FORMAT_AUDIO_ONLY_END) {
+ ALOGE("output format (%d) is meant for audio recording only"
+ " and incompatible with video recording", of);
return INVALID_OPERATION;
}
@@ -286,7 +292,7 @@ status_t MediaRecorder::setOutputFile(const char* path)
status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)
{
- ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
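
The logging fixes in this file and in mediaplayer.cpp above swap hard-coded %lld/%d specifiers for the <inttypes.h> width macros and %zu: int64_t is long on LP64 but long long on LP32, so any fixed length modifier is wrong on one of the two ABIs, and Parcel::dataSize() returns size_t. A standalone sketch of the portable form:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
        int64_t offset = INT64_C(1) << 40;  // 64-bit value on both 32- and 64-bit ABIs
        size_t count = sizeof(offset);
        // PRId64 expands to "lld" or "ld" as the ABI requires; %zu matches size_t.
        printf("offset=%" PRId64 " bytes=%zu\n", offset, count);
        return 0;
    }
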
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index 8f21632..9d8fe62 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -15,18 +15,19 @@ LOCAL_SRC_FILES:= \
MediaPlayerService.cpp \
MediaRecorderClient.cpp \
MetadataRetrieverClient.cpp \
- MidiFile.cpp \
- MidiMetadataRetriever.cpp \
RemoteDisplay.cpp \
SharedLibrary.cpp \
StagefrightPlayer.cpp \
StagefrightRecorder.cpp \
TestPlayerStub.cpp \
+ VideoFrameScheduler.cpp \
LOCAL_SHARED_LIBRARIES := \
libbinder \
libcamera_client \
+ libcrypto \
libcutils \
+ libdrmframework \
liblog \
libdl \
libgui \
@@ -45,15 +46,17 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_rtsp \
LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg) \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/av/media/libstagefright/rtsp \
$(TOP)/frameworks/av/media/libstagefright/wifi-display \
+ $(TOP)/frameworks/av/media/libstagefright/webm \
$(TOP)/frameworks/native/include/media/openmax \
$(TOP)/external/tremolo/Tremolo \
LOCAL_MODULE:= libmediaplayerservice
+LOCAL_32_BIT_ONLY := true
+
include $(BUILD_SHARED_LIBRARY)
include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libmediaplayerservice/Crypto.cpp b/media/libmediaplayerservice/Crypto.cpp
index 62593b2..8ee7c0b 100644
--- a/media/libmediaplayerservice/Crypto.cpp
+++ b/media/libmediaplayerservice/Crypto.cpp
@@ -257,4 +257,12 @@ ssize_t Crypto::decrypt(
errorDetailMsg);
}
+void Crypto::notifyResolution(uint32_t width, uint32_t height) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck == OK && mPlugin != NULL) {
+ mPlugin->notifyResolution(width, height);
+ }
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/Crypto.h b/media/libmediaplayerservice/Crypto.h
index c44ae34..0037c2e 100644
--- a/media/libmediaplayerservice/Crypto.h
+++ b/media/libmediaplayerservice/Crypto.h
@@ -45,6 +45,8 @@ struct Crypto : public BnCrypto {
virtual bool requiresSecureDecoderComponent(
const char *mime) const;
+ virtual void notifyResolution(uint32_t width, uint32_t height);
+
virtual ssize_t decrypt(
bool secure,
const uint8_t key[16],
diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp
index eebcb79..73f1a2a 100644
--- a/media/libmediaplayerservice/Drm.cpp
+++ b/media/libmediaplayerservice/Drm.cpp
@@ -28,9 +28,21 @@
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaErrors.h>
+#include <binder/IServiceManager.h>
+#include <binder/IPCThreadState.h>
namespace android {
+static bool checkPermission(const char* permissionString) {
+#ifndef HAVE_ANDROID_OS
+ return true;
+#endif
+ if (getpid() == IPCThreadState::self()->getCallingPid()) return true;
+ bool ok = checkCallingPermission(String16(permissionString));
+ if (!ok) ALOGE("Request requires %s", permissionString);
+ return ok;
+}
+
KeyedVector<Vector<uint8_t>, String8> Drm::mUUIDToLibraryPathMap;
KeyedVector<String8, wp<SharedLibrary> > Drm::mLibraryPathToOpenLibraryMap;
Mutex Drm::mMapLock;
@@ -72,10 +84,10 @@ status_t Drm::setListener(const sp<IDrmClient>& listener)
{
Mutex::Autolock lock(mEventLock);
if (mListener != NULL){
- mListener->asBinder()->unlinkToDeath(this);
+ IInterface::asBinder(mListener)->unlinkToDeath(this);
}
if (listener != NULL) {
- listener->asBinder()->linkToDeath(this);
+ IInterface::asBinder(listener)->linkToDeath(this);
}
mListener = listener;
return NO_ERROR;
@@ -373,7 +385,8 @@ status_t Drm::queryKeyStatus(Vector<uint8_t> const &sessionId,
return mPlugin->queryKeyStatus(sessionId, infoMap);
}
-status_t Drm::getProvisionRequest(Vector<uint8_t> &request, String8 &defaultUrl) {
+status_t Drm::getProvisionRequest(String8 const &certType, String8 const &certAuthority,
+ Vector<uint8_t> &request, String8 &defaultUrl) {
Mutex::Autolock autoLock(mLock);
if (mInitCheck != OK) {
@@ -384,10 +397,13 @@ status_t Drm::getProvisionRequest(Vector<uint8_t> &request, String8 &defaultUrl)
return -EINVAL;
}
- return mPlugin->getProvisionRequest(request, defaultUrl);
+ return mPlugin->getProvisionRequest(certType, certAuthority,
+ request, defaultUrl);
}
-status_t Drm::provideProvisionResponse(Vector<uint8_t> const &response) {
+status_t Drm::provideProvisionResponse(Vector<uint8_t> const &response,
+ Vector<uint8_t> &certificate,
+ Vector<uint8_t> &wrappedKey) {
Mutex::Autolock autoLock(mLock);
if (mInitCheck != OK) {
@@ -398,9 +414,26 @@ status_t Drm::provideProvisionResponse(Vector<uint8_t> const &response) {
return -EINVAL;
}
- return mPlugin->provideProvisionResponse(response);
+ return mPlugin->provideProvisionResponse(response, certificate, wrappedKey);
}
+status_t Drm::unprovisionDevice() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ if (!checkPermission("android.permission.REMOVE_DRM_CERTIFICATES")) {
+ return -EPERM;
+ }
+
+ return mPlugin->unprovisionDevice();
+}
status_t Drm::getSecureStops(List<Vector<uint8_t> > &secureStops) {
Mutex::Autolock autoLock(mLock);
@@ -416,6 +449,20 @@ status_t Drm::getSecureStops(List<Vector<uint8_t> > &secureStops) {
return mPlugin->getSecureStops(secureStops);
}
+status_t Drm::getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ return mPlugin->getSecureStop(ssid, secureStop);
+}
+
status_t Drm::releaseSecureStops(Vector<uint8_t> const &ssRelease) {
Mutex::Autolock autoLock(mLock);
@@ -430,6 +477,20 @@ status_t Drm::releaseSecureStops(Vector<uint8_t> const &ssRelease) {
return mPlugin->releaseSecureStops(ssRelease);
}
+status_t Drm::releaseAllSecureStops() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ return mPlugin->releaseAllSecureStops();
+}
+
status_t Drm::getPropertyString(String8 const &name, String8 &value ) const {
Mutex::Autolock autoLock(mLock);
@@ -589,12 +650,38 @@ status_t Drm::verify(Vector<uint8_t> const &sessionId,
return mPlugin->verify(sessionId, keyId, message, signature, match);
}
+status_t Drm::signRSA(Vector<uint8_t> const &sessionId,
+ String8 const &algorithm,
+ Vector<uint8_t> const &message,
+ Vector<uint8_t> const &wrappedKey,
+ Vector<uint8_t> &signature) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ if (mPlugin == NULL) {
+ return -EINVAL;
+ }
+
+ if (!checkPermission("android.permission.ACCESS_DRM_CERTIFICATES")) {
+ return -EPERM;
+ }
+
+ return mPlugin->signRSA(sessionId, algorithm, message, wrappedKey, signature);
+}
+
void Drm::binderDied(const wp<IBinder> &the_late_who)
{
+ mEventLock.lock();
+ mListener.clear();
+ mEventLock.unlock();
+
+ Mutex::Autolock autoLock(mLock);
delete mPlugin;
mPlugin = NULL;
closeFactory();
- mListener.clear();
}
} // namespace android
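
setListener() now obtains the listener's binder through the static IInterface::asBinder() helper instead of calling asBinder() on the interface pointer. Unlike the member call, the static form tolerates a NULL interface, which is also why the explicit NULL ternary disappears from setVideoSurfaceTexture() in MediaPlayerService.cpp further down. A small sketch of the pattern ('listener' stands for any sp<> to an IInterface-derived class; the enclosing object is assumed to be an IBinder::DeathRecipient, as Drm is):

    #include <binder/IInterface.h>

    sp<IBinder> binder = IInterface::asBinder(listener);  // NULL listener yields NULL binder
    if (binder != NULL) {
        binder->linkToDeath(this);                         // safe: NULL already filtered out
    }
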
diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h
index 119fd50..0e1eb2c 100644
--- a/media/libmediaplayerservice/Drm.h
+++ b/media/libmediaplayerservice/Drm.h
@@ -66,14 +66,22 @@ struct Drm : public BnDrm,
virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId,
KeyedVector<String8, String8> &infoMap) const;
- virtual status_t getProvisionRequest(Vector<uint8_t> &request,
+ virtual status_t getProvisionRequest(String8 const &certType,
+ String8 const &certAuthority,
+ Vector<uint8_t> &request,
String8 &defaulUrl);
- virtual status_t provideProvisionResponse(Vector<uint8_t> const &response);
+ virtual status_t provideProvisionResponse(Vector<uint8_t> const &response,
+ Vector<uint8_t> &certificate,
+ Vector<uint8_t> &wrappedKey);
+
+ virtual status_t unprovisionDevice();
virtual status_t getSecureStops(List<Vector<uint8_t> > &secureStops);
+ virtual status_t getSecureStop(Vector<uint8_t> const &ssid, Vector<uint8_t> &secureStop);
virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease);
+ virtual status_t releaseAllSecureStops();
virtual status_t getPropertyString(String8 const &name, String8 &value ) const;
virtual status_t getPropertyByteArray(String8 const &name,
@@ -111,6 +119,12 @@ struct Drm : public BnDrm,
Vector<uint8_t> const &signature,
bool &match);
+ virtual status_t signRSA(Vector<uint8_t> const &sessionId,
+ String8 const &algorithm,
+ Vector<uint8_t> const &message,
+ Vector<uint8_t> const &wrappedKey,
+ Vector<uint8_t> &signature);
+
virtual status_t setListener(const sp<IDrmClient>& listener);
virtual void sendEvent(DrmPlugin::EventType eventType, int extra,
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
index c2ac1a3..afe3936 100644
--- a/media/libmediaplayerservice/HDCP.cpp
+++ b/media/libmediaplayerservice/HDCP.cpp
@@ -107,11 +107,7 @@ uint32_t HDCP::getCaps() {
return NO_INIT;
}
- // TO-DO:
- // Only support HDCP_CAPS_ENCRYPT (byte-array to byte-array) for now.
- // use mHDCPModule->getCaps() when the HDCP libraries get updated.
- //return mHDCPModule->getCaps();
- return HDCPModule::HDCP_CAPS_ENCRYPT;
+ return mHDCPModule->getCaps();
}
status_t HDCP::encrypt(
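
With the TO-DO gone, getCaps() reports whatever capability bits the installed HDCP module actually advertises instead of always claiming encrypt support. A caller-side sketch (only HDCPModule::HDCP_CAPS_ENCRYPT is taken from the code above; 'hdcp' is assumed to be an initialized HDCP instance):

    uint32_t caps = hdcp->getCaps();
    if (caps & HDCPModule::HDCP_CAPS_ENCRYPT) {
        // the module can encrypt from a byte array to a byte array
    }
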
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index 90aed39..48884b9 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -15,18 +15,21 @@
** limitations under the License.
*/
+//#define LOG_NDEBUG 0
#define LOG_TAG "MediaPlayerFactory"
#include <utils/Log.h>
#include <cutils/properties.h>
#include <media/IMediaPlayer.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
#include <media/stagefright/foundation/ADebug.h>
#include <utils/Errors.h>
#include <utils/misc.h>
+#include <../libstagefright/include/WVMExtractor.h>
#include "MediaPlayerFactory.h"
-#include "MidiFile.h"
#include "TestPlayerStub.h"
#include "StagefrightPlayer.h"
#include "nuplayer/NuPlayerDriver.h"
@@ -60,14 +63,20 @@ status_t MediaPlayerFactory::registerFactory_l(IFactory* factory,
return OK;
}
-player_type MediaPlayerFactory::getDefaultPlayerType() {
+static player_type getDefaultPlayerType() {
char value[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.use-nuplayer", value, NULL)
+ if (property_get("media.stagefright.use-awesome", value, NULL)
&& (!strcmp("1", value) || !strcasecmp("true", value))) {
- return NU_PLAYER;
+ return STAGEFRIGHT_PLAYER;
}
- return STAGEFRIGHT_PLAYER;
+ // TODO: remove this EXPERIMENTAL developer settings property
+ if (property_get("persist.sys.media.use-awesome", value, NULL)
+ && !strcasecmp("true", value)) {
+ return STAGEFRIGHT_PLAYER;
+ }
+
+ return NU_PLAYER;
}
status_t MediaPlayerFactory::registerFactory(IFactory* factory,
@@ -170,34 +179,63 @@ sp<MediaPlayerBase> MediaPlayerFactory::createPlayer(
class StagefrightPlayerFactory :
public MediaPlayerFactory::IFactory {
public:
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
int fd,
int64_t offset,
int64_t length,
- float curScore) {
- char buf[20];
- lseek(fd, offset, SEEK_SET);
- read(fd, buf, sizeof(buf));
- lseek(fd, offset, SEEK_SET);
+ float /*curScore*/) {
+ if (legacyDrm()) {
+ sp<DataSource> source = new FileSource(dup(fd), offset, length);
+ String8 mimeType;
+ float confidence;
+ if (SniffWVM(source, &mimeType, &confidence, NULL /* format */)) {
+ return 1.0;
+ }
+ }
- long ident = *((long*)buf);
+ if (getDefaultPlayerType() == STAGEFRIGHT_PLAYER) {
+ char buf[20];
+ lseek(fd, offset, SEEK_SET);
+ read(fd, buf, sizeof(buf));
+ lseek(fd, offset, SEEK_SET);
- // Ogg vorbis?
- if (ident == 0x5367674f) // 'OggS'
- return 1.0;
+ uint32_t ident = *((uint32_t*)buf);
+
+ // Ogg vorbis?
+ if (ident == 0x5367674f) // 'OggS'
+ return 1.0;
+ }
return 0.0;
}
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+ const char* url,
+ float /*curScore*/) {
+ if (legacyDrm() && !strncasecmp("widevine://", url, 11)) {
+ return 1.0;
+ }
+ return 0.0;
+ }
+
virtual sp<MediaPlayerBase> createPlayer() {
ALOGV(" create StagefrightPlayer");
return new StagefrightPlayer();
}
+ private:
+ bool legacyDrm() {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.sys.media.legacy-drm", value, NULL)
+ && (!strcmp("1", value) || !strcasecmp("true", value))) {
+ return true;
+ }
+ return false;
+ }
};
class NuPlayerFactory : public MediaPlayerFactory::IFactory {
public:
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const char* url,
float curScore) {
static const float kOurScore = 0.8;
@@ -229,9 +267,9 @@ class NuPlayerFactory : public MediaPlayerFactory::IFactory {
return 0.0;
}
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
- const sp<IStreamSource> &source,
- float curScore) {
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+ const sp<IStreamSource>& /*source*/,
+ float /*curScore*/) {
return 1.0;
}
@@ -241,80 +279,11 @@ class NuPlayerFactory : public MediaPlayerFactory::IFactory {
}
};
-class SonivoxPlayerFactory : public MediaPlayerFactory::IFactory {
- public:
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
- const char* url,
- float curScore) {
- static const float kOurScore = 0.4;
- static const char* const FILE_EXTS[] = { ".mid",
- ".midi",
- ".smf",
- ".xmf",
- ".mxmf",
- ".imy",
- ".rtttl",
- ".rtx",
- ".ota" };
- if (kOurScore <= curScore)
- return 0.0;
-
- // use MidiFile for MIDI extensions
- int lenURL = strlen(url);
- for (int i = 0; i < NELEM(FILE_EXTS); ++i) {
- int len = strlen(FILE_EXTS[i]);
- int start = lenURL - len;
- if (start > 0) {
- if (!strncasecmp(url + start, FILE_EXTS[i], len)) {
- return kOurScore;
- }
- }
- }
-
- return 0.0;
- }
-
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
- int fd,
- int64_t offset,
- int64_t length,
- float curScore) {
- static const float kOurScore = 0.8;
-
- if (kOurScore <= curScore)
- return 0.0;
-
- // Some kind of MIDI?
- EAS_DATA_HANDLE easdata;
- if (EAS_Init(&easdata) == EAS_SUCCESS) {
- EAS_FILE locator;
- locator.path = NULL;
- locator.fd = fd;
- locator.offset = offset;
- locator.length = length;
- EAS_HANDLE eashandle;
- if (EAS_OpenFile(easdata, &locator, &eashandle) == EAS_SUCCESS) {
- EAS_CloseFile(easdata, eashandle);
- EAS_Shutdown(easdata);
- return kOurScore;
- }
- EAS_Shutdown(easdata);
- }
-
- return 0.0;
- }
-
- virtual sp<MediaPlayerBase> createPlayer() {
- ALOGV(" create MidiFile");
- return new MidiFile();
- }
-};
-
class TestPlayerFactory : public MediaPlayerFactory::IFactory {
public:
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
const char* url,
- float curScore) {
+ float /*curScore*/) {
if (TestPlayerStub::canBeUsed(url)) {
return 1.0;
}
@@ -336,7 +305,6 @@ void MediaPlayerFactory::registerBuiltinFactories() {
registerFactory_l(new StagefrightPlayerFactory(), STAGEFRIGHT_PLAYER);
registerFactory_l(new NuPlayerFactory(), NU_PLAYER);
- registerFactory_l(new SonivoxPlayerFactory(), SONIVOX_PLAYER);
registerFactory_l(new TestPlayerFactory(), TEST_PLAYER);
sInitComplete = true;
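
createPlayer()/getPlayerType() earlier in this file ask every registered IFactory to score a data source through the scoreFactory() overloads (URL, fd/offset/length, or IStreamSource) and instantiate the player from the highest-scoring factory. A minimal sketch of a factory claiming a made-up URL scheme; MyPlayer and the MY_PLAYER player_type value are illustrative, not part of this change:

    class MyPlayerFactory : public MediaPlayerFactory::IFactory {
      public:
        virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
                                   const char* url,
                                   float curScore) {
            static const float kOurScore = 0.9;
            if (kOurScore <= curScore) return 0.0;  // another factory already scored higher
            // Claim only our hypothetical scheme; everything else stays at 0.0.
            return strncasecmp("myproto://", url, 10) ? 0.0 : kOurScore;
        }
        // The fd- and IStreamSource-based overloads keep the base-class 0.0 default.
        virtual sp<MediaPlayerBase> createPlayer() {
            return new MyPlayer();                  // hypothetical MediaPlayerBase subclass
        }
    };
    // Registered once at startup, e.g.:
    //   MediaPlayerFactory::registerFactory(new MyPlayerFactory(), MY_PLAYER);
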
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.h b/media/libmediaplayerservice/MediaPlayerFactory.h
index fe8972b..55ff918 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.h
+++ b/media/libmediaplayerservice/MediaPlayerFactory.h
@@ -29,19 +29,19 @@ class MediaPlayerFactory {
public:
virtual ~IFactory() { }
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
- const char* url,
- float curScore) { return 0.0; }
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+ const char* /*url*/,
+ float /*curScore*/) { return 0.0; }
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
- int fd,
- int64_t offset,
- int64_t length,
- float curScore) { return 0.0; }
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+ int /*fd*/,
+ int64_t /*offset*/,
+ int64_t /*length*/,
+ float /*curScore*/) { return 0.0; }
- virtual float scoreFactory(const sp<IMediaPlayer>& client,
- const sp<IStreamSource> &source,
- float curScore) { return 0.0; }
+ virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/,
+ const sp<IStreamSource> &/*source*/,
+ float /*curScore*/) { return 0.0; }
virtual sp<MediaPlayerBase> createPlayer() = 0;
};
@@ -71,7 +71,6 @@ class MediaPlayerFactory {
static status_t registerFactory_l(IFactory* factory,
player_type type);
- static player_type getDefaultPlayerType();
static Mutex sLock;
static tFactoryMap sFactoryMap;
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index a392b76..694f1a4 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -34,6 +34,7 @@
#include <utils/misc.h>
+#include <binder/IBatteryStats.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryHeapBase.h>
@@ -42,8 +43,10 @@
#include <utils/Errors.h> // for status_t
#include <utils/String8.h>
#include <utils/SystemClock.h>
+#include <utils/Timers.h>
#include <utils/Vector.h>
+#include <media/IMediaHTTPService.h>
#include <media/IRemoteDisplay.h>
#include <media/IRemoteDisplayClient.h>
#include <media/MediaPlayerInterface.h>
@@ -52,9 +55,11 @@
#include <media/Metadata.h>
#include <media/AudioTrack.h>
#include <media/MemoryLeakTrackUtil.h>
+#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/AudioPlayer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooperRoster.h>
#include <system/audio.h>
@@ -66,7 +71,6 @@
#include "MetadataRetrieverClient.h"
#include "MediaPlayerFactory.h"
-#include "MidiFile.h"
#include "TestPlayerStub.h"
#include "StagefrightPlayer.h"
#include "nuplayer/NuPlayerDriver.h"
@@ -185,8 +189,68 @@ bool findMetadata(const Metadata::Filter& filter, const int32_t val)
} // anonymous namespace
+namespace {
+using android::Parcel;
+using android::String16;
+
+// marshalling tag indicating flattened utf16 tags
+// keep in sync with frameworks/base/media/java/android/media/AudioAttributes.java
+const int32_t kAudioAttributesMarshallTagFlattenTags = 1;
+
+// Audio attributes format in a parcel:
+//
+// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | usage |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | content_type |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | source |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | flags |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | kAudioAttributesMarshallTagFlattenTags | // ignore tags if not found
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// | flattened tags in UTF16 |
+// | ... |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+// @param parcel Parcel that contains the audio attributes.
+// @param[out] attributes On exit points to an initialized audio_attributes_t structure
+void unmarshallAudioAttributes(const Parcel& parcel, audio_attributes_t *attributes)
+{
+ attributes->usage = (audio_usage_t) parcel.readInt32();
+ attributes->content_type = (audio_content_type_t) parcel.readInt32();
+ attributes->source = (audio_source_t) parcel.readInt32();
+ attributes->flags = (audio_flags_mask_t) parcel.readInt32();
+ const bool hasFlattenedTag = (parcel.readInt32() == kAudioAttributesMarshallTagFlattenTags);
+ if (hasFlattenedTag) {
+ // the tags are UTF16, convert to UTF8
+ String16 tags = parcel.readString16();
+ ssize_t realTagSize = utf16_to_utf8_length(tags.string(), tags.size());
+ if (realTagSize <= 0) {
+ strcpy(attributes->tags, "");
+ } else {
+ // copy the flattened string into the attributes as the destination for the conversion:
+ // copying array size -1, array for tags was calloc'd, no need to NULL-terminate it
+ size_t tagSize = realTagSize > AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1 ?
+ AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1 : realTagSize;
+ utf16_to_utf8(tags.string(), tagSize, attributes->tags);
+ }
+ } else {
+ ALOGE("unmarshallAudioAttributes() received unflattened tags, ignoring tag values");
+ strcpy(attributes->tags, "");
+ }
+}
+} // anonymous namespace
+
+
namespace android {
+extern ALooperRoster gLooperRoster;
+
+
static bool checkPermission(const char* permissionString) {
#ifndef HAVE_ANDROID_OS
return true;
@@ -220,6 +284,20 @@ MediaPlayerService::MediaPlayerService()
// speaker is on by default
mBatteryAudio.deviceOn[SPEAKER] = 1;
+ // reset battery stats
+ // if the mediaserver has crashed, battery stats could be left
+ // in bad state, reset the state upon service start.
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != NULL) {
+ const String16 name("batterystats");
+ sp<IBatteryStats> batteryStats =
+ interface_cast<IBatteryStats>(sm->getService(name));
+ if (batteryStats != NULL) {
+ batteryStats->noteResetVideo();
+ batteryStats->noteResetAudio();
+ }
+ }
+
MediaPlayerFactory::registerBuiltinFactories();
}
@@ -275,6 +353,10 @@ sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client
return c;
}
+sp<IMediaCodecList> MediaPlayerService::getCodecList() const {
+ return MediaCodecList::getLocalInstance();
+}
+
sp<IOMX> MediaPlayerService::getOMX() {
Mutex::Autolock autoLock(mLock);
@@ -306,33 +388,6 @@ sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay(
return new RemoteDisplay(client, iface.string());
}
-status_t MediaPlayerService::updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- return HTTPBase::UpdateProxyConfig(host, port, exclusionList);
-}
-
-status_t MediaPlayerService::AudioCache::dump(int fd, const Vector<String16>& args) const
-{
- const size_t SIZE = 256;
- char buffer[SIZE];
- String8 result;
-
- result.append(" AudioCache\n");
- if (mHeap != 0) {
- snprintf(buffer, 255, " heap base(%p), size(%zu), flags(%d)\n",
- mHeap->getBase(), mHeap->getSize(), mHeap->getFlags());
- result.append(buffer);
- }
- snprintf(buffer, 255, " msec per frame(%f), channel count(%d), format(%d), frame count(%zd)\n",
- mMsecsPerFrame, mChannelCount, mFormat, mFrameCount);
- result.append(buffer);
- snprintf(buffer, 255, " sample rate(%d), size(%d), error(%d), command complete(%s)\n",
- mSampleRate, mSize, mError, mCommandComplete?"true":"false");
- result.append(buffer);
- ::write(fd, result.string(), result.size());
- return NO_ERROR;
-}
-
status_t MediaPlayerService::AudioOutput::dump(int fd, const Vector<String16>& args) const
{
const size_t SIZE = 256;
@@ -377,6 +432,10 @@ status_t MediaPlayerService::Client::dump(int fd, const Vector<String16>& args)
return NO_ERROR;
}
+/**
+ * The only arguments this understands right now are -c, -von and -voff,
+ * which are parsed by ALooperRoster::dump()
+ */
status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)
{
const size_t SIZE = 256;
@@ -410,7 +469,7 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)
}
result.append(" Files opened and/or mapped:\n");
- snprintf(buffer, SIZE, "/proc/%d/maps", gettid());
+ snprintf(buffer, SIZE, "/proc/%d/maps", getpid());
FILE *f = fopen(buffer, "r");
if (f) {
while (!feof(f)) {
@@ -430,13 +489,13 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)
result.append("\n");
}
- snprintf(buffer, SIZE, "/proc/%d/fd", gettid());
+ snprintf(buffer, SIZE, "/proc/%d/fd", getpid());
DIR *d = opendir(buffer);
if (d) {
struct dirent *ent;
while((ent = readdir(d)) != NULL) {
if (strcmp(ent->d_name,".") && strcmp(ent->d_name,"..")) {
- snprintf(buffer, SIZE, "/proc/%d/fd/%s", gettid(), ent->d_name);
+ snprintf(buffer, SIZE, "/proc/%d/fd/%s", getpid(), ent->d_name);
struct stat s;
if (lstat(buffer, &s) == 0) {
if ((s.st_mode & S_IFMT) == S_IFLNK) {
@@ -477,6 +536,8 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)
result.append("\n");
}
+ gLooperRoster.dump(fd, args);
+
bool dumpMem = false;
for (size_t i = 0; i < args.size(); i++) {
if (args[i] == String16("-m")) {
@@ -512,6 +573,7 @@ MediaPlayerService::Client::Client(
mAudioSessionId = audioSessionId;
mUID = uid;
mRetransmitEndpointValid = false;
+ mAudioAttributes = NULL;
#if CALLBACK_ANTAGONIZER
ALOGD("create Antagonizer");
@@ -526,6 +588,9 @@ MediaPlayerService::Client::~Client()
wp<Client> client(this);
disconnect();
mService->removeClient(client);
+ if (mAudioAttributes != NULL) {
+ free(mAudioAttributes);
+ }
}
void MediaPlayerService::Client::disconnect()
@@ -590,7 +655,8 @@ sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
}
if (!p->hardwareOutput()) {
- mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid());
+ mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
+ mPid, mAudioAttributes);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
@@ -622,7 +688,9 @@ void MediaPlayerService::Client::setDataSource_post(
}
status_t MediaPlayerService::Client::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
if (url == NULL)
@@ -657,7 +725,7 @@ status_t MediaPlayerService::Client::setDataSource(
return NO_INIT;
}
- setDataSource_post(p, p->setDataSource(url, headers));
+ setDataSource_post(p, p->setDataSource(httpService, url, headers));
return mStatus;
}
}
@@ -674,8 +742,8 @@ status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64
ALOGV("st_dev = %llu", sb.st_dev);
ALOGV("st_mode = %u", sb.st_mode);
- ALOGV("st_uid = %lu", sb.st_uid);
- ALOGV("st_gid = %lu", sb.st_gid);
+ ALOGV("st_uid = %lu", static_cast<unsigned long>(sb.st_uid));
+ ALOGV("st_gid = %lu", static_cast<unsigned long>(sb.st_gid));
ALOGV("st_size = %llu", sb.st_size);
if (offset >= sb.st_size) {
@@ -736,8 +804,7 @@ status_t MediaPlayerService::Client::setVideoSurfaceTexture(
sp<MediaPlayerBase> p = getPlayer();
if (p == 0) return UNKNOWN_ERROR;
- sp<IBinder> binder(bufferProducer == NULL ? NULL :
- bufferProducer->asBinder());
+ sp<IBinder> binder(IInterface::asBinder(bufferProducer));
if (mConnectedWindowBinder == binder) {
return OK;
}
@@ -804,7 +871,7 @@ status_t MediaPlayerService::Client::setMetadataFilter(const Parcel& filter)
}
status_t MediaPlayerService::Client::getMetadata(
- bool update_only, bool apply_filter, Parcel *reply)
+ bool update_only, bool /*apply_filter*/, Parcel *reply)
{
sp<MediaPlayerBase> player = getPlayer();
if (player == 0) return UNKNOWN_ERROR;
@@ -969,6 +1036,22 @@ status_t MediaPlayerService::Client::setAudioStreamType(audio_stream_type_t type
return NO_ERROR;
}
+status_t MediaPlayerService::Client::setAudioAttributes_l(const Parcel &parcel)
+{
+ if (mAudioAttributes != NULL) { free(mAudioAttributes); }
+ mAudioAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+ unmarshallAudioAttributes(parcel, mAudioAttributes);
+
+ ALOGV("setAudioAttributes_l() usage=%d content=%d flags=0x%x tags=%s",
+ mAudioAttributes->usage, mAudioAttributes->content_type, mAudioAttributes->flags,
+ mAudioAttributes->tags);
+
+ if (mAudioOutput != 0) {
+ mAudioOutput->setAudioAttributes(mAudioAttributes);
+ }
+ return NO_ERROR;
+}
+
status_t MediaPlayerService::Client::setLooping(int loop)
{
ALOGV("[%d] setLooping(%d)", mConnId, loop);
@@ -1017,9 +1100,17 @@ status_t MediaPlayerService::Client::attachAuxEffect(int effectId)
status_t MediaPlayerService::Client::setParameter(int key, const Parcel &request) {
ALOGV("[%d] setParameter(%d)", mConnId, key);
- sp<MediaPlayerBase> p = getPlayer();
- if (p == 0) return UNKNOWN_ERROR;
- return p->setParameter(key, request);
+ switch (key) {
+ case KEY_PARAMETER_AUDIO_ATTRIBUTES:
+ {
+ Mutex::Autolock l(mLock);
+ return setAudioAttributes_l(request);
+ }
+ default:
+ sp<MediaPlayerBase> p = getPlayer();
+ if (p == 0) { return UNKNOWN_ERROR; }
+ return p->setParameter(key, request);
+ }
}
status_t MediaPlayerService::Client::getParameter(int key, Parcel *reply) {
@@ -1176,133 +1267,17 @@ int Antagonizer::callbackThread(void* user)
}
#endif
-status_t MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
-{
- ALOGV("decode(%s)", url);
- sp<MediaPlayerBase> player;
- status_t status = BAD_VALUE;
-
- // Protect our precious, precious DRMd ringtones by only allowing
- // decoding of http, but not filesystem paths or content Uris.
- // If the application wants to decode those, it should open a
- // filedescriptor for them and use that.
- if (url != NULL && strncmp(url, "http://", 7) != 0) {
- ALOGD("Can't decode %s by path, use filedescriptor instead", url);
- return BAD_VALUE;
- }
-
- player_type playerType =
- MediaPlayerFactory::getPlayerType(NULL /* client */, url);
- ALOGV("player type = %d", playerType);
-
- // create the right type of player
- sp<AudioCache> cache = new AudioCache(heap);
- player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify);
- if (player == NULL) goto Exit;
- if (player->hardwareOutput()) goto Exit;
-
- static_cast<MediaPlayerInterface*>(player.get())->setAudioSink(cache);
-
- // set data source
- if (player->setDataSource(url) != NO_ERROR) goto Exit;
-
- ALOGV("prepare");
- player->prepareAsync();
-
- ALOGV("wait for prepare");
- if (cache->wait() != NO_ERROR) goto Exit;
-
- ALOGV("start");
- player->start();
-
- ALOGV("wait for playback complete");
- cache->wait();
- // in case of error, return what was successfully decoded.
- if (cache->size() == 0) {
- goto Exit;
- }
-
- *pSize = cache->size();
- *pSampleRate = cache->sampleRate();
- *pNumChannels = cache->channelCount();
- *pFormat = cache->format();
- ALOGV("return size %d sampleRate=%u, channelCount = %d, format = %d",
- *pSize, *pSampleRate, *pNumChannels, *pFormat);
- status = NO_ERROR;
-
-Exit:
- if (player != 0) player->reset();
- return status;
-}
-
-status_t MediaPlayerService::decode(int fd, int64_t offset, int64_t length,
- uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
-{
- ALOGV("decode(%d, %lld, %lld)", fd, offset, length);
- sp<MediaPlayerBase> player;
- status_t status = BAD_VALUE;
-
- player_type playerType = MediaPlayerFactory::getPlayerType(NULL /* client */,
- fd,
- offset,
- length);
- ALOGV("player type = %d", playerType);
-
- // create the right type of player
- sp<AudioCache> cache = new AudioCache(heap);
- player = MediaPlayerFactory::createPlayer(playerType, cache.get(), cache->notify);
- if (player == NULL) goto Exit;
- if (player->hardwareOutput()) goto Exit;
-
- static_cast<MediaPlayerInterface*>(player.get())->setAudioSink(cache);
-
- // set data source
- if (player->setDataSource(fd, offset, length) != NO_ERROR) goto Exit;
-
- ALOGV("prepare");
- player->prepareAsync();
-
- ALOGV("wait for prepare");
- if (cache->wait() != NO_ERROR) goto Exit;
-
- ALOGV("start");
- player->start();
-
- ALOGV("wait for playback complete");
- cache->wait();
- // in case of error, return what was successfully decoded.
- if (cache->size() == 0) {
- goto Exit;
- }
-
- *pSize = cache->size();
- *pSampleRate = cache->sampleRate();
- *pNumChannels = cache->channelCount();
- *pFormat = cache->format();
- ALOGV("return size %d, sampleRate=%u, channelCount = %d, format = %d",
- *pSize, *pSampleRate, *pNumChannels, *pFormat);
- status = NO_ERROR;
-
-Exit:
- if (player != 0) player->reset();
- ::close(fd);
- return status;
-}
-
-
#undef LOG_TAG
#define LOG_TAG "AudioSink"
-MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid)
+MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid, int pid,
+ const audio_attributes_t* attr)
: mCallback(NULL),
mCallbackCookie(NULL),
mCallbackData(NULL),
mBytesWritten(0),
mSessionId(sessionId),
mUid(uid),
+ mPid(pid),
mFlags(AUDIO_OUTPUT_FLAG_NONE) {
ALOGV("AudioOutput(%d)", sessionId);
mStreamType = AUDIO_STREAM_MUSIC;
@@ -1314,6 +1289,7 @@ MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid)
mAuxEffectId = 0;
mSendLevel = 0.0;
setMinBufferCount();
+ mAttributes = attr;
}
MediaPlayerService::AudioOutput::~AudioOutput()
@@ -1384,6 +1360,12 @@ status_t MediaPlayerService::AudioOutput::getPosition(uint32_t *position) const
return mTrack->getPosition(position);
}
+status_t MediaPlayerService::AudioOutput::getTimestamp(AudioTimestamp &ts) const
+{
+ if (mTrack == 0) return NO_INIT;
+ return mTrack->getTimestamp(ts);
+}
+
status_t MediaPlayerService::AudioOutput::getFramesWritten(uint32_t *frameswritten) const
{
if (mTrack == 0) return NO_INIT;
@@ -1403,6 +1385,10 @@ String8 MediaPlayerService::AudioOutput::getParameters(const String8& keys)
return mTrack->getParameters(keys);
}
+void MediaPlayerService::AudioOutput::setAudioAttributes(const audio_attributes_t * attributes) {
+ mAttributes = attributes;
+}
+
void MediaPlayerService::AudioOutput::deleteRecycledTrack()
{
ALOGV("deleteRecycledTrack");
@@ -1450,7 +1436,7 @@ status_t MediaPlayerService::AudioOutput::open(
format, bufferCount, mSessionId, flags);
uint32_t afSampleRate;
size_t afFrameCount;
- uint32_t frameCount;
+ size_t frameCount;
// offloading is only supported in callback mode for now.
// offloadInfo must be present if offload flag is set
@@ -1551,7 +1537,9 @@ status_t MediaPlayerService::AudioOutput::open(
mSessionId,
AudioTrack::TRANSFER_CALLBACK,
offloadInfo,
- mUid);
+ mUid,
+ mPid,
+ mAttributes);
} else {
t = new AudioTrack(
mStreamType,
@@ -1566,13 +1554,19 @@ status_t MediaPlayerService::AudioOutput::open(
mSessionId,
AudioTrack::TRANSFER_DEFAULT,
NULL, // offload info
- mUid);
+ mUid,
+ mPid,
+ mAttributes);
}
if ((t == 0) || (t->initCheck() != NO_ERROR)) {
ALOGE("Unable to create audio track");
delete newcbd;
return NO_INIT;
+ } else {
+ // successful AudioTrack initialization implies a legacy stream type was generated
+ // from the audio attributes
+ mStreamType = t->streamType();
}
}
@@ -1672,12 +1666,14 @@ void MediaPlayerService::AudioOutput::switchToNextOutput() {
ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)
{
- LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
+ LOG_ALWAYS_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
//ALOGV("write(%p, %u)", buffer, size);
if (mTrack != 0) {
ssize_t ret = mTrack->write(buffer, size);
- mBytesWritten += ret;
+ if (ret >= 0) {
+ mBytesWritten += ret;
+ }
return ret;
}
return NO_INIT;
@@ -1777,7 +1773,8 @@ void MediaPlayerService::AudioOutput::CallbackWrapper(
me, buffer->raw, buffer->size, me->mCallbackCookie,
CB_EVENT_FILL_BUFFER);
- if (actualSize == 0 && buffer->size > 0 && me->mNextOutput == NULL) {
+ if ((me->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0 &&
+ actualSize == 0 && buffer->size > 0 && me->mNextOutput == NULL) {
// We've reached EOS but the audio track is not stopped yet,
// keep playing silence.
@@ -1819,38 +1816,6 @@ uint32_t MediaPlayerService::AudioOutput::getSampleRate() const
return mTrack->getSampleRate();
}
-#undef LOG_TAG
-#define LOG_TAG "AudioCache"
-MediaPlayerService::AudioCache::AudioCache(const sp<IMemoryHeap>& heap) :
- mHeap(heap), mChannelCount(0), mFrameCount(1024), mSampleRate(0), mSize(0),
- mError(NO_ERROR), mCommandComplete(false)
-{
-}
-
-uint32_t MediaPlayerService::AudioCache::latency () const
-{
- return 0;
-}
-
-float MediaPlayerService::AudioCache::msecsPerFrame() const
-{
- return mMsecsPerFrame;
-}
-
-status_t MediaPlayerService::AudioCache::getPosition(uint32_t *position) const
-{
- if (position == 0) return BAD_VALUE;
- *position = mSize;
- return NO_ERROR;
-}
-
-status_t MediaPlayerService::AudioCache::getFramesWritten(uint32_t *written) const
-{
- if (written == 0) return BAD_VALUE;
- *written = mSize;
- return NO_ERROR;
-}
-
////////////////////////////////////////////////////////////////////////////////
struct CallbackThread : public Thread {
@@ -1909,6 +1874,8 @@ bool CallbackThread::threadLoop() {
if (actualSize > 0) {
sink->write(mBuffer, actualSize);
+ // Could return false on sink->write() error or short count.
+ // Not necessarily appropriate but would work for AudioCache behavior.
}
return true;
@@ -1916,119 +1883,6 @@ bool CallbackThread::threadLoop() {
////////////////////////////////////////////////////////////////////////////////
-status_t MediaPlayerService::AudioCache::open(
- uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
- audio_format_t format, int bufferCount,
- AudioCallback cb, void *cookie, audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo)
-{
- ALOGV("open(%u, %d, 0x%x, %d, %d)", sampleRate, channelCount, channelMask, format, bufferCount);
- if (mHeap->getHeapID() < 0) {
- return NO_INIT;
- }
-
- mSampleRate = sampleRate;
- mChannelCount = (uint16_t)channelCount;
- mFormat = format;
- mMsecsPerFrame = 1.e3 / (float) sampleRate;
-
- if (cb != NULL) {
- mCallbackThread = new CallbackThread(this, cb, cookie);
- }
- return NO_ERROR;
-}
-
-status_t MediaPlayerService::AudioCache::start() {
- if (mCallbackThread != NULL) {
- mCallbackThread->run("AudioCache callback");
- }
- return NO_ERROR;
-}
-
-void MediaPlayerService::AudioCache::stop() {
- if (mCallbackThread != NULL) {
- mCallbackThread->requestExitAndWait();
- }
-}
-
-ssize_t MediaPlayerService::AudioCache::write(const void* buffer, size_t size)
-{
- ALOGV("write(%p, %u)", buffer, size);
- if ((buffer == 0) || (size == 0)) return size;
-
- uint8_t* p = static_cast<uint8_t*>(mHeap->getBase());
- if (p == NULL) return NO_INIT;
- p += mSize;
- ALOGV("memcpy(%p, %p, %u)", p, buffer, size);
- if (mSize + size > mHeap->getSize()) {
- ALOGE("Heap size overflow! req size: %d, max size: %d", (mSize + size), mHeap->getSize());
- size = mHeap->getSize() - mSize;
- }
- memcpy(p, buffer, size);
- mSize += size;
- return size;
-}
-
-// call with lock held
-status_t MediaPlayerService::AudioCache::wait()
-{
- Mutex::Autolock lock(mLock);
- while (!mCommandComplete) {
- mSignal.wait(mLock);
- }
- mCommandComplete = false;
-
- if (mError == NO_ERROR) {
- ALOGV("wait - success");
- } else {
- ALOGV("wait - error");
- }
- return mError;
-}
-
-void MediaPlayerService::AudioCache::notify(
- void* cookie, int msg, int ext1, int ext2, const Parcel *obj)
-{
- ALOGV("notify(%p, %d, %d, %d)", cookie, msg, ext1, ext2);
- AudioCache* p = static_cast<AudioCache*>(cookie);
-
- // ignore buffering messages
- switch (msg)
- {
- case MEDIA_ERROR:
- ALOGE("Error %d, %d occurred", ext1, ext2);
- p->mError = ext1;
- break;
- case MEDIA_PREPARED:
- ALOGV("prepared");
- break;
- case MEDIA_PLAYBACK_COMPLETE:
- ALOGV("playback complete");
- break;
- default:
- ALOGV("ignored");
- return;
- }
-
- // wake up thread
- Mutex::Autolock lock(p->mLock);
- p->mCommandComplete = true;
- p->mSignal.signal();
-}
-
-int MediaPlayerService::AudioCache::getSessionId() const
-{
- return 0;
-}
-
-uint32_t MediaPlayerService::AudioCache::getSampleRate() const
-{
- if (mMsecsPerFrame == 0) {
- return 0;
- }
- return (uint32_t)(1.e3 / mMsecsPerFrame);
-}
-
void MediaPlayerService::addBatteryData(uint32_t params)
{
Mutex::Autolock lock(mLock);
@@ -2072,7 +1926,7 @@ void MediaPlayerService::addBatteryData(uint32_t params)
return;
}
- // an sudio stream is started
+ // an audio stream is started
if (params & kBatteryDataAudioFlingerStart) {
// record the start time only if currently no other audio
// is being played
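
AudioOutput also gains getTimestamp(), a pass-through to AudioTrack::getTimestamp() declared in MediaPlayerService.h below, which gives players a presentation-time anchor rather than the coarser getPosition(). A consumer-side sketch, assuming the AudioTimestamp layout from AudioTimestamp.h (a presented frame count in mPosition and the CLOCK_MONOTONIC time of that frame in mTime) and an illustrative fixed sample rate:

    AudioTimestamp ts;
    if (audioOutput->getTimestamp(ts) == NO_ERROR) {     // audioOutput: the sink above
        const int64_t sampleRate = 48000;                // assumed stream rate, for illustration
        int64_t nowUs = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
        int64_t tsUs = int64_t(ts.mTime.tv_sec) * 1000000LL + ts.mTime.tv_nsec / 1000;
        // Frames presented as of ts.mTime, extrapolated to "now".
        int64_t positionFrames = (int64_t) ts.mPosition
                + (nowUs - tsUs) * sampleRate / 1000000;
        (void) positionFrames;  // real players clamp and smooth this estimate
    }
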
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 9c084e1..fad3447 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -72,11 +72,11 @@ class MediaPlayerService : public BnMediaPlayerService
class CallbackData;
public:
- AudioOutput(int sessionId, int uid);
+ AudioOutput(int sessionId, int uid, int pid,
+ const audio_attributes_t * attr);
virtual ~AudioOutput();
virtual bool ready() const { return mTrack != 0; }
- virtual bool realtime() const { return true; }
virtual ssize_t bufferSize() const;
virtual ssize_t frameCount() const;
virtual ssize_t channelCount() const;
@@ -84,6 +84,7 @@ class MediaPlayerService : public BnMediaPlayerService
virtual uint32_t latency() const;
virtual float msecsPerFrame() const;
virtual status_t getPosition(uint32_t *position) const;
+ virtual status_t getTimestamp(AudioTimestamp &ts) const;
virtual status_t getFramesWritten(uint32_t *frameswritten) const;
virtual int getSessionId() const;
virtual uint32_t getSampleRate() const;
@@ -104,6 +105,7 @@ class MediaPlayerService : public BnMediaPlayerService
void setAudioStreamType(audio_stream_type_t streamType) {
mStreamType = streamType; }
virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
+ void setAudioAttributes(const audio_attributes_t * attributes);
void setVolume(float left, float right);
virtual status_t setPlaybackRatePermille(int32_t ratePermille);
@@ -133,6 +135,7 @@ class MediaPlayerService : public BnMediaPlayerService
CallbackData * mCallbackData;
uint64_t mBytesWritten;
audio_stream_type_t mStreamType;
+ const audio_attributes_t *mAttributes;
float mLeftVolume;
float mRightVolume;
int32_t mPlaybackRatePermille;
@@ -140,6 +143,7 @@ class MediaPlayerService : public BnMediaPlayerService
float mMsecsPerFrame;
int mSessionId;
int mUid;
+ int mPid;
float mSendLevel;
int mAuxEffectId;
static bool mIsOnEmulator;
@@ -179,73 +183,6 @@ class MediaPlayerService : public BnMediaPlayerService
}; // AudioOutput
- class AudioCache : public MediaPlayerBase::AudioSink
- {
- public:
- AudioCache(const sp<IMemoryHeap>& heap);
- virtual ~AudioCache() {}
-
- virtual bool ready() const { return (mChannelCount > 0) && (mHeap->getHeapID() > 0); }
- virtual bool realtime() const { return false; }
- virtual ssize_t bufferSize() const { return frameSize() * mFrameCount; }
- virtual ssize_t frameCount() const { return mFrameCount; }
- virtual ssize_t channelCount() const { return (ssize_t)mChannelCount; }
- virtual ssize_t frameSize() const { return ssize_t(mChannelCount * ((mFormat == AUDIO_FORMAT_PCM_16_BIT)?sizeof(int16_t):sizeof(u_int8_t))); }
- virtual uint32_t latency() const;
- virtual float msecsPerFrame() const;
- virtual status_t getPosition(uint32_t *position) const;
- virtual status_t getFramesWritten(uint32_t *frameswritten) const;
- virtual int getSessionId() const;
- virtual uint32_t getSampleRate() const;
-
- virtual status_t open(
- uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
- audio_format_t format, int bufferCount = 1,
- AudioCallback cb = NULL, void *cookie = NULL,
- audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
- const audio_offload_info_t *offloadInfo = NULL);
-
- virtual status_t start();
- virtual ssize_t write(const void* buffer, size_t size);
- virtual void stop();
- virtual void flush() {}
- virtual void pause() {}
- virtual void close() {}
- void setAudioStreamType(audio_stream_type_t streamType) {}
- // stream type is not used for AudioCache
- virtual audio_stream_type_t getAudioStreamType() const { return AUDIO_STREAM_DEFAULT; }
-
- void setVolume(float left, float right) {}
- virtual status_t setPlaybackRatePermille(int32_t ratePermille) { return INVALID_OPERATION; }
- uint32_t sampleRate() const { return mSampleRate; }
- audio_format_t format() const { return mFormat; }
- size_t size() const { return mSize; }
- status_t wait();
-
- sp<IMemoryHeap> getHeap() const { return mHeap; }
-
- static void notify(void* cookie, int msg,
- int ext1, int ext2, const Parcel *obj);
- virtual status_t dump(int fd, const Vector<String16>& args) const;
-
- private:
- AudioCache();
-
- Mutex mLock;
- Condition mSignal;
- sp<IMemoryHeap> mHeap;
- float mMsecsPerFrame;
- uint16_t mChannelCount;
- audio_format_t mFormat;
- ssize_t mFrameCount;
- uint32_t mSampleRate;
- uint32_t mSize;
- int mError;
- bool mCommandComplete;
-
- sp<Thread> mCallbackThread;
- }; // AudioCache
-
public:
static void instantiate();
@@ -256,13 +193,7 @@ public:
virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId);
- virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize);
- virtual status_t decode(int fd, int64_t offset, int64_t length,
- uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize);
+ virtual sp<IMediaCodecList> getCodecList() const;
virtual sp<IOMX> getOMX();
virtual sp<ICrypto> makeCrypto();
virtual sp<IDrm> makeDrm();
@@ -272,9 +203,6 @@ public:
const String8& iface);
virtual status_t dump(int fd, const Vector<String16>& args);
- virtual status_t updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
void removeClient(wp<Client> client);
// For battery usage tracking purpose
@@ -356,6 +284,7 @@ private:
sp<MediaPlayerBase> createPlayer(player_type playerType);
virtual status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers);
@@ -405,6 +334,8 @@ private:
// Disconnect from the currently connected ANativeWindow.
void disconnectNativeWindow();
+ status_t setAudioAttributes_l(const Parcel &request);
+
mutable Mutex mLock;
sp<MediaPlayerBase> mPlayer;
sp<MediaPlayerService> mService;
@@ -415,6 +346,7 @@ private:
bool mLoop;
int32_t mConnId;
int mAudioSessionId;
+ audio_attributes_t * mAudioAttributes;
uid_t mUID;
sp<ANativeWindow> mConnectedWindow;
sp<IBinder> mConnectedWindowBinder;
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index a9820e0..194abbb 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -95,7 +95,8 @@ status_t MediaRecorderClient::setPreviewSurface(const sp<IGraphicBufferProducer>
status_t MediaRecorderClient::setVideoSource(int vs)
{
ALOGV("setVideoSource(%d)", vs);
- if (!checkPermission(cameraPermission)) {
+ // Check camera permission for sources other than SURFACE
+ if (vs != VIDEO_SOURCE_SURFACE && !checkPermission(cameraPermission)) {
return PERMISSION_DENIED;
}
Mutex::Autolock lock(mLock);
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 348957f..715cc0c 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -31,10 +31,10 @@
#include <binder/MemoryHeapBase.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
+#include <media/IMediaHTTPService.h>
#include <media/MediaMetadataRetrieverInterface.h>
#include <media/MediaPlayerInterface.h>
#include <private/media/VideoFrame.h>
-#include "MidiMetadataRetriever.h"
#include "MetadataRetrieverClient.h"
#include "StagefrightMetadataRetriever.h"
#include "MediaPlayerFactory.h"
@@ -56,7 +56,7 @@ MetadataRetrieverClient::~MetadataRetrieverClient()
disconnect();
}
-status_t MetadataRetrieverClient::dump(int fd, const Vector<String16>& args) const
+status_t MetadataRetrieverClient::dump(int fd, const Vector<String16>& /*args*/) const
{
const size_t SIZE = 256;
char buffer[SIZE];
@@ -89,10 +89,6 @@ static sp<MediaMetadataRetrieverBase> createRetriever(player_type playerType)
p = new StagefrightMetadataRetriever;
break;
}
- case SONIVOX_PLAYER:
- ALOGV("create midi metadata retriever");
- p = new MidiMetadataRetriever();
- break;
default:
// TODO:
// support for TEST_PLAYER
@@ -106,7 +102,9 @@ static sp<MediaMetadataRetrieverBase> createRetriever(player_type playerType)
}
status_t MetadataRetrieverClient::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
Mutex::Autolock lock(mLock);
@@ -127,7 +125,7 @@ status_t MetadataRetrieverClient::setDataSource(
ALOGV("player type = %d", playerType);
sp<MediaMetadataRetrieverBase> p = createRetriever(playerType);
if (p == NULL) return NO_INIT;
- status_t ret = p->setDataSource(url, headers);
+ status_t ret = p->setDataSource(httpService, url, headers);
if (ret == NO_ERROR) mRetriever = p;
return ret;
}
@@ -144,8 +142,8 @@ status_t MetadataRetrieverClient::setDataSource(int fd, int64_t offset, int64_t
}
ALOGV("st_dev = %llu", sb.st_dev);
ALOGV("st_mode = %u", sb.st_mode);
- ALOGV("st_uid = %lu", sb.st_uid);
- ALOGV("st_gid = %lu", sb.st_gid);
+ ALOGV("st_uid = %lu", static_cast<unsigned long>(sb.st_uid));
+ ALOGV("st_gid = %lu", static_cast<unsigned long>(sb.st_gid));
ALOGV("st_size = %llu", sb.st_size);
if (offset >= sb.st_size) {
@@ -230,7 +228,7 @@ sp<IMemory> MetadataRetrieverClient::extractAlbumArt()
ALOGE("failed to extract an album art");
return NULL;
}
- size_t size = sizeof(MediaAlbumArt) + albumArt->mSize;
+ size_t size = sizeof(MediaAlbumArt) + albumArt->size();
sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
if (heap == NULL) {
ALOGE("failed to create MemoryDealer object");
@@ -243,11 +241,9 @@ sp<IMemory> MetadataRetrieverClient::extractAlbumArt()
delete albumArt;
return NULL;
}
- MediaAlbumArt *albumArtCopy = static_cast<MediaAlbumArt *>(mAlbumArt->pointer());
- albumArtCopy->mSize = albumArt->mSize;
- albumArtCopy->mData = (uint8_t *)albumArtCopy + sizeof(MediaAlbumArt);
- memcpy(albumArtCopy->mData, albumArt->mData, albumArt->mSize);
- delete albumArt; // Fix memory leakage
+ MediaAlbumArt::init((MediaAlbumArt *) mAlbumArt->pointer(),
+ albumArt->size(), albumArt->data());
+ delete albumArt; // We've taken our copy.
return mAlbumArt;
}
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index f08f933..9d3fbe9 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -30,6 +30,7 @@
namespace android {
+struct IMediaHTTPService;
class IMediaPlayerService;
class MemoryDealer;
@@ -43,7 +44,9 @@ public:
virtual void disconnect();
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
virtual sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
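
The header change above threads an IMediaHTTPService through setDataSource(), so HTTP sources are fetched via a caller-supplied service rather than an in-process HTTP stack. A hedged caller-side sketch; the service, URL, and header values are illustrative and do not come from this patch:

    // Sketch only: passing an explicit HTTP service when setting a URL source.
    KeyedVector<String8, String8> headers;
    headers.add(String8("User-Agent"), String8("example-agent"));   // illustrative
    sp<IMediaHTTPService> httpService = /* provided by the calling process */ NULL;
    status_t err = retriever->setDataSource(
            httpService, "http://example.com/track.mp3", &headers);
    if (err != NO_ERROR) {
        ALOGE("setDataSource failed: %d", err);
    }
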
diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp
deleted file mode 100644
index 0a6aa90..0000000
--- a/media/libmediaplayerservice/MidiFile.cpp
+++ /dev/null
@@ -1,558 +0,0 @@
-/* MidiFile.cpp
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MidiFile"
-#include "utils/Log.h"
-
-#include <stdio.h>
-#include <assert.h>
-#include <limits.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <sched.h>
-#include <utils/threads.h>
-#include <libsonivox/eas_reverb.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <unistd.h>
-
-#include <system/audio.h>
-
-#include "MidiFile.h"
-
-// ----------------------------------------------------------------------------
-
-namespace android {
-
-// ----------------------------------------------------------------------------
-
-// The midi engine buffers are a bit small (128 frames), so we batch them up
-static const int NUM_BUFFERS = 4;
-
-// TODO: Determine appropriate return codes
-static status_t ERROR_NOT_OPEN = -1;
-static status_t ERROR_OPEN_FAILED = -2;
-static status_t ERROR_EAS_FAILURE = -3;
-static status_t ERROR_ALLOCATE_FAILED = -4;
-
-static const S_EAS_LIB_CONFIG* pLibConfig = NULL;
-
-MidiFile::MidiFile() :
- mEasData(NULL), mEasHandle(NULL), mAudioBuffer(NULL),
- mPlayTime(-1), mDuration(-1), mState(EAS_STATE_ERROR),
- mStreamType(AUDIO_STREAM_MUSIC), mLoop(false), mExit(false),
- mPaused(false), mRender(false), mTid(-1)
-{
- ALOGV("constructor");
-
- mFileLocator.path = NULL;
- mFileLocator.fd = -1;
- mFileLocator.offset = 0;
- mFileLocator.length = 0;
-
- // get the library configuration and do sanity check
- if (pLibConfig == NULL)
- pLibConfig = EAS_Config();
- if ((pLibConfig == NULL) || (LIB_VERSION != pLibConfig->libVersion)) {
- ALOGE("EAS library/header mismatch");
- goto Failed;
- }
-
- // initialize EAS library
- if (EAS_Init(&mEasData) != EAS_SUCCESS) {
- ALOGE("EAS_Init failed");
- goto Failed;
- }
-
- // select reverb preset and enable
- EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_PRESET, EAS_PARAM_REVERB_CHAMBER);
- EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_BYPASS, EAS_FALSE);
-
- // create playback thread
- {
- Mutex::Autolock l(mMutex);
- mThread = new MidiFileThread(this);
- mThread->run("midithread", ANDROID_PRIORITY_AUDIO);
- mCondition.wait(mMutex);
- ALOGV("thread started");
- }
-
- // indicate success
- if (mTid > 0) {
- ALOGV(" render thread(%d) started", mTid);
- mState = EAS_STATE_READY;
- }
-
-Failed:
- return;
-}
-
-status_t MidiFile::initCheck()
-{
- if (mState == EAS_STATE_ERROR) return ERROR_EAS_FAILURE;
- return NO_ERROR;
-}
-
-MidiFile::~MidiFile() {
- ALOGV("MidiFile destructor");
- release();
-}
-
-status_t MidiFile::setDataSource(
- const char* path, const KeyedVector<String8, String8> *) {
- ALOGV("MidiFile::setDataSource url=%s", path);
- Mutex::Autolock lock(mMutex);
-
- // file still open?
- if (mEasHandle) {
- reset_nosync();
- }
-
- // open file and set paused state
- mFileLocator.path = strdup(path);
- mFileLocator.fd = -1;
- mFileLocator.offset = 0;
- mFileLocator.length = 0;
- EAS_RESULT result = EAS_OpenFile(mEasData, &mFileLocator, &mEasHandle);
- if (result == EAS_SUCCESS) {
- updateState();
- }
-
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_OpenFile failed: [%d]", (int)result);
- mState = EAS_STATE_ERROR;
- return ERROR_OPEN_FAILED;
- }
-
- mState = EAS_STATE_OPEN;
- mPlayTime = 0;
- return NO_ERROR;
-}
-
-status_t MidiFile::setDataSource(int fd, int64_t offset, int64_t length)
-{
- ALOGV("MidiFile::setDataSource fd=%d", fd);
- Mutex::Autolock lock(mMutex);
-
- // file still open?
- if (mEasHandle) {
- reset_nosync();
- }
-
- // open file and set paused state
- mFileLocator.fd = dup(fd);
- mFileLocator.offset = offset;
- mFileLocator.length = length;
- EAS_RESULT result = EAS_OpenFile(mEasData, &mFileLocator, &mEasHandle);
- updateState();
-
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_OpenFile failed: [%d]", (int)result);
- mState = EAS_STATE_ERROR;
- return ERROR_OPEN_FAILED;
- }
-
- mState = EAS_STATE_OPEN;
- mPlayTime = 0;
- return NO_ERROR;
-}
-
-status_t MidiFile::prepare()
-{
- ALOGV("MidiFile::prepare");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- EAS_RESULT result;
- if ((result = EAS_Prepare(mEasData, mEasHandle)) != EAS_SUCCESS) {
- ALOGE("EAS_Prepare failed: [%ld]", result);
- return ERROR_EAS_FAILURE;
- }
- updateState();
- return NO_ERROR;
-}
-
-status_t MidiFile::prepareAsync()
-{
- ALOGV("MidiFile::prepareAsync");
- status_t ret = prepare();
-
- // don't hold lock during callback
- if (ret == NO_ERROR) {
- sendEvent(MEDIA_PREPARED);
- } else {
- sendEvent(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ret);
- }
- return ret;
-}
-
-status_t MidiFile::start()
-{
- ALOGV("MidiFile::start");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
-
- // resuming after pause?
- if (mPaused) {
- if (EAS_Resume(mEasData, mEasHandle) != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
- mPaused = false;
- updateState();
- }
-
- mRender = true;
- if (mState == EAS_STATE_PLAY) {
- sendEvent(MEDIA_STARTED);
- }
-
- // wake up render thread
- ALOGV(" wakeup render thread");
- mCondition.signal();
- return NO_ERROR;
-}
-
-status_t MidiFile::stop()
-{
- ALOGV("MidiFile::stop");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- if (!mPaused && (mState != EAS_STATE_STOPPED)) {
- EAS_RESULT result = EAS_Pause(mEasData, mEasHandle);
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_Pause returned error %ld", result);
- return ERROR_EAS_FAILURE;
- }
- }
- mPaused = false;
- sendEvent(MEDIA_STOPPED);
- return NO_ERROR;
-}
-
-status_t MidiFile::seekTo(int position)
-{
- ALOGV("MidiFile::seekTo %d", position);
- // hold lock during EAS calls
- {
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- EAS_RESULT result;
- if ((result = EAS_Locate(mEasData, mEasHandle, position, false))
- != EAS_SUCCESS)
- {
- ALOGE("EAS_Locate returned %ld", result);
- return ERROR_EAS_FAILURE;
- }
- EAS_GetLocation(mEasData, mEasHandle, &mPlayTime);
- }
- sendEvent(MEDIA_SEEK_COMPLETE);
- return NO_ERROR;
-}
-
-status_t MidiFile::pause()
-{
- ALOGV("MidiFile::pause");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- if ((mState == EAS_STATE_PAUSING) || (mState == EAS_STATE_PAUSED)) return NO_ERROR;
- if (EAS_Pause(mEasData, mEasHandle) != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
- mPaused = true;
- sendEvent(MEDIA_PAUSED);
- return NO_ERROR;
-}
-
-bool MidiFile::isPlaying()
-{
- ALOGV("MidiFile::isPlaying, mState=%d", int(mState));
- if (!mEasHandle || mPaused) return false;
- return (mState == EAS_STATE_PLAY);
-}
-
-status_t MidiFile::getCurrentPosition(int* position)
-{
- ALOGV("MidiFile::getCurrentPosition");
- if (!mEasHandle) {
- ALOGE("getCurrentPosition(): file not open");
- return ERROR_NOT_OPEN;
- }
- if (mPlayTime < 0) {
- ALOGE("getCurrentPosition(): mPlayTime = %ld", mPlayTime);
- return ERROR_EAS_FAILURE;
- }
- *position = mPlayTime;
- return NO_ERROR;
-}
-
-status_t MidiFile::getDuration(int* duration)
-{
-
- ALOGV("MidiFile::getDuration");
- {
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) return ERROR_NOT_OPEN;
- *duration = mDuration;
- }
-
- // if no duration cached, get the duration
- // don't need a lock here because we spin up a new engine
- if (*duration < 0) {
- EAS_I32 temp;
- EAS_DATA_HANDLE easData = NULL;
- EAS_HANDLE easHandle = NULL;
- EAS_RESULT result = EAS_Init(&easData);
- if (result == EAS_SUCCESS) {
- result = EAS_OpenFile(easData, &mFileLocator, &easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_Prepare(easData, easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_ParseMetaData(easData, easHandle, &temp);
- }
- if (easHandle) {
- EAS_CloseFile(easData, easHandle);
- }
- if (easData) {
- EAS_Shutdown(easData);
- }
-
- if (result != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
-
- // cache successful result
- mDuration = *duration = int(temp);
- }
-
- return NO_ERROR;
-}
-
-status_t MidiFile::release()
-{
- ALOGV("MidiFile::release");
- Mutex::Autolock l(mMutex);
- reset_nosync();
-
- // wait for render thread to exit
- mExit = true;
- mCondition.signal();
-
- // wait for thread to exit
- if (mAudioBuffer) {
- mCondition.wait(mMutex);
- }
-
- // release resources
- if (mEasData) {
- EAS_Shutdown(mEasData);
- mEasData = NULL;
- }
- return NO_ERROR;
-}
-
-status_t MidiFile::reset()
-{
- ALOGV("MidiFile::reset");
- Mutex::Autolock lock(mMutex);
- return reset_nosync();
-}
-
-// call only with mutex held
-status_t MidiFile::reset_nosync()
-{
- ALOGV("MidiFile::reset_nosync");
- sendEvent(MEDIA_STOPPED);
- // close file
- if (mEasHandle) {
- EAS_CloseFile(mEasData, mEasHandle);
- mEasHandle = NULL;
- }
- if (mFileLocator.path) {
- free((void*)mFileLocator.path);
- mFileLocator.path = NULL;
- }
- if (mFileLocator.fd >= 0) {
- close(mFileLocator.fd);
- }
- mFileLocator.fd = -1;
- mFileLocator.offset = 0;
- mFileLocator.length = 0;
-
- mPlayTime = -1;
- mDuration = -1;
- mLoop = false;
- mPaused = false;
- mRender = false;
- return NO_ERROR;
-}
-
-status_t MidiFile::setLooping(int loop)
-{
- ALOGV("MidiFile::setLooping");
- Mutex::Autolock lock(mMutex);
- if (!mEasHandle) {
- return ERROR_NOT_OPEN;
- }
- loop = loop ? -1 : 0;
- if (EAS_SetRepeat(mEasData, mEasHandle, loop) != EAS_SUCCESS) {
- return ERROR_EAS_FAILURE;
- }
- return NO_ERROR;
-}
-
-status_t MidiFile::createOutputTrack() {
- if (mAudioSink->open(pLibConfig->sampleRate, pLibConfig->numChannels,
- CHANNEL_MASK_USE_CHANNEL_ORDER, AUDIO_FORMAT_PCM_16_BIT, 2 /*bufferCount*/) != NO_ERROR) {
- ALOGE("mAudioSink open failed");
- return ERROR_OPEN_FAILED;
- }
- return NO_ERROR;
-}
-
-int MidiFile::render() {
- EAS_RESULT result = EAS_FAILURE;
- EAS_I32 count;
- int temp;
- bool audioStarted = false;
-
- ALOGV("MidiFile::render");
-
- // allocate render buffer
- mAudioBuffer = new EAS_PCM[pLibConfig->mixBufferSize * pLibConfig->numChannels * NUM_BUFFERS];
- if (!mAudioBuffer) {
- ALOGE("mAudioBuffer allocate failed");
- goto threadExit;
- }
-
- // signal main thread that we started
- {
- Mutex::Autolock l(mMutex);
- mTid = gettid();
- ALOGV("render thread(%d) signal", mTid);
- mCondition.signal();
- }
-
- while (1) {
- mMutex.lock();
-
- // nothing to render, wait for client thread to wake us up
- while (!mRender && !mExit)
- {
- ALOGV("MidiFile::render - signal wait");
- mCondition.wait(mMutex);
- ALOGV("MidiFile::render - signal rx'd");
- }
- if (mExit) {
- mMutex.unlock();
- break;
- }
-
- // render midi data into the input buffer
- //ALOGV("MidiFile::render - rendering audio");
- int num_output = 0;
- EAS_PCM* p = mAudioBuffer;
- for (int i = 0; i < NUM_BUFFERS; i++) {
- result = EAS_Render(mEasData, p, pLibConfig->mixBufferSize, &count);
- if (result != EAS_SUCCESS) {
- ALOGE("EAS_Render returned %ld", result);
- }
- p += count * pLibConfig->numChannels;
- num_output += count * pLibConfig->numChannels * sizeof(EAS_PCM);
- }
-
- // update playback state and position
- // ALOGV("MidiFile::render - updating state");
- EAS_GetLocation(mEasData, mEasHandle, &mPlayTime);
- EAS_State(mEasData, mEasHandle, &mState);
- mMutex.unlock();
-
- // create audio output track if necessary
- if (!mAudioSink->ready()) {
- ALOGV("MidiFile::render - create output track");
- if (createOutputTrack() != NO_ERROR)
- goto threadExit;
- }
-
- // Write data to the audio hardware
- // ALOGV("MidiFile::render - writing to audio output");
- if ((temp = mAudioSink->write(mAudioBuffer, num_output)) < 0) {
- ALOGE("Error in writing:%d",temp);
- return temp;
- }
-
- // start audio output if necessary
- if (!audioStarted) {
- //ALOGV("MidiFile::render - starting audio");
- mAudioSink->start();
- audioStarted = true;
- }
-
- // still playing?
- if ((mState == EAS_STATE_STOPPED) || (mState == EAS_STATE_ERROR) ||
- (mState == EAS_STATE_PAUSED))
- {
- switch(mState) {
- case EAS_STATE_STOPPED:
- {
- ALOGV("MidiFile::render - stopped");
- sendEvent(MEDIA_PLAYBACK_COMPLETE);
- break;
- }
- case EAS_STATE_ERROR:
- {
- ALOGE("MidiFile::render - error");
- sendEvent(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN);
- break;
- }
- case EAS_STATE_PAUSED:
- ALOGV("MidiFile::render - paused");
- break;
- default:
- break;
- }
- mAudioSink->stop();
- audioStarted = false;
- mRender = false;
- }
- }
-
-threadExit:
- mAudioSink.clear();
- if (mAudioBuffer) {
- delete [] mAudioBuffer;
- mAudioBuffer = NULL;
- }
- mMutex.lock();
- mTid = -1;
- mCondition.signal();
- mMutex.unlock();
- return result;
-}
-
-} // end namespace android
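
The deleted player above relied on a hand-rolled thread handshake: the constructor started a render thread and waited on mCondition until the thread announced itself, and the render loop then parked on the same condition until start() set mRender (or release() set mExit). A stripped-down sketch of that pattern, using the same Android utility types; the EAS-specific work is elided:

    // Sketch of the condition-variable handshake the deleted MidiFile used.
    #include <utils/threads.h>               // android::Mutex, android::Condition

    struct RenderLoop {
        android::Mutex mMutex;
        android::Condition mCondition;
        bool mRender = false;
        bool mExit = false;

        void loop() {                        // body of the render thread
            { android::Mutex::Autolock l(mMutex); mCondition.signal(); }  // "thread started"
            for (;;) {
                mMutex.lock();
                while (!mRender && !mExit) mCondition.wait(mMutex);  // parked until start()/release()
                bool exiting = mExit;
                mMutex.unlock();
                if (exiting) break;
                // render a batch of buffers and write them to the audio sink here
            }
        }
    };
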
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
deleted file mode 100644
index 24d59b4..0000000
--- a/media/libmediaplayerservice/MidiFile.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
-**
-** Copyright 2008, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#ifndef ANDROID_MIDIFILE_H
-#define ANDROID_MIDIFILE_H
-
-#include <media/MediaPlayerInterface.h>
-#include <libsonivox/eas.h>
-
-namespace android {
-
-// Note that the name MidiFile is misleading; this actually represents a MIDI file player
-class MidiFile : public MediaPlayerInterface {
-public:
- MidiFile();
- ~MidiFile();
-
- virtual status_t initCheck();
-
- virtual status_t setDataSource(
- const char* path, const KeyedVector<String8, String8> *headers);
-
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurfaceTexture(
- const sp<IGraphicBufferProducer>& bufferProducer)
- { return UNKNOWN_ERROR; }
- virtual status_t prepare();
- virtual status_t prepareAsync();
- virtual status_t start();
- virtual status_t stop();
- virtual status_t seekTo(int msec);
- virtual status_t pause();
- virtual bool isPlaying();
- virtual status_t getCurrentPosition(int* msec);
- virtual status_t getDuration(int* msec);
- virtual status_t release();
- virtual status_t reset();
- virtual status_t setLooping(int loop);
- virtual player_type playerType() { return SONIVOX_PLAYER; }
- virtual status_t invoke(const Parcel& request, Parcel *reply) {
- return INVALID_OPERATION;
- }
- virtual status_t setParameter(int key, const Parcel &request) {
- return INVALID_OPERATION;
- }
- virtual status_t getParameter(int key, Parcel *reply) {
- return INVALID_OPERATION;
- }
-
-
-private:
- status_t createOutputTrack();
- status_t reset_nosync();
- int render();
- void updateState(){ EAS_State(mEasData, mEasHandle, &mState); }
-
- Mutex mMutex;
- Condition mCondition;
- EAS_DATA_HANDLE mEasData;
- EAS_HANDLE mEasHandle;
- EAS_PCM* mAudioBuffer;
- EAS_I32 mPlayTime;
- EAS_I32 mDuration;
- EAS_STATE mState;
- EAS_FILE mFileLocator;
- audio_stream_type_t mStreamType;
- bool mLoop;
- volatile bool mExit;
- bool mPaused;
- volatile bool mRender;
- pid_t mTid;
-
- class MidiFileThread : public Thread {
- public:
- MidiFileThread(MidiFile *midiPlayer) : mMidiFile(midiPlayer) {
- }
-
- protected:
- virtual ~MidiFileThread() {}
-
- private:
- MidiFile *mMidiFile;
-
- bool threadLoop() {
- int result;
- result = mMidiFile->render();
- return false;
- }
-
- MidiFileThread(const MidiFileThread &);
- MidiFileThread &operator=(const MidiFileThread &);
- };
-
- sp<MidiFileThread> mThread;
-};
-
-}; // namespace android
-
-#endif // ANDROID_MIDIFILE_H
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.cpp b/media/libmediaplayerservice/MidiMetadataRetriever.cpp
deleted file mode 100644
index 465209f..0000000
--- a/media/libmediaplayerservice/MidiMetadataRetriever.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
-**
-** Copyright 2009, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MidiMetadataRetriever"
-#include <utils/Log.h>
-
-#include "MidiMetadataRetriever.h"
-#include <media/mediametadataretriever.h>
-
-namespace android {
-
-static status_t ERROR_NOT_OPEN = -1;
-static status_t ERROR_OPEN_FAILED = -2;
-static status_t ERROR_EAS_FAILURE = -3;
-static status_t ERROR_ALLOCATE_FAILED = -4;
-
-void MidiMetadataRetriever::clearMetadataValues()
-{
- ALOGV("clearMetadataValues");
- mMetadataValues[0][0] = '\0';
-}
-
-status_t MidiMetadataRetriever::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers)
-{
- ALOGV("setDataSource: %s", url? url: "NULL pointer");
- Mutex::Autolock lock(mLock);
- clearMetadataValues();
- if (mMidiPlayer == 0) {
- mMidiPlayer = new MidiFile();
- }
- return mMidiPlayer->setDataSource(url, headers);
-}
-
-status_t MidiMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
-{
- ALOGV("setDataSource: fd(%d), offset(%lld), and length(%lld)", fd, offset, length);
- Mutex::Autolock lock(mLock);
- clearMetadataValues();
- if (mMidiPlayer == 0) {
- mMidiPlayer = new MidiFile();
- }
- return mMidiPlayer->setDataSource(fd, offset, length);;
-}
-
-const char* MidiMetadataRetriever::extractMetadata(int keyCode)
-{
- ALOGV("extractMetdata: key(%d)", keyCode);
- Mutex::Autolock lock(mLock);
- if (mMidiPlayer == 0 || mMidiPlayer->initCheck() != NO_ERROR) {
- ALOGE("Midi player is not initialized yet");
- return NULL;
- }
- switch (keyCode) {
- case METADATA_KEY_DURATION:
- {
- if (mMetadataValues[0][0] == '\0') {
- int duration = -1;
- if (mMidiPlayer->getDuration(&duration) != NO_ERROR) {
- ALOGE("failed to get duration");
- return NULL;
- }
- snprintf(mMetadataValues[0], MAX_METADATA_STRING_LENGTH, "%d", duration);
- }
-
- ALOGV("duration: %s ms", mMetadataValues[0]);
- return mMetadataValues[0];
- }
- default:
- ALOGE("Unsupported key code (%d)", keyCode);
- return NULL;
- }
- return NULL;
-}
-
-};
-
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.h b/media/libmediaplayerservice/MidiMetadataRetriever.h
deleted file mode 100644
index 4cee42d..0000000
--- a/media/libmediaplayerservice/MidiMetadataRetriever.h
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
-**
-** Copyright 2009, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-#ifndef ANDROID_MIDIMETADATARETRIEVER_H
-#define ANDROID_MIDIMETADATARETRIEVER_H
-
-#include <utils/threads.h>
-#include <utils/Errors.h>
-#include <media/MediaMetadataRetrieverInterface.h>
-
-#include "MidiFile.h"
-
-namespace android {
-
-class MidiMetadataRetriever : public MediaMetadataRetrieverInterface {
-public:
- MidiMetadataRetriever() {}
- ~MidiMetadataRetriever() {}
-
- virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
-
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual const char* extractMetadata(int keyCode);
-
-private:
- static const uint32_t MAX_METADATA_STRING_LENGTH = 128;
- void clearMetadataValues();
-
- Mutex mLock;
- sp<MidiFile> mMidiPlayer;
- char mMetadataValues[1][MAX_METADATA_STRING_LENGTH];
-};
-
-}; // namespace android
-
-#endif // ANDROID_MIDIMETADATARETRIEVER_H
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index de61d9b..b37aee3 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -54,8 +54,10 @@ status_t StagefrightPlayer::setUID(uid_t uid) {
}
status_t StagefrightPlayer::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
- return mPlayer->setDataSource(url, headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+ return mPlayer->setDataSource(httpService, url, headers);
}
// Warning: The filedescriptor passed into this method will only be valid until
@@ -187,7 +189,7 @@ status_t StagefrightPlayer::getParameter(int key, Parcel *reply) {
}
status_t StagefrightPlayer::getMetadata(
- const media::Metadata::Filter& ids, Parcel *records) {
+ const media::Metadata::Filter& /* ids */, Parcel *records) {
using media::Metadata;
uint32_t flags = mPlayer->flags();
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 600945e..e6c30ff 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -34,7 +34,9 @@ public:
virtual status_t setUID(uid_t uid);
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 4da74e1..86639cb 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -19,14 +19,18 @@
#include <inttypes.h>
#include <utils/Log.h>
+#include "WebmWriter.h"
#include "StagefrightRecorder.h"
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <media/IMediaPlayerService.h>
-#include <media/openmax/OMX_Audio.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/ACodec.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/AACWriter.h>
@@ -36,13 +40,12 @@
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
-#include <media/stagefright/SurfaceMediaSource.h>
#include <media/MediaProfiles.h>
#include <camera/ICamera.h>
#include <camera/CameraParameters.h>
-#include <gui/Surface.h>
#include <utils/Errors.h>
#include <sys/types.h>
@@ -72,8 +75,7 @@ StagefrightRecorder::StagefrightRecorder()
mAudioSource(AUDIO_SOURCE_CNT),
mVideoSource(VIDEO_SOURCE_LIST_END),
mCaptureTimeLapse(false),
- mStarted(false),
- mSurfaceMediaSource(NULL) {
+ mStarted(false) {
ALOGV("Constructor");
reset();
@@ -82,10 +84,19 @@ StagefrightRecorder::StagefrightRecorder()
StagefrightRecorder::~StagefrightRecorder() {
ALOGV("Destructor");
stop();
+
+ if (mLooper != NULL) {
+ mLooper->stop();
+ }
}
status_t StagefrightRecorder::init() {
ALOGV("init");
+
+ mLooper = new ALooper;
+ mLooper->setName("recorder_looper");
+ mLooper->start();
+
return OK;
}
@@ -94,13 +105,13 @@ status_t StagefrightRecorder::init() {
// while encoding GL Frames
sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const {
ALOGV("Get SurfaceMediaSource");
- return mSurfaceMediaSource->getBufferQueue();
+ return mGraphicBufferProducer;
}
status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
ALOGV("setAudioSource: %d", as);
if (as < AUDIO_SOURCE_DEFAULT ||
- as >= AUDIO_SOURCE_CNT) {
+ (as >= AUDIO_SOURCE_CNT && as != AUDIO_SOURCE_FM_TUNER)) {
ALOGE("Invalid audio source: %d", as);
return BAD_VALUE;
}
@@ -173,11 +184,7 @@ status_t StagefrightRecorder::setVideoEncoder(video_encoder ve) {
return BAD_VALUE;
}
- if (ve == VIDEO_ENCODER_DEFAULT) {
- mVideoEncoder = VIDEO_ENCODER_H263;
- } else {
- mVideoEncoder = ve;
- }
+ mVideoEncoder = ve;
return OK;
}
@@ -234,7 +241,7 @@ status_t StagefrightRecorder::setPreviewSurface(const sp<IGraphicBufferProducer>
return OK;
}
-status_t StagefrightRecorder::setOutputFile(const char *path) {
+status_t StagefrightRecorder::setOutputFile(const char * /* path */) {
ALOGE("setOutputFile(const char*) must not be called");
// We don't actually support this at all, as the media_server process
// no longer has permissions to create files.
@@ -253,6 +260,9 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng
return -EBADF;
}
+ // start with a clean, empty file
+ ftruncate(fd, 0);
+
if (mOutputFd >= 0) {
::close(mOutputFd);
}
@@ -681,10 +691,10 @@ status_t StagefrightRecorder::setParameter(
return setParamTimeLapseEnable(timeLapseEnable);
}
} else if (key == "time-between-time-lapse-frame-capture") {
- int64_t timeBetweenTimeLapseFrameCaptureMs;
- if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+ int64_t timeBetweenTimeLapseFrameCaptureUs;
+ if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureUs)) {
return setParamTimeBetweenTimeLapseFrameCapture(
- 1000LL * timeBetweenTimeLapseFrameCaptureMs);
+ timeBetweenTimeLapseFrameCaptureUs);
}
} else {
ALOGE("setParameter: failed to find key %s", key.string());
@@ -739,19 +749,15 @@ status_t StagefrightRecorder::setClientName(const String16& clientName) {
return OK;
}
-status_t StagefrightRecorder::prepare() {
- return OK;
-}
-
-status_t StagefrightRecorder::start() {
- CHECK_GE(mOutputFd, 0);
+status_t StagefrightRecorder::prepareInternal() {
+ ALOGV("prepare");
+ if (mOutputFd < 0) {
+ ALOGE("Output file descriptor is invalid");
+ return INVALID_OPERATION;
+ }
// Get UID here for permission checking
mClientUid = IPCThreadState::self()->getCallingUid();
- if (mWriter != NULL) {
- ALOGE("File writer is not avaialble");
- return UNKNOWN_ERROR;
- }
status_t status = OK;
@@ -759,31 +765,103 @@ status_t StagefrightRecorder::start() {
case OUTPUT_FORMAT_DEFAULT:
case OUTPUT_FORMAT_THREE_GPP:
case OUTPUT_FORMAT_MPEG_4:
- status = startMPEG4Recording();
+ case OUTPUT_FORMAT_WEBM:
+ status = setupMPEG4orWEBMRecording();
break;
case OUTPUT_FORMAT_AMR_NB:
case OUTPUT_FORMAT_AMR_WB:
- status = startAMRRecording();
+ status = setupAMRRecording();
break;
case OUTPUT_FORMAT_AAC_ADIF:
case OUTPUT_FORMAT_AAC_ADTS:
- status = startAACRecording();
+ status = setupAACRecording();
break;
case OUTPUT_FORMAT_RTP_AVP:
- status = startRTPRecording();
+ status = setupRTPRecording();
+ break;
+
+ case OUTPUT_FORMAT_MPEG2TS:
+ status = setupMPEG2TSRecording();
+ break;
+
+ default:
+ ALOGE("Unsupported output file format: %d", mOutputFormat);
+ status = UNKNOWN_ERROR;
+ break;
+ }
+
+ return status;
+}
+
+status_t StagefrightRecorder::prepare() {
+ if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+ return prepareInternal();
+ }
+ return OK;
+}
+
+status_t StagefrightRecorder::start() {
+ ALOGV("start");
+ if (mOutputFd < 0) {
+ ALOGE("Output file descriptor is invalid");
+ return INVALID_OPERATION;
+ }
+
+ status_t status = OK;
+
+ if (mVideoSource != VIDEO_SOURCE_SURFACE) {
+ status = prepareInternal();
+ if (status != OK) {
+ return status;
+ }
+ }
+
+ if (mWriter == NULL) {
+ ALOGE("File writer is not avaialble");
+ return UNKNOWN_ERROR;
+ }
+
+ switch (mOutputFormat) {
+ case OUTPUT_FORMAT_DEFAULT:
+ case OUTPUT_FORMAT_THREE_GPP:
+ case OUTPUT_FORMAT_MPEG_4:
+ case OUTPUT_FORMAT_WEBM:
+ {
+ bool isMPEG4 = true;
+ if (mOutputFormat == OUTPUT_FORMAT_WEBM) {
+ isMPEG4 = false;
+ }
+ sp<MetaData> meta = new MetaData;
+ setupMPEG4orWEBMMetaData(&meta);
+ status = mWriter->start(meta.get());
break;
+ }
+ case OUTPUT_FORMAT_AMR_NB:
+ case OUTPUT_FORMAT_AMR_WB:
+ case OUTPUT_FORMAT_AAC_ADIF:
+ case OUTPUT_FORMAT_AAC_ADTS:
+ case OUTPUT_FORMAT_RTP_AVP:
case OUTPUT_FORMAT_MPEG2TS:
- status = startMPEG2TSRecording();
+ {
+ status = mWriter->start();
break;
+ }
default:
+ {
ALOGE("Unsupported output file format: %d", mOutputFormat);
status = UNKNOWN_ERROR;
break;
+ }
+ }
+
+ if (status != OK) {
+ mWriter.clear();
+ mWriter = NULL;
}
if ((status == OK) && (!mStarted)) {
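
The restructuring above separates writer setup from writer start: prepareInternal() selects and configures the MediaWriter for the output format, while start() only builds the session metadata and calls mWriter->start(). For a surface source that matters because the encoder (and its input surface) must already exist at prepare() time. A sketch of the call order a client now drives, not code from the patch:

    // Surface recording: encoder and writer are set up at prepare(), started at start().
    recorder->setVideoSource(VIDEO_SOURCE_SURFACE);
    recorder->setOutputFormat(OUTPUT_FORMAT_MPEG_4);
    recorder->setOutputFile(fd, 0 /* offset */, 0 /* length */);
    recorder->prepare();                                   // -> prepareInternal(): writer + encoder created
    sp<IGraphicBufferProducer> surface = recorder->querySurfaceMediaSource();
    recorder->start();                                     // -> mWriter->start(meta) with MPEG4/WebM metadata
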
@@ -817,58 +895,58 @@ sp<MediaSource> StagefrightRecorder::createAudioSource() {
return NULL;
}
- sp<MetaData> encMeta = new MetaData;
+ sp<AMessage> format = new AMessage;
const char *mime;
switch (mAudioEncoder) {
case AUDIO_ENCODER_AMR_NB:
case AUDIO_ENCODER_DEFAULT:
- mime = MEDIA_MIMETYPE_AUDIO_AMR_NB;
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
break;
case AUDIO_ENCODER_AMR_WB:
- mime = MEDIA_MIMETYPE_AUDIO_AMR_WB;
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
break;
case AUDIO_ENCODER_AAC:
- mime = MEDIA_MIMETYPE_AUDIO_AAC;
- encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectLC);
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ format->setInt32("aac-profile", OMX_AUDIO_AACObjectLC);
break;
case AUDIO_ENCODER_HE_AAC:
- mime = MEDIA_MIMETYPE_AUDIO_AAC;
- encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectHE);
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ format->setInt32("aac-profile", OMX_AUDIO_AACObjectHE);
break;
case AUDIO_ENCODER_AAC_ELD:
- mime = MEDIA_MIMETYPE_AUDIO_AAC;
- encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectELD);
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ format->setInt32("aac-profile", OMX_AUDIO_AACObjectELD);
break;
default:
ALOGE("Unknown audio encoder: %d", mAudioEncoder);
return NULL;
}
- encMeta->setCString(kKeyMIMEType, mime);
int32_t maxInputSize;
CHECK(audioSource->getFormat()->findInt32(
kKeyMaxInputSize, &maxInputSize));
- encMeta->setInt32(kKeyMaxInputSize, maxInputSize);
- encMeta->setInt32(kKeyChannelCount, mAudioChannels);
- encMeta->setInt32(kKeySampleRate, mSampleRate);
- encMeta->setInt32(kKeyBitRate, mAudioBitRate);
+ format->setInt32("max-input-size", maxInputSize);
+ format->setInt32("channel-count", mAudioChannels);
+ format->setInt32("sample-rate", mSampleRate);
+ format->setInt32("bitrate", mAudioBitRate);
if (mAudioTimeScale > 0) {
- encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
+ format->setInt32("time-scale", mAudioTimeScale);
}
- OMXClient client;
- CHECK_EQ(client.connect(), (status_t)OK);
sp<MediaSource> audioEncoder =
- OMXCodec::Create(client.interface(), encMeta,
- true /* createEncoder */, audioSource);
+ MediaCodecSource::Create(mLooper, format, audioSource);
mAudioSourceNode = audioSource;
+ if (audioEncoder == NULL) {
+ ALOGE("Failed to create audio encoder");
+ }
+
return audioEncoder;
}
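
createAudioSource() above now describes the encoder with MediaCodec-style AMessage keys and instantiates it through MediaCodecSource rather than OMXCodec. A condensed sketch of the AAC path, assuming the looper and AudioSource are already set up as in the hunk; the channel count, sample rate, and bitrate below are illustrative stand-ins for the recorder's member values:

    // MediaCodec-style keys drive the audio encoder now.
    sp<AMessage> format = new AMessage;
    format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
    format->setInt32("aac-profile", OMX_AUDIO_AACObjectLC);
    format->setInt32("channel-count", 1);        // recorder uses mAudioChannels
    format->setInt32("sample-rate", 48000);      // recorder uses mSampleRate
    format->setInt32("bitrate", 96000);          // recorder uses mAudioBitRate
    sp<MediaSource> encoder = MediaCodecSource::Create(looper, format, audioSource);
    if (encoder == NULL) {
        ALOGE("Failed to create audio encoder");
    }
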
-status_t StagefrightRecorder::startAACRecording() {
+status_t StagefrightRecorder::setupAACRecording() {
// FIXME:
// Add support for OUTPUT_FORMAT_AAC_ADIF
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_AAC_ADTS);
@@ -879,16 +957,10 @@ status_t StagefrightRecorder::startAACRecording() {
CHECK(mAudioSource != AUDIO_SOURCE_CNT);
mWriter = new AACWriter(mOutputFd);
- status_t status = startRawAudioRecording();
- if (status != OK) {
- mWriter.clear();
- mWriter = NULL;
- }
-
- return status;
+ return setupRawAudioRecording();
}
-status_t StagefrightRecorder::startAMRRecording() {
+status_t StagefrightRecorder::setupAMRRecording() {
CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
mOutputFormat == OUTPUT_FORMAT_AMR_WB);
@@ -908,16 +980,11 @@ status_t StagefrightRecorder::startAMRRecording() {
}
mWriter = new AMRWriter(mOutputFd);
- status_t status = startRawAudioRecording();
- if (status != OK) {
- mWriter.clear();
- mWriter = NULL;
- }
- return status;
+ return setupRawAudioRecording();
}
-status_t StagefrightRecorder::startRawAudioRecording() {
- if (mAudioSource >= AUDIO_SOURCE_CNT) {
+status_t StagefrightRecorder::setupRawAudioRecording() {
+ if (mAudioSource >= AUDIO_SOURCE_CNT && mAudioSource != AUDIO_SOURCE_FM_TUNER) {
ALOGE("Invalid audio source: %d", mAudioSource);
return BAD_VALUE;
}
@@ -942,12 +1009,11 @@ status_t StagefrightRecorder::startRawAudioRecording() {
mWriter->setMaxFileSize(mMaxFileSizeBytes);
}
mWriter->setListener(mListener);
- mWriter->start();
return OK;
}
-status_t StagefrightRecorder::startRTPRecording() {
+status_t StagefrightRecorder::setupRTPRecording() {
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
if ((mAudioSource != AUDIO_SOURCE_CNT
@@ -967,6 +1033,7 @@ status_t StagefrightRecorder::startRTPRecording() {
if (mAudioSource != AUDIO_SOURCE_CNT) {
source = createAudioSource();
} else {
+ setDefaultVideoEncoderIfNecessary();
sp<MediaSource> mediaSource;
status_t err = setupMediaSource(&mediaSource);
@@ -974,7 +1041,7 @@ status_t StagefrightRecorder::startRTPRecording() {
return err;
}
- err = setupVideoEncoder(mediaSource, mVideoBitRate, &source);
+ err = setupVideoEncoder(mediaSource, &source);
if (err != OK) {
return err;
}
@@ -984,10 +1051,10 @@ status_t StagefrightRecorder::startRTPRecording() {
mWriter->addSource(source);
mWriter->setListener(mListener);
- return mWriter->start();
+ return OK;
}
-status_t StagefrightRecorder::startMPEG2TSRecording() {
+status_t StagefrightRecorder::setupMPEG2TSRecording() {
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
@@ -1008,6 +1075,7 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
if (mVideoSource < VIDEO_SOURCE_LIST_END) {
if (mVideoEncoder != VIDEO_ENCODER_H264) {
+ ALOGE("MPEG2TS recording only supports H.264 encoding!");
return ERROR_UNSUPPORTED;
}
@@ -1018,7 +1086,7 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
}
sp<MediaSource> encoder;
- err = setupVideoEncoder(mediaSource, mVideoBitRate, &encoder);
+ err = setupVideoEncoder(mediaSource, &encoder);
if (err != OK) {
return err;
@@ -1037,11 +1105,17 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
mWriter = writer;
- return mWriter->start();
+ return OK;
}
void StagefrightRecorder::clipVideoFrameRate() {
ALOGV("clipVideoFrameRate: encoder %d", mVideoEncoder);
+ if (mFrameRate == -1) {
+ mFrameRate = mEncoderProfiles->getCamcorderProfileParamByName(
+ "vid.fps", mCameraId, CAMCORDER_QUALITY_LOW);
+ ALOGW("Using default video fps %d", mFrameRate);
+ }
+
int minFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.fps.min", mVideoEncoder);
int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
@@ -1101,6 +1175,7 @@ status_t StagefrightRecorder::checkVideoEncoderCapabilities(
client.interface(),
(mVideoEncoder == VIDEO_ENCODER_H263 ? MEDIA_MIMETYPE_VIDEO_H263 :
mVideoEncoder == VIDEO_ENCODER_MPEG_4_SP ? MEDIA_MIMETYPE_VIDEO_MPEG4 :
+ mVideoEncoder == VIDEO_ENCODER_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 :
mVideoEncoder == VIDEO_ENCODER_H264 ? MEDIA_MIMETYPE_VIDEO_AVC : ""),
false /* decoder */, true /* hwCodec */, &codecs);
*supportsCameraSourceMetaDataMode = codecs.size() > 0;
@@ -1173,6 +1248,31 @@ void StagefrightRecorder::setDefaultProfileIfNecessary() {
if (videoCodec == VIDEO_ENCODER_H264) {
ALOGI("Force to use AVC baseline profile");
setParamVideoEncoderProfile(OMX_VIDEO_AVCProfileBaseline);
+ // set 0 for invalid levels - this will be rejected by the
+ // codec if it cannot handle it during configure
+ setParamVideoEncoderLevel(ACodec::getAVCLevelFor(
+ videoFrameWidth, videoFrameHeight, videoFrameRate, videoBitRate));
+ }
+ }
+}
+
+void StagefrightRecorder::setDefaultVideoEncoderIfNecessary() {
+ if (mVideoEncoder == VIDEO_ENCODER_DEFAULT) {
+ if (mOutputFormat == OUTPUT_FORMAT_WEBM) {
+ // default to VP8 for WEBM recording
+ mVideoEncoder = VIDEO_ENCODER_VP8;
+ } else {
+ // pick the default encoder for CAMCORDER_QUALITY_LOW
+ int videoCodec = mEncoderProfiles->getCamcorderProfileParamByName(
+ "vid.codec", mCameraId, CAMCORDER_QUALITY_LOW);
+
+ if (videoCodec > VIDEO_ENCODER_DEFAULT &&
+ videoCodec < VIDEO_ENCODER_LIST_END) {
+ mVideoEncoder = (video_encoder)videoCodec;
+ } else {
+ // default to H.264 if camcorder profile not available
+ mVideoEncoder = VIDEO_ENCODER_H264;
+ }
}
}
}
@@ -1278,49 +1378,14 @@ status_t StagefrightRecorder::setupMediaSource(
return err;
}
*mediaSource = cameraSource;
- } else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
- // If using GRAlloc buffers, setup surfacemediasource.
- // Later a handle to that will be passed
- // to the client side when queried
- status_t err = setupSurfaceMediaSource();
- if (err != OK) {
- return err;
- }
- *mediaSource = mSurfaceMediaSource;
+ } else if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+ *mediaSource = NULL;
} else {
return INVALID_OPERATION;
}
return OK;
}
-// setupSurfaceMediaSource creates a source with the given
-// width and height and framerate.
-// TODO: This could go in a static function inside SurfaceMediaSource
-// similar to that in CameraSource
-status_t StagefrightRecorder::setupSurfaceMediaSource() {
- status_t err = OK;
- mSurfaceMediaSource = new SurfaceMediaSource(mVideoWidth, mVideoHeight);
- if (mSurfaceMediaSource == NULL) {
- return NO_INIT;
- }
-
- if (mFrameRate == -1) {
- int32_t frameRate = 0;
- CHECK (mSurfaceMediaSource->getFormat()->findInt32(
- kKeyFrameRate, &frameRate));
- ALOGI("Frame rate is not explicitly set. Use the current frame "
- "rate (%d fps)", frameRate);
- mFrameRate = frameRate;
- } else {
- err = mSurfaceMediaSource->setFrameRate(mFrameRate);
- }
- CHECK(mFrameRate != -1);
-
- mIsMetaDataStoredInVideoBuffers =
- mSurfaceMediaSource->isMetaDataStoredInVideoBuffers();
- return err;
-}
-
status_t StagefrightRecorder::setupCameraSource(
sp<CameraSource> *cameraSource) {
status_t err = OK;
@@ -1384,25 +1449,26 @@ status_t StagefrightRecorder::setupCameraSource(
status_t StagefrightRecorder::setupVideoEncoder(
sp<MediaSource> cameraSource,
- int32_t videoBitRate,
sp<MediaSource> *source) {
source->clear();
- sp<MetaData> enc_meta = new MetaData;
- enc_meta->setInt32(kKeyBitRate, videoBitRate);
- enc_meta->setInt32(kKeyFrameRate, mFrameRate);
+ sp<AMessage> format = new AMessage();
switch (mVideoEncoder) {
case VIDEO_ENCODER_H263:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_H263);
break;
case VIDEO_ENCODER_MPEG_4_SP:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_MPEG4);
break;
case VIDEO_ENCODER_H264:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
+ break;
+
+ case VIDEO_ENCODER_VP8:
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_VP8);
break;
default:
@@ -1410,59 +1476,80 @@ status_t StagefrightRecorder::setupVideoEncoder(
break;
}
- sp<MetaData> meta = cameraSource->getFormat();
+ if (cameraSource != NULL) {
+ sp<MetaData> meta = cameraSource->getFormat();
+
+ int32_t width, height, stride, sliceHeight, colorFormat;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+ CHECK(meta->findInt32(kKeyStride, &stride));
+ CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+ CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+
+ format->setInt32("width", width);
+ format->setInt32("height", height);
+ format->setInt32("stride", stride);
+ format->setInt32("slice-height", sliceHeight);
+ format->setInt32("color-format", colorFormat);
+ } else {
+ format->setInt32("width", mVideoWidth);
+ format->setInt32("height", mVideoHeight);
+ format->setInt32("stride", mVideoWidth);
+ format->setInt32("slice-height", mVideoWidth);
+ format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
+
+ // set up time lapse/slow motion for surface source
+ if (mCaptureTimeLapse) {
+ if (mTimeBetweenTimeLapseFrameCaptureUs <= 0) {
+ ALOGE("Invalid mTimeBetweenTimeLapseFrameCaptureUs value: %lld",
+ mTimeBetweenTimeLapseFrameCaptureUs);
+ return BAD_VALUE;
+ }
+ format->setInt64("time-lapse",
+ mTimeBetweenTimeLapseFrameCaptureUs);
+ }
+ }
- int32_t width, height, stride, sliceHeight, colorFormat;
- CHECK(meta->findInt32(kKeyWidth, &width));
- CHECK(meta->findInt32(kKeyHeight, &height));
- CHECK(meta->findInt32(kKeyStride, &stride));
- CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
- CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+ format->setInt32("bitrate", mVideoBitRate);
+ format->setInt32("frame-rate", mFrameRate);
+ format->setInt32("i-frame-interval", mIFramesIntervalSec);
- enc_meta->setInt32(kKeyWidth, width);
- enc_meta->setInt32(kKeyHeight, height);
- enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
- enc_meta->setInt32(kKeyStride, stride);
- enc_meta->setInt32(kKeySliceHeight, sliceHeight);
- enc_meta->setInt32(kKeyColorFormat, colorFormat);
if (mVideoTimeScale > 0) {
- enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
+ format->setInt32("time-scale", mVideoTimeScale);
}
if (mVideoEncoderProfile != -1) {
- enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
+ format->setInt32("profile", mVideoEncoderProfile);
}
if (mVideoEncoderLevel != -1) {
- enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+ format->setInt32("level", mVideoEncoderLevel);
}
- OMXClient client;
- CHECK_EQ(client.connect(), (status_t)OK);
-
- uint32_t encoder_flags = 0;
+ uint32_t flags = 0;
if (mIsMetaDataStoredInVideoBuffers) {
- encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+ flags |= MediaCodecSource::FLAG_USE_METADATA_INPUT;
}
- // Do not wait for all the input buffers to become available.
- // This give timelapse video recording faster response in
- // receiving output from video encoder component.
- if (mCaptureTimeLapse) {
- encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+ if (cameraSource == NULL) {
+ flags |= MediaCodecSource::FLAG_USE_SURFACE_INPUT;
}
- sp<MediaSource> encoder = OMXCodec::Create(
- client.interface(), enc_meta,
- true /* createEncoder */, cameraSource,
- NULL, encoder_flags);
+ sp<MediaCodecSource> encoder =
+ MediaCodecSource::Create(mLooper, format, cameraSource, flags);
if (encoder == NULL) {
- ALOGW("Failed to create the encoder");
+ ALOGE("Failed to create video encoder");
// When the encoder fails to be created, we need
// release the camera source due to the camera's lock
// and unlock mechanism.
- cameraSource->stop();
+ if (cameraSource != NULL) {
+ cameraSource->stop();
+ }
return UNKNOWN_ERROR;
}
+ if (cameraSource == NULL) {
+ mGraphicBufferProducer = encoder->getGraphicBufferProducer();
+ }
+
*source = encoder;
return OK;
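
In the video path above, when there is no camera source the encoder is created with FLAG_USE_SURFACE_INPUT and the recorder later hands the encoder's input surface back to the client through mGraphicBufferProducer. A minimal sketch of that branch, condensed from the hunk; the dimensions, bitrate, and frame rate are illustrative stand-ins for the recorder's members:

    // Surface-input branch: no camera source, so the encoder owns the input surface.
    sp<AMessage> format = new AMessage;
    format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
    format->setInt32("width", 1280);                     // recorder uses mVideoWidth
    format->setInt32("height", 720);                     // recorder uses mVideoHeight
    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
    format->setInt32("bitrate", 2000000);
    format->setInt32("frame-rate", 30);
    format->setInt32("i-frame-interval", 1);

    sp<MediaCodecSource> encoder = MediaCodecSource::Create(
            looper, format, NULL /* source */, MediaCodecSource::FLAG_USE_SURFACE_INPUT);
    sp<IGraphicBufferProducer> inputSurface = encoder->getGraphicBufferProducer();
    // The client queues frames onto inputSurface; the encoder pulls them from there.
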
@@ -1496,18 +1583,20 @@ status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
return OK;
}
-status_t StagefrightRecorder::setupMPEG4Recording(
- int outputFd,
- int32_t videoWidth, int32_t videoHeight,
- int32_t videoBitRate,
- int32_t *totalBitRate,
- sp<MediaWriter> *mediaWriter) {
- mediaWriter->clear();
- *totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4orWEBMRecording() {
+ mWriter.clear();
+ mTotalBitRate = 0;
+
status_t err = OK;
- sp<MediaWriter> writer = new MPEG4Writer(outputFd);
+ sp<MediaWriter> writer;
+ if (mOutputFormat == OUTPUT_FORMAT_WEBM) {
+ writer = new WebmWriter(mOutputFd);
+ } else {
+ writer = new MPEG4Writer(mOutputFd);
+ }
if (mVideoSource < VIDEO_SOURCE_LIST_END) {
+ setDefaultVideoEncoderIfNecessary();
sp<MediaSource> mediaSource;
err = setupMediaSource(&mediaSource);
@@ -1516,31 +1605,34 @@ status_t StagefrightRecorder::setupMPEG4Recording(
}
sp<MediaSource> encoder;
- err = setupVideoEncoder(mediaSource, videoBitRate, &encoder);
+ err = setupVideoEncoder(mediaSource, &encoder);
if (err != OK) {
return err;
}
writer->addSource(encoder);
- *totalBitRate += videoBitRate;
- }
-
- // Audio source is added at the end if it exists.
- // This help make sure that the "recoding" sound is suppressed for
- // camcorder applications in the recorded files.
- if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) {
- err = setupAudioEncoder(writer);
- if (err != OK) return err;
- *totalBitRate += mAudioBitRate;
- }
+ mTotalBitRate += mVideoBitRate;
+ }
+
+ if (mOutputFormat != OUTPUT_FORMAT_WEBM) {
+ // Audio source is added at the end if it exists.
+ // This helps make sure that the "recording" sound is suppressed for
+ // camcorder applications in the recorded files.
+ // TODO Audio source is currently unsupported for webm output; vorbis encoder needed.
+ if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) {
+ err = setupAudioEncoder(writer);
+ if (err != OK) return err;
+ mTotalBitRate += mAudioBitRate;
+ }
- if (mInterleaveDurationUs > 0) {
- reinterpret_cast<MPEG4Writer *>(writer.get())->
- setInterleaveDuration(mInterleaveDurationUs);
- }
- if (mLongitudex10000 > -3600000 && mLatitudex10000 > -3600000) {
- reinterpret_cast<MPEG4Writer *>(writer.get())->
- setGeoData(mLatitudex10000, mLongitudex10000);
+ if (mInterleaveDurationUs > 0) {
+ reinterpret_cast<MPEG4Writer *>(writer.get())->
+ setInterleaveDuration(mInterleaveDurationUs);
+ }
+ if (mLongitudex10000 > -3600000 && mLatitudex10000 > -3600000) {
+ reinterpret_cast<MPEG4Writer *>(writer.get())->
+ setGeoData(mLatitudex10000, mLongitudex10000);
+ }
}
if (mMaxFileDurationUs != 0) {
writer->setMaxFileDuration(mMaxFileDurationUs);
@@ -1548,54 +1640,39 @@ status_t StagefrightRecorder::setupMPEG4Recording(
if (mMaxFileSizeBytes != 0) {
writer->setMaxFileSize(mMaxFileSizeBytes);
}
-
- mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+ if (mVideoSource == VIDEO_SOURCE_DEFAULT
+ || mVideoSource == VIDEO_SOURCE_CAMERA) {
+ mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+ } else if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+ // surface source doesn't need large initial delay
+ mStartTimeOffsetMs = 200;
+ }
if (mStartTimeOffsetMs > 0) {
- reinterpret_cast<MPEG4Writer *>(writer.get())->
- setStartTimeOffsetMs(mStartTimeOffsetMs);
+ writer->setStartTimeOffsetMs(mStartTimeOffsetMs);
}
writer->setListener(mListener);
- *mediaWriter = writer;
+ mWriter = writer;
return OK;
}
-void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
- sp<MetaData> *meta) {
+void StagefrightRecorder::setupMPEG4orWEBMMetaData(sp<MetaData> *meta) {
+ int64_t startTimeUs = systemTime() / 1000;
(*meta)->setInt64(kKeyTime, startTimeUs);
(*meta)->setInt32(kKeyFileType, mOutputFormat);
- (*meta)->setInt32(kKeyBitRate, totalBitRate);
- (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+ (*meta)->setInt32(kKeyBitRate, mTotalBitRate);
if (mMovieTimeScale > 0) {
(*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
}
- if (mTrackEveryTimeDurationUs > 0) {
- (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
- }
- if (mRotationDegrees != 0) {
- (*meta)->setInt32(kKeyRotation, mRotationDegrees);
- }
-}
-
-status_t StagefrightRecorder::startMPEG4Recording() {
- int32_t totalBitRate;
- status_t err = setupMPEG4Recording(
- mOutputFd, mVideoWidth, mVideoHeight,
- mVideoBitRate, &totalBitRate, &mWriter);
- if (err != OK) {
- return err;
- }
-
- int64_t startTimeUs = systemTime() / 1000;
- sp<MetaData> meta = new MetaData;
- setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
-
- err = mWriter->start(meta.get());
- if (err != OK) {
- return err;
+ if (mOutputFormat != OUTPUT_FORMAT_WEBM) {
+ (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+ if (mTrackEveryTimeDurationUs > 0) {
+ (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
+ }
+ if (mRotationDegrees != 0) {
+ (*meta)->setInt32(kKeyRotation, mRotationDegrees);
+ }
}
-
- return OK;
}
status_t StagefrightRecorder::pause() {
@@ -1637,6 +1714,8 @@ status_t StagefrightRecorder::stop() {
mWriter.clear();
}
+ mGraphicBufferProducer.clear();
+
if (mOutputFd >= 0) {
::close(mOutputFd);
mOutputFd = -1;
@@ -1656,7 +1735,6 @@ status_t StagefrightRecorder::stop() {
addBatteryData(params);
}
-
return err;
}
@@ -1678,7 +1756,7 @@ status_t StagefrightRecorder::reset() {
// Default parameters
mOutputFormat = OUTPUT_FORMAT_THREE_GPP;
mAudioEncoder = AUDIO_ENCODER_AMR_NB;
- mVideoEncoder = VIDEO_ENCODER_H263;
+ mVideoEncoder = VIDEO_ENCODER_DEFAULT;
mVideoWidth = 176;
mVideoHeight = 144;
mFrameRate = -1;
@@ -1708,6 +1786,7 @@ status_t StagefrightRecorder::reset() {
mRotationDegrees = 0;
mLatitudex10000 = -3600000;
mLongitudex10000 = -3600000;
+ mTotalBitRate = 0;
mOutputFd = -1;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 31f09e0..54c38d3 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -37,6 +37,7 @@ struct AudioSource;
class MediaProfiles;
class IGraphicBufferProducer;
class SurfaceMediaSource;
+class ALooper;
struct StagefrightRecorder : public MediaRecorderBase {
StagefrightRecorder();
@@ -106,6 +107,7 @@ private:
int32_t mLatitudex10000;
int32_t mLongitudex10000;
int32_t mStartTimeOffsetMs;
+ int32_t mTotalBitRate;
bool mCaptureTimeLapse;
int64_t mTimeBetweenTimeLapseFrameCaptureUs;
@@ -122,22 +124,17 @@ private:
// An <IGraphicBufferProducer> pointer
// will be sent to the client side using which the
// frame buffers will be queued and dequeued
- sp<SurfaceMediaSource> mSurfaceMediaSource;
-
- status_t setupMPEG4Recording(
- int outputFd,
- int32_t videoWidth, int32_t videoHeight,
- int32_t videoBitRate,
- int32_t *totalBitRate,
- sp<MediaWriter> *mediaWriter);
- void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
- sp<MetaData> *meta);
- status_t startMPEG4Recording();
- status_t startAMRRecording();
- status_t startAACRecording();
- status_t startRawAudioRecording();
- status_t startRTPRecording();
- status_t startMPEG2TSRecording();
+ sp<IGraphicBufferProducer> mGraphicBufferProducer;
+ sp<ALooper> mLooper;
+
+ status_t prepareInternal();
+ status_t setupMPEG4orWEBMRecording();
+ void setupMPEG4orWEBMMetaData(sp<MetaData> *meta);
+ status_t setupAMRRecording();
+ status_t setupAACRecording();
+ status_t setupRawAudioRecording();
+ status_t setupRTPRecording();
+ status_t setupMPEG2TSRecording();
sp<MediaSource> createAudioSource();
status_t checkVideoEncoderCapabilities(
bool *supportsCameraSourceMetaDataMode);
@@ -147,14 +144,8 @@ private:
// depending on the videosource type
status_t setupMediaSource(sp<MediaSource> *mediaSource);
status_t setupCameraSource(sp<CameraSource> *cameraSource);
- // setup the surfacemediasource for the encoder
- status_t setupSurfaceMediaSource();
-
status_t setupAudioEncoder(const sp<MediaWriter>& writer);
- status_t setupVideoEncoder(
- sp<MediaSource> cameraSource,
- int32_t videoBitRate,
- sp<MediaSource> *source);
+ status_t setupVideoEncoder(sp<MediaSource> cameraSource, sp<MediaSource> *source);
// Encoding parameter handling utilities
status_t setParameter(const String8 &key, const String8 &value);
@@ -187,6 +178,7 @@ private:
void clipAudioSampleRate();
void clipNumberOfAudioChannels();
void setDefaultProfileIfNecessary();
+ void setDefaultVideoEncoderIfNecessary();
StagefrightRecorder(const StagefrightRecorder &);
diff --git a/media/libmediaplayerservice/TestPlayerStub.cpp b/media/libmediaplayerservice/TestPlayerStub.cpp
index 5d9728a..c8bf6c5 100644
--- a/media/libmediaplayerservice/TestPlayerStub.cpp
+++ b/media/libmediaplayerservice/TestPlayerStub.cpp
@@ -45,7 +45,7 @@ bool isTestBuild()
{
char prop[PROPERTY_VALUE_MAX] = { '\0', };
- property_get(kBuildTypePropName, prop, '\0');
+ property_get(kBuildTypePropName, prop, "\0");
return strcmp(prop, kEngBuild) == 0 || strcmp(prop, kTestBuild) == 0;
}
@@ -113,7 +113,9 @@ status_t TestPlayerStub::parseUrl()
// Create the test player.
// Call setDataSource on the test player with the url in param.
status_t TestPlayerStub::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
if (!isTestUrl(url) || NULL != mHandle) {
return INVALID_OPERATION;
}
@@ -162,7 +164,7 @@ status_t TestPlayerStub::setDataSource(
}
mPlayer = (*mNewPlayer)();
- return mPlayer->setDataSource(mContentUrl, headers);
+ return mPlayer->setDataSource(httpService, mContentUrl, headers);
}
// Internal cleanup.
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index a3802eb..55bf2c8 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -66,7 +66,9 @@ class TestPlayerStub : public MediaPlayerInterface {
// @param url Should be a test url. See class comment.
virtual status_t setDataSource(
- const char* url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
+ const KeyedVector<String8, String8> *headers);
// Test player for a file descriptor source is not supported.
virtual status_t setDataSource(int, int64_t, int64_t) {
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.cpp b/media/libmediaplayerservice/VideoFrameScheduler.cpp
new file mode 100644
index 0000000..ce5f5fe
--- /dev/null
+++ b/media/libmediaplayerservice/VideoFrameScheduler.cpp
@@ -0,0 +1,482 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFrameScheduler"
+#include <utils/Log.h>
+#define ATRACE_TAG ATRACE_TAG_VIDEO
+#include <utils/Trace.h>
+
+#include <sys/time.h>
+
+#include <binder/IServiceManager.h>
+#include <gui/ISurfaceComposer.h>
+#include <ui/DisplayStatInfo.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
+
+#include "VideoFrameScheduler.h"
+
+namespace android {
+
+static const nsecs_t kNanosIn1s = 1000000000;
+
+template<class T>
+static int compare(const T *lhs, const T *rhs) {
+ if (*lhs < *rhs) {
+ return -1;
+ } else if (*lhs > *rhs) {
+ return 1;
+ } else {
+ return 0;
+ }
+}
+
+/* ======================================================================= */
+/* PLL */
+/* ======================================================================= */
+
+static const size_t kMinSamplesToStartPrime = 3;
+static const size_t kMinSamplesToStopPrime = VideoFrameScheduler::kHistorySize;
+static const size_t kMinSamplesToEstimatePeriod = 3;
+static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;
+
+static const size_t kPrecision = 12;
+static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;
+static const int64_t kMultiplesThresholdDiv = 4; // 25%
+static const int64_t kReFitThresholdDiv = 100; // 1%
+static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s; // 1 sec
+static const nsecs_t kMinPeriod = kNanosIn1s / 120; // 120Hz
+static const nsecs_t kRefitRefreshPeriod = 10 * kNanosIn1s; // 10 sec
+
+VideoFrameScheduler::PLL::PLL()
+ : mPeriod(-1),
+ mPhase(0),
+ mPrimed(false),
+ mSamplesUsedForPriming(0),
+ mLastTime(-1),
+ mNumSamples(0) {
+}
+
+void VideoFrameScheduler::PLL::reset(float fps) {
+ //test();
+
+ mSamplesUsedForPriming = 0;
+ mLastTime = -1;
+
+ // set up or reset video PLL
+ if (fps <= 0.f) {
+ mPeriod = -1;
+ mPrimed = false;
+ } else {
+ ALOGV("reset at %.1f fps", fps);
+ mPeriod = (nsecs_t)(1e9 / fps + 0.5);
+ mPrimed = true;
+ }
+
+ restart();
+}
+
+// reset PLL but keep previous period estimate
+void VideoFrameScheduler::PLL::restart() {
+ mNumSamples = 0;
+ mPhase = -1;
+}
+
+#if 0
+
+void VideoFrameScheduler::PLL::test() {
+ nsecs_t period = kNanosIn1s / 60;
+ mTimes[0] = 0;
+ mTimes[1] = period;
+ mTimes[2] = period * 3;
+ mTimes[3] = period * 4;
+ mTimes[4] = period * 7;
+ mTimes[5] = period * 8;
+ mTimes[6] = period * 10;
+ mTimes[7] = period * 12;
+ mNumSamples = 8;
+ int64_t a, b, err;
+ fit(0, period * 12 / 7, 8, &a, &b, &err);
+ // a = 0.8(5)+
+ // b = -0.14097(2)+
+ // err = 0.2750578(703)+
+ ALOGD("a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
+ (long long)a, (a / (float)(1 << kPrecision)),
+ (long long)b, (b / (float)(1 << kPrecision)),
+ (long long)err, (err / (float)(1 << (kPrecision * 2))));
+}
+
+#endif
+
+bool VideoFrameScheduler::PLL::fit(
+ nsecs_t phase, nsecs_t period, size_t numSamplesToUse,
+ int64_t *a, int64_t *b, int64_t *err) {
+ if (numSamplesToUse > mNumSamples) {
+ numSamplesToUse = mNumSamples;
+ }
+
+ int64_t sumX = 0;
+ int64_t sumXX = 0;
+ int64_t sumXY = 0;
+ int64_t sumYY = 0;
+ int64_t sumY = 0;
+
+ int64_t x = 0; // x usually is in [0..numSamplesToUse)
+ nsecs_t lastTime;
+ for (size_t i = 0; i < numSamplesToUse; i++) {
+ size_t ix = (mNumSamples - numSamplesToUse + i) % kHistorySize;
+ nsecs_t time = mTimes[ix];
+ if (i > 0) {
+ x += divRound(time - lastTime, period);
+ }
+ // y is usually in [-numSamplesToUse..numSamplesToUse+kRefitRefreshPeriod/kMinPeriod) << kPrecision
+ // ideally in [0..numSamplesToUse), but shifted by -numSamplesToUse during
+ // priming, and possibly shifted by up to kRefitRefreshPeriod/kMinPeriod
+ // while we are not refitting.
+ int64_t y = divRound(time - phase, period >> kPrecision);
+ sumX += x;
+ sumY += y;
+ sumXX += x * x;
+ sumXY += x * y;
+ sumYY += y * y;
+ lastTime = time;
+ }
+
+ int64_t div = numSamplesToUse * sumXX - sumX * sumX;
+ if (div == 0) {
+ return false;
+ }
+
+ int64_t a_nom = numSamplesToUse * sumXY - sumX * sumY;
+ int64_t b_nom = sumXX * sumY - sumX * sumXY;
+ *a = divRound(a_nom, div);
+ *b = divRound(b_nom, div);
+ // don't use a and b directly as the rounding error is significant
+ *err = sumYY - divRound(a_nom * sumXY + b_nom * sumY, div);
+ ALOGV("fitting[%zu] a=%lld (%.6f), b=%lld (%.6f), err=%lld (%.6f)",
+ numSamplesToUse,
+ (long long)*a, (*a / (float)(1 << kPrecision)),
+ (long long)*b, (*b / (float)(1 << kPrecision)),
+ (long long)*err, (*err / (float)(1 << (kPrecision * 2))));
+ return true;
+}
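For reference, fit() above is an ordinary least-squares line fit carried out in fixed point; writing N for numSamplesToUse and using the running sums from the loop, the quantities computed correspond (in exact arithmetic) to

\[
a = \frac{N\,\Sigma_{xy} - \Sigma_x\,\Sigma_y}{N\,\Sigma_{xx} - \Sigma_x^2}, \qquad
b = \frac{\Sigma_{xx}\,\Sigma_y - \Sigma_x\,\Sigma_{xy}}{N\,\Sigma_{xx} - \Sigma_x^2}, \qquad
\mathrm{err} = \Sigma_{yy} - \frac{a_{nom}\,\Sigma_{xy} + b_{nom}\,\Sigma_y}{N\,\Sigma_{xx} - \Sigma_x^2}
\]

where a_nom and b_nom are the numerators of a and b. Since x counts whole candidate periods between samples and y is the phase-relative time scaled by 1 << kPrecision, a acts as a multiplicative period correction and b as a phase correction (both in 1 << kPrecision units), and err is the residual that addSample() later compares against kErrorThreshold.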
+
+void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {
+ if (numSamplesToUse > mNumSamples) {
+ numSamplesToUse = mNumSamples;
+ }
+ CHECK(numSamplesToUse >= 3); // must have at least 3 samples
+
+ // estimate video framerate from deltas between timestamps, and
+ // 2nd order deltas
+ Vector<nsecs_t> deltas;
+ nsecs_t lastTime, firstTime;
+ for (size_t i = 0; i < numSamplesToUse; ++i) {
+ size_t index = (mNumSamples - numSamplesToUse + i) % kHistorySize;
+ nsecs_t time = mTimes[index];
+ if (i > 0) {
+ if (time - lastTime > kMinPeriod) {
+ //ALOGV("delta: %lld", (long long)(time - lastTime));
+ deltas.push(time - lastTime);
+ }
+ } else {
+ firstTime = time;
+ }
+ lastTime = time;
+ }
+ deltas.sort(compare<nsecs_t>);
+ size_t numDeltas = deltas.size();
+ if (numDeltas > 1) {
+ nsecs_t deltaMinLimit = max(deltas[0] / kMultiplesThresholdDiv, kMinPeriod);
+ nsecs_t deltaMaxLimit = deltas[numDeltas / 2] * kMultiplesThresholdDiv;
+ for (size_t i = numDeltas / 2 + 1; i < numDeltas; ++i) {
+ if (deltas[i] > deltaMaxLimit) {
+ deltas.resize(i);
+ numDeltas = i;
+ break;
+ }
+ }
+ for (size_t i = 1; i < numDeltas; ++i) {
+ nsecs_t delta2nd = deltas[i] - deltas[i - 1];
+ if (delta2nd >= deltaMinLimit) {
+ //ALOGV("delta2: %lld", (long long)(delta2nd));
+ deltas.push(delta2nd);
+ }
+ }
+ }
+
+ // use the one that yields the best match
+ int64_t bestScore;
+ for (size_t i = 0; i < deltas.size(); ++i) {
+ nsecs_t delta = deltas[i];
+ int64_t score = 0;
+#if 1
+ // simplest score: number of deltas that are near multiples
+ size_t matches = 0;
+ for (size_t j = 0; j < deltas.size(); ++j) {
+ nsecs_t err = periodicError(deltas[j], delta);
+ if (err < delta / kMultiplesThresholdDiv) {
+ ++matches;
+ }
+ }
+ score = matches;
+#if 0
+ // could be weighed by the (1 - normalized error)
+ if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
+ int64_t a, b, err;
+ fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
+ err = (1 << (2 * kPrecision)) - err;
+ score *= max(0, err);
+ }
+#endif
+#else
+ // or use the error as a negative score
+ if (numSamplesToUse >= kMinSamplesToEstimatePeriod) {
+ int64_t a, b, err;
+ fit(firstTime, delta, numSamplesToUse, &a, &b, &err);
+ score = -delta * err;
+ }
+#endif
+ if (i == 0 || score > bestScore) {
+ bestScore = score;
+ mPeriod = delta;
+ mPhase = firstTime;
+ }
+ }
+ ALOGV("priming[%zu] phase:%lld period:%lld", numSamplesToUse, mPhase, mPeriod);
+}
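A worked example of the priming heuristic above, assuming 60 fps content with one dropped frame: render times of 0, 16.7, 33.4 and 66.8 ms give first-order deltas {16.7, 16.7, 33.4} ms, and the second-order pass over the sorted deltas adds another 16.7 ms candidate. Scoring each candidate by how many deltas lie within 25% (kMultiplesThresholdDiv) of one of its multiples, 16.7 ms matches all four entries while 33.4 ms matches only itself, so mPeriod is primed to roughly 16.7 ms and mPhase to the first sample time.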
+
+nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {
+ if (mLastTime >= 0
+ // if time goes backward, or we skipped rendering
+ && (time > mLastTime + kMaxAllowedFrameSkip || time < mLastTime)) {
+ restart();
+ }
+
+ mLastTime = time;
+ mTimes[mNumSamples % kHistorySize] = time;
+ ++mNumSamples;
+
+ bool doFit = time > mRefitAt;
+ if ((mPeriod <= 0 || !mPrimed) && mNumSamples >= kMinSamplesToStartPrime) {
+ prime(kMinSamplesToStopPrime);
+ ++mSamplesUsedForPriming;
+ doFit = true;
+ }
+ if (mPeriod > 0 && mNumSamples >= kMinSamplesToEstimatePeriod) {
+ if (mPhase < 0) {
+ // initialize phase to the current render time
+ mPhase = time;
+ doFit = true;
+ } else if (!doFit) {
+ int64_t err = periodicError(time - mPhase, mPeriod);
+ doFit = err > mPeriod / kReFitThresholdDiv;
+ }
+
+ if (doFit) {
+ int64_t a, b, err;
+ if (!fit(mPhase, mPeriod, kMaxSamplesToEstimatePeriod, &a, &b, &err)) {
+ // samples are not suitable for fitting. this means they are
+ // also not suitable for priming.
+ ALOGV("could not fit - keeping old period:%lld", (long long)mPeriod);
+ return mPeriod;
+ }
+
+ mRefitAt = time + kRefitRefreshPeriod;
+
+ mPhase += (mPeriod * b) >> kPrecision;
+ mPeriod = (mPeriod * a) >> kPrecision;
+ ALOGV("new phase:%lld period:%lld", (long long)mPhase, (long long)mPeriod);
+
+ if (err < kErrorThreshold) {
+ if (!mPrimed && mSamplesUsedForPriming >= kMinSamplesToStopPrime) {
+ mPrimed = true;
+ }
+ } else {
+ mPrimed = false;
+ mSamplesUsedForPriming = 0;
+ }
+ }
+ }
+ return mPeriod;
+}
+
+/* ======================================================================= */
+/* Frame Scheduler */
+/* ======================================================================= */
+
+static const nsecs_t kDefaultVsyncPeriod = kNanosIn1s / 60; // 60Hz
+static const nsecs_t kVsyncRefreshPeriod = kNanosIn1s; // 1 sec
+
+VideoFrameScheduler::VideoFrameScheduler()
+ : mVsyncTime(0),
+ mVsyncPeriod(0),
+ mVsyncRefreshAt(0),
+ mLastVsyncTime(-1),
+ mTimeCorrection(0) {
+}
+
+void VideoFrameScheduler::updateVsync() {
+ mVsyncRefreshAt = systemTime(SYSTEM_TIME_MONOTONIC) + kVsyncRefreshPeriod;
+ mVsyncPeriod = 0;
+ mVsyncTime = 0;
+
+ // TODO: schedule frames for the destination surface
+ // For now, surface flinger only schedules frames on the primary display
+ if (mComposer == NULL) {
+ String16 name("SurfaceFlinger");
+ sp<IServiceManager> sm = defaultServiceManager();
+ mComposer = interface_cast<ISurfaceComposer>(sm->checkService(name));
+ }
+ if (mComposer != NULL) {
+ DisplayStatInfo stats;
+ status_t res = mComposer->getDisplayStats(NULL /* display */, &stats);
+ if (res == OK) {
+ ALOGV("vsync time:%lld period:%lld",
+ (long long)stats.vsyncTime, (long long)stats.vsyncPeriod);
+ mVsyncTime = stats.vsyncTime;
+ mVsyncPeriod = stats.vsyncPeriod;
+ } else {
+ ALOGW("getDisplayStats returned %d", res);
+ }
+ } else {
+ ALOGW("could not get surface mComposer service");
+ }
+}
+
+void VideoFrameScheduler::init(float videoFps) {
+ updateVsync();
+
+ mLastVsyncTime = -1;
+ mTimeCorrection = 0;
+
+ mPll.reset(videoFps);
+}
+
+void VideoFrameScheduler::restart() {
+ mLastVsyncTime = -1;
+ mTimeCorrection = 0;
+
+ mPll.restart();
+}
+
+nsecs_t VideoFrameScheduler::getVsyncPeriod() {
+ if (mVsyncPeriod > 0) {
+ return mVsyncPeriod;
+ }
+ return kDefaultVsyncPeriod;
+}
+
+nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {
+ nsecs_t origRenderTime = renderTime;
+
+ nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC);
+ if (now >= mVsyncRefreshAt) {
+ updateVsync();
+ }
+
+ // without VSYNC info, there is nothing to do
+ if (mVsyncPeriod == 0) {
+ ALOGV("no vsync: render=%lld", (long long)renderTime);
+ return renderTime;
+ }
+
+ // ensure vsync time is well before (corrected) render time
+ if (mVsyncTime > renderTime - 4 * mVsyncPeriod) {
+ mVsyncTime -=
+ ((mVsyncTime - renderTime) / mVsyncPeriod + 5) * mVsyncPeriod;
+ }
+
+ // Video presentation takes place at the VSYNC _after_ renderTime. Adjust renderTime
+ // so this effectively becomes a rounding operation (to the _closest_ VSYNC.)
+ renderTime -= mVsyncPeriod / 2;
+
+ const nsecs_t videoPeriod = mPll.addSample(origRenderTime);
+ if (videoPeriod > 0) {
+ // Smooth out rendering
+ size_t N = 12;
+ nsecs_t fiveSixthDev =
+ abs(((videoPeriod * 5 + mVsyncPeriod) % (mVsyncPeriod * 6)) - mVsyncPeriod)
+ / (mVsyncPeriod / 100);
+ // use 20 samples if we are doing 5:6 ratio +- 1% (e.g. playing 50Hz on 60Hz)
+ if (fiveSixthDev < 12) { /* 12% / 6 = 2% */
+ N = 20;
+ }
+
+ nsecs_t offset = 0;
+ nsecs_t edgeRemainder = 0;
+ for (size_t i = 1; i <= N; i++) {
+ offset +=
+ (renderTime + mTimeCorrection + videoPeriod * i - mVsyncTime) % mVsyncPeriod;
+ edgeRemainder += (videoPeriod * i) % mVsyncPeriod;
+ }
+ mTimeCorrection += mVsyncPeriod / 2 - offset / N;
+ renderTime += mTimeCorrection;
+ nsecs_t correctionLimit = mVsyncPeriod * 3 / 5;
+ edgeRemainder = abs(edgeRemainder / N - mVsyncPeriod / 2);
+ if (edgeRemainder <= mVsyncPeriod / 3) {
+ correctionLimit /= 2;
+ }
+
+ // estimate how many VSYNCs a frame will spend on the display
+ nsecs_t nextVsyncTime =
+ renderTime + mVsyncPeriod - ((renderTime - mVsyncTime) % mVsyncPeriod);
+ if (mLastVsyncTime >= 0) {
+ size_t minVsyncsPerFrame = videoPeriod / mVsyncPeriod;
+ size_t vsyncsForLastFrame = divRound(nextVsyncTime - mLastVsyncTime, mVsyncPeriod);
+ bool vsyncsPerFrameAreNearlyConstant =
+ periodicError(videoPeriod, mVsyncPeriod) / (mVsyncPeriod / 20) == 0;
+
+ if (mTimeCorrection > correctionLimit &&
+ (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame > minVsyncsPerFrame)) {
+ // remove a VSYNC
+ mTimeCorrection -= mVsyncPeriod / 2;
+ renderTime -= mVsyncPeriod / 2;
+ nextVsyncTime -= mVsyncPeriod;
+ --vsyncsForLastFrame;
+ } else if (mTimeCorrection < -correctionLimit &&
+ (vsyncsPerFrameAreNearlyConstant || vsyncsForLastFrame == minVsyncsPerFrame)) {
+ // add a VSYNC
+ mTimeCorrection += mVsyncPeriod / 2;
+ renderTime += mVsyncPeriod / 2;
+ nextVsyncTime += mVsyncPeriod;
+ ++vsyncsForLastFrame;
+ }
+ ATRACE_INT("FRAME_VSYNCS", vsyncsForLastFrame);
+ }
+ mLastVsyncTime = nextVsyncTime;
+ }
+
+    // align renderTime to the center between VSYNC edges
+ renderTime -= (renderTime - mVsyncTime) % mVsyncPeriod;
+ renderTime += mVsyncPeriod / 2;
+ ALOGV("adjusting render: %lld => %lld", (long long)origRenderTime, (long long)renderTime);
+ ATRACE_INT("FRAME_FLIP_IN(ms)", (renderTime - now) / 1000000);
+ return renderTime;
+}
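To make the final alignment step of schedule() concrete, here is a minimal standalone sketch of the last two adjustments, using hypothetical values rather than the actual member state:

#include <cstdint>
#include <cstdio>

int main() {
    // Illustrative numbers only: ~60Hz display, last reported vsync edge at t=0.
    const int64_t vsyncPeriod = 16666667;   // ns
    const int64_t vsyncTime   = 0;          // ns
    int64_t renderTime        = 37000000;   // ns, ~3.7ms past the vsync edge at 33.3ms

    renderTime -= (renderTime - vsyncTime) % vsyncPeriod;  // snap back to the edge
    renderTime += vsyncPeriod / 2;                         // center between edges
    printf("aligned render time: %lld ns\n", (long long)renderTime);  // prints 41666667
    return 0;
}

The frame therefore ends up half a vsync period past an edge, which keeps it clear of both neighbouring flips.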
+
+void VideoFrameScheduler::release() {
+ mComposer.clear();
+}
+
+VideoFrameScheduler::~VideoFrameScheduler() {
+ release();
+}
+
+} // namespace android
+
diff --git a/media/libmediaplayerservice/VideoFrameScheduler.h b/media/libmediaplayerservice/VideoFrameScheduler.h
new file mode 100644
index 0000000..84b27b4
--- /dev/null
+++ b/media/libmediaplayerservice/VideoFrameScheduler.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FRAME_SCHEDULER_H_
+#define VIDEO_FRAME_SCHEDULER_H_
+
+#include <utils/RefBase.h>
+#include <utils/Timers.h>
+
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+struct ISurfaceComposer;
+
+struct VideoFrameScheduler : public RefBase {
+ VideoFrameScheduler();
+
+ // (re)initialize scheduler
+ void init(float videoFps = -1);
+ // use in case of video render-time discontinuity, e.g. seek
+ void restart();
+ // get adjusted nanotime for a video frame render at renderTime
+ nsecs_t schedule(nsecs_t renderTime);
+
+ // returns the vsync period for the main display
+ nsecs_t getVsyncPeriod();
+
+ void release();
+
+ static const size_t kHistorySize = 8;
+
+protected:
+ virtual ~VideoFrameScheduler();
+
+private:
+ struct PLL {
+ PLL();
+
+        // reset PLL to a new frame rate (fps <= 0 means unknown)
+ void reset(float fps = -1);
+ // keep current estimate, but restart phase
+ void restart();
+ // returns period
+ nsecs_t addSample(nsecs_t time);
+
+ private:
+ nsecs_t mPeriod;
+ nsecs_t mPhase;
+
+ bool mPrimed; // have an estimate for the period
+ size_t mSamplesUsedForPriming;
+
+ nsecs_t mLastTime; // last input time
+ nsecs_t mRefitAt; // next input time to fit at
+
+ size_t mNumSamples; // can go past kHistorySize
+ nsecs_t mTimes[kHistorySize];
+
+ void test();
+ // returns whether fit was successful
+ bool fit(nsecs_t phase, nsecs_t period, size_t numSamples,
+ int64_t *a, int64_t *b, int64_t *err);
+ void prime(size_t numSamples);
+ };
+
+ void updateVsync();
+
+ nsecs_t mVsyncTime; // vsync timing from display
+ nsecs_t mVsyncPeriod;
+ nsecs_t mVsyncRefreshAt; // next time to refresh timing info
+
+ nsecs_t mLastVsyncTime; // estimated vsync time for last frame
+ nsecs_t mTimeCorrection; // running adjustment
+
+ PLL mPll; // PLL for video frame rate based on render time
+
+ sp<ISurfaceComposer> mComposer;
+
+ DISALLOW_EVIL_CONSTRUCTORS(VideoFrameScheduler);
+};
+
+} // namespace android
+
+#endif // VIDEO_FRAME_SCHEDULER_H_
+
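A minimal usage sketch of the API declared above; the caller shown here is hypothetical (in this patch the renderer is presumably the real client), and only calls declared in the header are used:

#include <utils/Timers.h>
#include "VideoFrameScheduler.h"

namespace android {

static void exampleSchedulerLifecycle() {
    sp<VideoFrameScheduler> scheduler = new VideoFrameScheduler();
    scheduler->init(29.97f);             // known video fps; plain init() if unknown
    // Adjust a desired render time (here: 20ms from now) onto the vsync grid.
    nsecs_t when = scheduler->schedule(systemTime(SYSTEM_TIME_MONOTONIC) + 20000000);
    (void)when;
    scheduler->restart();                // e.g. after a seek discontinuity
    scheduler->release();                // drop the ISurfaceComposer reference
}

}  // namespace android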
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index f946c1c..6609874 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -5,19 +5,23 @@ LOCAL_SRC_FILES:= \
GenericSource.cpp \
HTTPLiveSource.cpp \
NuPlayer.cpp \
+ NuPlayerCCDecoder.cpp \
NuPlayerDecoder.cpp \
+ NuPlayerDecoderBase.cpp \
+ NuPlayerDecoderPassThrough.cpp \
NuPlayerDriver.cpp \
NuPlayerRenderer.cpp \
NuPlayerStreamListener.cpp \
RTSPSource.cpp \
StreamingSource.cpp \
- mp4/MP4Source.cpp \
LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright/httplive \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/av/media/libstagefright/mpeg2ts \
$(TOP)/frameworks/av/media/libstagefright/rtsp \
+ $(TOP)/frameworks/av/media/libstagefright/timedtext \
+ $(TOP)/frameworks/av/media/libmediaplayerservice \
$(TOP)/frameworks/native/include/media/openmax
LOCAL_MODULE:= libstagefright_nuplayer
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index b04e7a6..63a9b77 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -14,10 +14,14 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GenericSource"
+
#include "GenericSource.h"
#include "AnotherPacketSource.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -28,57 +32,198 @@
#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include "../../libstagefright/include/DRMExtractor.h"
+#include "../../libstagefright/include/NuCachedSource2.h"
+#include "../../libstagefright/include/WVMExtractor.h"
+#include "../../libstagefright/include/HTTPBase.h"
namespace android {
+static int64_t kLowWaterMarkUs = 2000000ll; // 2secs
+static int64_t kHighWaterMarkUs = 5000000ll; // 5secs
+static const ssize_t kLowWaterMarkBytes = 40000;
+static const ssize_t kHighWaterMarkBytes = 200000;
+
NuPlayer::GenericSource::GenericSource(
const sp<AMessage> &notify,
- const char *url,
- const KeyedVector<String8, String8> *headers,
bool uidValid,
uid_t uid)
: Source(notify),
+ mAudioTimeUs(0),
+ mAudioLastDequeueTimeUs(0),
+ mVideoTimeUs(0),
+ mVideoLastDequeueTimeUs(0),
+ mFetchSubtitleDataGeneration(0),
+ mFetchTimedTextDataGeneration(0),
mDurationUs(0ll),
- mAudioIsVorbis(false) {
+ mAudioIsVorbis(false),
+ mIsWidevine(false),
+ mIsSecure(false),
+ mIsStreaming(false),
+ mUIDValid(uidValid),
+ mUID(uid),
+ mFd(-1),
+ mDrmManagerClient(NULL),
+ mMetaDataSize(-1ll),
+ mBitrate(-1ll),
+ mPollBufferingGeneration(0),
+ mPendingReadBufferTypes(0),
+ mBuffering(false),
+ mPrepareBuffering(false) {
+ resetDataSource();
DataSource::RegisterDefaultSniffers();
+}
+
+void NuPlayer::GenericSource::resetDataSource() {
+ mHTTPService.clear();
+ mHttpSource.clear();
+ mUri.clear();
+ mUriHeaders.clear();
+ if (mFd >= 0) {
+ close(mFd);
+ mFd = -1;
+ }
+ mOffset = 0;
+ mLength = 0;
+ setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
+ mDecryptHandle = NULL;
+ mDrmManagerClient = NULL;
+ mStarted = false;
+ mStopRead = true;
+}
+
+status_t NuPlayer::GenericSource::setDataSource(
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+ resetDataSource();
- sp<DataSource> dataSource =
- DataSource::CreateFromURI(url, headers);
- CHECK(dataSource != NULL);
+ mHTTPService = httpService;
+ mUri = url;
- initFromDataSource(dataSource);
+ if (headers) {
+ mUriHeaders = *headers;
+ }
+
+ // delay data source creation to prepareAsync() to avoid blocking
+ // the calling thread in setDataSource for any significant time.
+ return OK;
}
-NuPlayer::GenericSource::GenericSource(
- const sp<AMessage> &notify,
- int fd, int64_t offset, int64_t length)
- : Source(notify),
- mDurationUs(0ll),
- mAudioIsVorbis(false) {
- DataSource::RegisterDefaultSniffers();
+status_t NuPlayer::GenericSource::setDataSource(
+ int fd, int64_t offset, int64_t length) {
+ resetDataSource();
- sp<DataSource> dataSource = new FileSource(dup(fd), offset, length);
+ mFd = dup(fd);
+ mOffset = offset;
+ mLength = length;
- initFromDataSource(dataSource);
+ // delay data source creation to prepareAsync() to avoid blocking
+ // the calling thread in setDataSource for any significant time.
+ return OK;
}
-void NuPlayer::GenericSource::initFromDataSource(
- const sp<DataSource> &dataSource) {
- sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+sp<MetaData> NuPlayer::GenericSource::getFileFormatMeta() const {
+ return mFileMeta;
+}
- CHECK(extractor != NULL);
+status_t NuPlayer::GenericSource::initFromDataSource() {
+ sp<MediaExtractor> extractor;
+ String8 mimeType;
+ float confidence;
+ sp<AMessage> dummy;
+ bool isWidevineStreaming = false;
+
+ CHECK(mDataSource != NULL);
+
+ if (mIsWidevine) {
+ isWidevineStreaming = SniffWVM(
+ mDataSource, &mimeType, &confidence, &dummy);
+ if (!isWidevineStreaming ||
+ strcasecmp(
+ mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM)) {
+ ALOGE("unsupported widevine mime: %s", mimeType.string());
+ return UNKNOWN_ERROR;
+ }
+ } else if (mIsStreaming) {
+ if (mSniffedMIME.empty()) {
+ if (!mDataSource->sniff(&mimeType, &confidence, &dummy)) {
+ return UNKNOWN_ERROR;
+ }
+ mSniffedMIME = mimeType.string();
+ }
+ isWidevineStreaming = !strcasecmp(
+ mSniffedMIME.c_str(), MEDIA_MIMETYPE_CONTAINER_WVM);
+ }
+
+ if (isWidevineStreaming) {
+ // we don't want cached source for widevine streaming.
+ mCachedSource.clear();
+ mDataSource = mHttpSource;
+ mWVMExtractor = new WVMExtractor(mDataSource);
+ mWVMExtractor->setAdaptiveStreamingMode(true);
+ if (mUIDValid) {
+ mWVMExtractor->setUID(mUID);
+ }
+ extractor = mWVMExtractor;
+ } else {
+ extractor = MediaExtractor::Create(mDataSource,
+ mSniffedMIME.empty() ? NULL: mSniffedMIME.c_str());
+ }
+
+ if (extractor == NULL) {
+ return UNKNOWN_ERROR;
+ }
+
+ if (extractor->getDrmFlag()) {
+ checkDrmStatus(mDataSource);
+ }
+
+ mFileMeta = extractor->getMetaData();
+ if (mFileMeta != NULL) {
+ int64_t duration;
+ if (mFileMeta->findInt64(kKeyDuration, &duration)) {
+ mDurationUs = duration;
+ }
+
+ if (!mIsWidevine) {
+ // Check mime to see if we actually have a widevine source.
+ // If the data source is not URL-type (eg. file source), we
+ // won't be able to tell until now.
+ const char *fileMime;
+ if (mFileMeta->findCString(kKeyMIMEType, &fileMime)
+ && !strncasecmp(fileMime, "video/wvm", 9)) {
+ mIsWidevine = true;
+ }
+ }
+ }
+
+ int32_t totalBitrate = 0;
+
+ size_t numtracks = extractor->countTracks();
+ if (numtracks == 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ for (size_t i = 0; i < numtracks; ++i) {
+ sp<MediaSource> track = extractor->getTrack(i);
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
sp<MetaData> meta = extractor->getTrackMetaData(i);
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
- sp<MediaSource> track;
-
+        // Do the string compare immediately with "mime"; we can't assume
+        // "mime" stays valid after another extractor operation, since some
+        // extractors may modify the meta during getTrack() and invalidate it.
if (!strncasecmp(mime, "audio/", 6)) {
if (mAudioTrack.mSource == NULL) {
- mAudioTrack.mSource = track = extractor->getTrack(i);
+ mAudioTrack.mIndex = i;
+ mAudioTrack.mSource = track;
+ mAudioTrack.mPackets =
+ new AnotherPacketSource(mAudioTrack.mSource->getFormat());
if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
mAudioIsVorbis = true;
@@ -88,71 +233,808 @@ void NuPlayer::GenericSource::initFromDataSource(
}
} else if (!strncasecmp(mime, "video/", 6)) {
if (mVideoTrack.mSource == NULL) {
- mVideoTrack.mSource = track = extractor->getTrack(i);
+ mVideoTrack.mIndex = i;
+ mVideoTrack.mSource = track;
+ mVideoTrack.mPackets =
+ new AnotherPacketSource(mVideoTrack.mSource->getFormat());
+
+ // check if the source requires secure buffers
+ int32_t secure;
+ if (meta->findInt32(kKeyRequiresSecureBuffers, &secure)
+ && secure) {
+ mIsSecure = true;
+ if (mUIDValid) {
+ extractor->setUID(mUID);
+ }
+ }
}
}
if (track != NULL) {
+ mSources.push(track);
int64_t durationUs;
if (meta->findInt64(kKeyDuration, &durationUs)) {
if (durationUs > mDurationUs) {
mDurationUs = durationUs;
}
}
+
+ int32_t bitrate;
+ if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
+ totalBitrate += bitrate;
+ } else {
+ totalBitrate = -1;
+ }
+ }
+ }
+
+ mBitrate = totalBitrate;
+
+ return OK;
+}
+
+status_t NuPlayer::GenericSource::startSources() {
+ // Start the selected A/V tracks now before we start buffering.
+ // Widevine sources might re-initialize crypto when starting, if we delay
+ // this to start(), all data buffered during prepare would be wasted.
+ // (We don't actually start reading until start().)
+ if (mAudioTrack.mSource != NULL && mAudioTrack.mSource->start() != OK) {
+ ALOGE("failed to start audio track!");
+ return UNKNOWN_ERROR;
+ }
+
+ if (mVideoTrack.mSource != NULL && mVideoTrack.mSource->start() != OK) {
+ ALOGE("failed to start video track!");
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+void NuPlayer::GenericSource::checkDrmStatus(const sp<DataSource>& dataSource) {
+ dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
+ if (mDecryptHandle != NULL) {
+ CHECK(mDrmManagerClient);
+ if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
+ sp<AMessage> msg = dupNotify();
+ msg->setInt32("what", kWhatDrmNoLicense);
+ msg->post();
}
}
}
+int64_t NuPlayer::GenericSource::getLastReadPosition() {
+ if (mAudioTrack.mSource != NULL) {
+ return mAudioTimeUs;
+ } else if (mVideoTrack.mSource != NULL) {
+ return mVideoTimeUs;
+ } else {
+ return 0;
+ }
+}
+
+status_t NuPlayer::GenericSource::setBuffers(
+ bool audio, Vector<MediaBuffer *> &buffers) {
+ if (mIsSecure && !audio) {
+ return mVideoTrack.mSource->setBuffers(buffers);
+ }
+ return INVALID_OPERATION;
+}
+
NuPlayer::GenericSource::~GenericSource() {
+ if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+ }
+ resetDataSource();
}
void NuPlayer::GenericSource::prepareAsync() {
- if (mVideoTrack.mSource != NULL) {
- sp<MetaData> meta = mVideoTrack.mSource->getFormat();
+ if (mLooper == NULL) {
+ mLooper = new ALooper;
+ mLooper->setName("generic");
+ mLooper->start();
+
+ mLooper->registerHandler(this);
+ }
- int32_t width, height;
- CHECK(meta->findInt32(kKeyWidth, &width));
- CHECK(meta->findInt32(kKeyHeight, &height));
+ sp<AMessage> msg = new AMessage(kWhatPrepareAsync, id());
+ msg->post();
+}
- notifyVideoSizeChanged(width, height);
+void NuPlayer::GenericSource::onPrepareAsync() {
+ // delayed data source creation
+ if (mDataSource == NULL) {
+        // set to false first; if the extractor
+        // comes back as secure, it will be set to true then.
+ mIsSecure = false;
+
+ if (!mUri.empty()) {
+ const char* uri = mUri.c_str();
+ mIsWidevine = !strncasecmp(uri, "widevine://", 11);
+
+ if (!strncasecmp("http://", uri, 7)
+ || !strncasecmp("https://", uri, 8)
+ || mIsWidevine) {
+ mHttpSource = DataSource::CreateMediaHTTP(mHTTPService);
+ if (mHttpSource == NULL) {
+ ALOGE("Failed to create http source!");
+ notifyPreparedAndCleanup(UNKNOWN_ERROR);
+ return;
+ }
+ }
+
+ mDataSource = DataSource::CreateFromURI(
+ mHTTPService, uri, &mUriHeaders, &mContentType,
+ static_cast<HTTPBase *>(mHttpSource.get()));
+ } else {
+ mIsWidevine = false;
+
+ mDataSource = new FileSource(mFd, mOffset, mLength);
+ mFd = -1;
+ }
+
+ if (mDataSource == NULL) {
+ ALOGE("Failed to create data source!");
+ notifyPreparedAndCleanup(UNKNOWN_ERROR);
+ return;
+ }
+
+ if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
+ mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());
+ }
+
+ // For widevine or other cached streaming cases, we need to wait for
+ // enough buffering before reporting prepared.
+ // Note that even when URL doesn't start with widevine://, mIsWidevine
+ // could still be set to true later, if the streaming or file source
+ // is sniffed to be widevine. We don't want to buffer for file source
+ // in that case, so must check the flag now.
+ mIsStreaming = (mIsWidevine || mCachedSource != NULL);
+ }
+
+ // check initial caching status
+ status_t err = prefillCacheIfNecessary();
+ if (err != OK) {
+ if (err == -EAGAIN) {
+ (new AMessage(kWhatPrepareAsync, id()))->post(200000);
+ } else {
+ ALOGE("Failed to prefill data cache!");
+ notifyPreparedAndCleanup(UNKNOWN_ERROR);
+ }
+ return;
+ }
+
+    // init extractor from data source
+ err = initFromDataSource();
+
+ if (err != OK) {
+ ALOGE("Failed to init from data source!");
+ notifyPreparedAndCleanup(err);
+ return;
+ }
+
+ if (mVideoTrack.mSource != NULL) {
+ sp<MetaData> meta = doGetFormatMeta(false /* audio */);
+ sp<AMessage> msg = new AMessage;
+ err = convertMetaDataToMessage(meta, &msg);
+        if (err != OK) {
+ notifyPreparedAndCleanup(err);
+ return;
+ }
+ notifyVideoSizeChanged(msg);
}
notifyFlagsChanged(
- FLAG_CAN_PAUSE
+ (mIsSecure ? FLAG_SECURE : 0)
+ | (mDecryptHandle != NULL ? FLAG_PROTECTED : 0)
+ | FLAG_CAN_PAUSE
| FLAG_CAN_SEEK_BACKWARD
| FLAG_CAN_SEEK_FORWARD
| FLAG_CAN_SEEK);
- notifyPrepared();
+ if (mIsSecure) {
+ // secure decoders must be instantiated before starting widevine source
+ sp<AMessage> reply = new AMessage(kWhatSecureDecodersInstantiated, id());
+ notifyInstantiateSecureDecoders(reply);
+ } else {
+ finishPrepareAsync();
+ }
+}
+
+void NuPlayer::GenericSource::onSecureDecodersInstantiated(status_t err) {
+ if (err != OK) {
+ ALOGE("Failed to instantiate secure decoders!");
+ notifyPreparedAndCleanup(err);
+ return;
+ }
+ finishPrepareAsync();
+}
+
+void NuPlayer::GenericSource::finishPrepareAsync() {
+ status_t err = startSources();
+ if (err != OK) {
+ ALOGE("Failed to init start data source!");
+ notifyPreparedAndCleanup(err);
+ return;
+ }
+
+ if (mIsStreaming) {
+ mPrepareBuffering = true;
+
+ ensureCacheIsFetching();
+ restartPollBuffering();
+ } else {
+ notifyPrepared();
+ }
+}
+
+void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {
+ if (err != OK) {
+ mMetaDataSize = -1ll;
+ mContentType = "";
+ mSniffedMIME = "";
+ mDataSource.clear();
+ mCachedSource.clear();
+ mHttpSource.clear();
+ mBitrate = -1;
+
+ cancelPollBuffering();
+ }
+ notifyPrepared(err);
+}
+
+status_t NuPlayer::GenericSource::prefillCacheIfNecessary() {
+ CHECK(mDataSource != NULL);
+
+ if (mCachedSource == NULL) {
+ // no prefill if the data source is not cached
+ return OK;
+ }
+
+    // We skip the prefill for streams that appear to be audio-only,
+    // so that even low-bandwidth streams start playing back
+    // fairly instantly.
+ if (!strncasecmp(mContentType.string(), "audio/", 6)) {
+ return OK;
+ }
+
+ // We're going to prefill the cache before trying to instantiate
+ // the extractor below, as the latter is an operation that otherwise
+ // could block on the datasource for a significant amount of time.
+ // During that time we'd be unable to abort the preparation phase
+ // without this prefill.
+
+ // Initially make sure we have at least 192 KB for the sniff
+ // to complete without blocking.
+ static const size_t kMinBytesForSniffing = 192 * 1024;
+ static const size_t kDefaultMetaSize = 200000;
+
+ status_t finalStatus;
+
+ size_t cachedDataRemaining =
+ mCachedSource->approxDataRemaining(&finalStatus);
+
+ if (finalStatus != OK || (mMetaDataSize >= 0
+ && (off64_t)cachedDataRemaining >= mMetaDataSize)) {
+ ALOGV("stop caching, status %d, "
+ "metaDataSize %lld, cachedDataRemaining %zu",
+ finalStatus, mMetaDataSize, cachedDataRemaining);
+ return OK;
+ }
+
+ ALOGV("now cached %zu bytes of data", cachedDataRemaining);
+
+ if (mMetaDataSize < 0
+ && cachedDataRemaining >= kMinBytesForSniffing) {
+ String8 tmp;
+ float confidence;
+ sp<AMessage> meta;
+ if (!mCachedSource->sniff(&tmp, &confidence, &meta)) {
+ return UNKNOWN_ERROR;
+ }
+
+        // We successfully identified the container format; remember this
+        // mime type so we don't have to sniff it again when we call
+        // MediaExtractor::Create().
+ mSniffedMIME = tmp.string();
+
+ if (meta == NULL
+ || !meta->findInt64("meta-data-size",
+ reinterpret_cast<int64_t*>(&mMetaDataSize))) {
+ mMetaDataSize = kDefaultMetaSize;
+ }
+
+ if (mMetaDataSize < 0ll) {
+ ALOGE("invalid metaDataSize = %lld bytes", mMetaDataSize);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ return -EAGAIN;
}
void NuPlayer::GenericSource::start() {
ALOGI("start");
+ mStopRead = false;
if (mAudioTrack.mSource != NULL) {
- CHECK_EQ(mAudioTrack.mSource->start(), (status_t)OK);
+ postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
+ }
- mAudioTrack.mPackets =
- new AnotherPacketSource(mAudioTrack.mSource->getFormat());
+ if (mVideoTrack.mSource != NULL) {
+ postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+ }
- readBuffer(true /* audio */);
+ setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);
+ mStarted = true;
+
+ (new AMessage(kWhatStart, id()))->post();
+}
+
+void NuPlayer::GenericSource::stop() {
+ // nothing to do, just account for DRM playback status
+ setDrmPlaybackStatusIfNeeded(Playback::STOP, 0);
+ mStarted = false;
+ if (mIsWidevine || mIsSecure) {
+ // For widevine or secure sources we need to prevent any further reads.
+ sp<AMessage> msg = new AMessage(kWhatStopWidevine, id());
+ sp<AMessage> response;
+ (void) msg->postAndAwaitResponse(&response);
}
+}
- if (mVideoTrack.mSource != NULL) {
- CHECK_EQ(mVideoTrack.mSource->start(), (status_t)OK);
+void NuPlayer::GenericSource::pause() {
+ // nothing to do, just account for DRM playback status
+ setDrmPlaybackStatusIfNeeded(Playback::PAUSE, 0);
+ mStarted = false;
+}
- mVideoTrack.mPackets =
- new AnotherPacketSource(mVideoTrack.mSource->getFormat());
+void NuPlayer::GenericSource::resume() {
+ // nothing to do, just account for DRM playback status
+ setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);
+ mStarted = true;
+
+ (new AMessage(kWhatResume, id()))->post();
+}
+
+void NuPlayer::GenericSource::disconnect() {
+ if (mDataSource != NULL) {
+ // disconnect data source
+ if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
+ static_cast<NuCachedSource2 *>(mDataSource.get())->disconnect();
+ }
+ } else if (mHttpSource != NULL) {
+ static_cast<HTTPBase *>(mHttpSource.get())->disconnect();
+ }
+}
- readBuffer(false /* audio */);
+void NuPlayer::GenericSource::setDrmPlaybackStatusIfNeeded(int playbackStatus, int64_t position) {
+ if (mDecryptHandle != NULL) {
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle, playbackStatus, position);
}
+ mSubtitleTrack.mPackets = new AnotherPacketSource(NULL);
+ mTimedTextTrack.mPackets = new AnotherPacketSource(NULL);
}
status_t NuPlayer::GenericSource::feedMoreTSData() {
return OK;
}
+void NuPlayer::GenericSource::schedulePollBuffering() {
+ sp<AMessage> msg = new AMessage(kWhatPollBuffering, id());
+ msg->setInt32("generation", mPollBufferingGeneration);
+ msg->post(1000000ll);
+}
+
+void NuPlayer::GenericSource::cancelPollBuffering() {
+ mBuffering = false;
+ ++mPollBufferingGeneration;
+}
+
+void NuPlayer::GenericSource::restartPollBuffering() {
+ if (mIsStreaming) {
+ cancelPollBuffering();
+ onPollBuffering();
+ }
+}
+
+void NuPlayer::GenericSource::notifyBufferingUpdate(int percentage) {
+ ALOGV("notifyBufferingUpdate: buffering %d%%", percentage);
+
+ sp<AMessage> msg = dupNotify();
+ msg->setInt32("what", kWhatBufferingUpdate);
+ msg->setInt32("percentage", percentage);
+ msg->post();
+}
+
+void NuPlayer::GenericSource::startBufferingIfNecessary() {
+ ALOGV("startBufferingIfNecessary: mPrepareBuffering=%d, mBuffering=%d",
+ mPrepareBuffering, mBuffering);
+
+ if (mPrepareBuffering) {
+ return;
+ }
+
+ if (!mBuffering) {
+ mBuffering = true;
+
+ ensureCacheIsFetching();
+ sendCacheStats();
+
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatPauseOnBufferingStart);
+ notify->post();
+ }
+}
+
+void NuPlayer::GenericSource::stopBufferingIfNecessary() {
+ ALOGV("stopBufferingIfNecessary: mPrepareBuffering=%d, mBuffering=%d",
+ mPrepareBuffering, mBuffering);
+
+ if (mPrepareBuffering) {
+ mPrepareBuffering = false;
+ notifyPrepared();
+ return;
+ }
+
+ if (mBuffering) {
+ mBuffering = false;
+
+ sendCacheStats();
+
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatResumeOnBufferingEnd);
+ notify->post();
+ }
+}
+
+void NuPlayer::GenericSource::sendCacheStats() {
+ int32_t kbps = 0;
+ status_t err = UNKNOWN_ERROR;
+
+ if (mWVMExtractor != NULL) {
+ err = mWVMExtractor->getEstimatedBandwidthKbps(&kbps);
+ } else if (mCachedSource != NULL) {
+ err = mCachedSource->getEstimatedBandwidthKbps(&kbps);
+ }
+
+ if (err == OK) {
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatCacheStats);
+ notify->setInt32("bandwidth", kbps);
+ notify->post();
+ }
+}
+
+void NuPlayer::GenericSource::ensureCacheIsFetching() {
+ if (mCachedSource != NULL) {
+ mCachedSource->resumeFetchingIfNecessary();
+ }
+}
+
+void NuPlayer::GenericSource::onPollBuffering() {
+ status_t finalStatus = UNKNOWN_ERROR;
+ int64_t cachedDurationUs = -1ll;
+ ssize_t cachedDataRemaining = -1;
+
+ ALOGW_IF(mWVMExtractor != NULL && mCachedSource != NULL,
+ "WVMExtractor and NuCachedSource both present");
+
+ if (mWVMExtractor != NULL) {
+ cachedDurationUs =
+ mWVMExtractor->getCachedDurationUs(&finalStatus);
+ } else if (mCachedSource != NULL) {
+ cachedDataRemaining =
+ mCachedSource->approxDataRemaining(&finalStatus);
+
+ if (finalStatus == OK) {
+ off64_t size;
+ int64_t bitrate = 0ll;
+ if (mDurationUs > 0 && mCachedSource->getSize(&size) == OK) {
+ bitrate = size * 8000000ll / mDurationUs;
+ } else if (mBitrate > 0) {
+ bitrate = mBitrate;
+ }
+ if (bitrate > 0) {
+ cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate;
+ }
+ }
+ }
+
+ if (finalStatus != OK) {
+ ALOGV("onPollBuffering: EOS (finalStatus = %d)", finalStatus);
+
+ if (finalStatus == ERROR_END_OF_STREAM) {
+ notifyBufferingUpdate(100);
+ }
+
+ stopBufferingIfNecessary();
+ return;
+ } else if (cachedDurationUs >= 0ll) {
+ if (mDurationUs > 0ll) {
+ int64_t cachedPosUs = getLastReadPosition() + cachedDurationUs;
+ int percentage = 100.0 * cachedPosUs / mDurationUs;
+ if (percentage > 100) {
+ percentage = 100;
+ }
+
+ notifyBufferingUpdate(percentage);
+ }
+
+ ALOGV("onPollBuffering: cachedDurationUs %.1f sec",
+ cachedDurationUs / 1000000.0f);
+
+ if (cachedDurationUs < kLowWaterMarkUs) {
+ startBufferingIfNecessary();
+ } else if (cachedDurationUs > kHighWaterMarkUs) {
+ stopBufferingIfNecessary();
+ }
+ } else if (cachedDataRemaining >= 0) {
+ ALOGV("onPollBuffering: cachedDataRemaining %d bytes",
+ cachedDataRemaining);
+
+ if (cachedDataRemaining < kLowWaterMarkBytes) {
+ startBufferingIfNecessary();
+ } else if (cachedDataRemaining > kHighWaterMarkBytes) {
+ stopBufferingIfNecessary();
+ }
+ }
+
+ schedulePollBuffering();
+}
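Summarizing the byte-count branch of onPollBuffering() above: when only cached byte counts are available, the buffered duration and the reported percentage follow

\[
\mathrm{bitrate} = \frac{8\times 10^{6}\cdot \mathrm{size}}{\mathrm{durationUs}}, \qquad
\mathrm{cachedDurationUs} = \frac{8\times 10^{6}\cdot \mathrm{cachedBytes}}{\mathrm{bitrate}}, \qquad
\mathrm{pct} = \min\!\left(100,\ \frac{100\,(\mathrm{lastReadPosUs} + \mathrm{cachedDurationUs})}{\mathrm{durationUs}}\right)
\]

where the bitrate comes from the cache's known size (or falls back to mBitrate from the track metadata), with buffering entered below kLowWaterMarkUs (2 s of cached playback) and exited above kHighWaterMarkUs (5 s). If no bitrate estimate is available at all, the raw kLowWaterMarkBytes / kHighWaterMarkBytes thresholds are applied to the byte count instead.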
+
+void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatPrepareAsync:
+ {
+ onPrepareAsync();
+ break;
+ }
+ case kWhatFetchSubtitleData:
+ {
+ fetchTextData(kWhatSendSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+ mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatFetchTimedTextData:
+ {
+ fetchTextData(kWhatSendTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+ mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatSendSubtitleData:
+ {
+ sendTextData(kWhatSubtitleData, MEDIA_TRACK_TYPE_SUBTITLE,
+ mFetchSubtitleDataGeneration, mSubtitleTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatSendTimedTextData:
+ {
+ sendTextData(kWhatTimedTextData, MEDIA_TRACK_TYPE_TIMEDTEXT,
+ mFetchTimedTextDataGeneration, mTimedTextTrack.mPackets, msg);
+ break;
+ }
+
+ case kWhatChangeAVSource:
+ {
+ int32_t trackIndex;
+ CHECK(msg->findInt32("trackIndex", &trackIndex));
+ const sp<MediaSource> source = mSources.itemAt(trackIndex);
+
+ Track* track;
+ const char *mime;
+ media_track_type trackType, counterpartType;
+ sp<MetaData> meta = source->getFormat();
+ meta->findCString(kKeyMIMEType, &mime);
+ if (!strncasecmp(mime, "audio/", 6)) {
+ track = &mAudioTrack;
+ trackType = MEDIA_TRACK_TYPE_AUDIO;
+                counterpartType = MEDIA_TRACK_TYPE_VIDEO;
+ } else {
+ CHECK(!strncasecmp(mime, "video/", 6));
+ track = &mVideoTrack;
+ trackType = MEDIA_TRACK_TYPE_VIDEO;
+                counterpartType = MEDIA_TRACK_TYPE_AUDIO;
+ }
+
+
+ if (track->mSource != NULL) {
+ track->mSource->stop();
+ }
+ track->mSource = source;
+ track->mSource->start();
+ track->mIndex = trackIndex;
+
+ int64_t timeUs, actualTimeUs;
+ const bool formatChange = true;
+ if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ timeUs = mAudioLastDequeueTimeUs;
+ } else {
+ timeUs = mVideoLastDequeueTimeUs;
+ }
+ readBuffer(trackType, timeUs, &actualTimeUs, formatChange);
+ readBuffer(counterpartType, -1, NULL, formatChange);
+ ALOGV("timeUs %lld actualTimeUs %lld", timeUs, actualTimeUs);
+
+ break;
+ }
+
+ case kWhatStart:
+ case kWhatResume:
+ {
+ restartPollBuffering();
+ break;
+ }
+
+ case kWhatPollBuffering:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation == mPollBufferingGeneration) {
+ onPollBuffering();
+ }
+ break;
+ }
+
+ case kWhatGetFormat:
+ {
+ onGetFormatMeta(msg);
+ break;
+ }
+
+ case kWhatGetSelectedTrack:
+ {
+ onGetSelectedTrack(msg);
+ break;
+ }
+
+ case kWhatSelectTrack:
+ {
+ onSelectTrack(msg);
+ break;
+ }
+
+ case kWhatSeek:
+ {
+ onSeek(msg);
+ break;
+ }
+
+ case kWhatReadBuffer:
+ {
+ onReadBuffer(msg);
+ break;
+ }
+
+ case kWhatSecureDecodersInstantiated:
+ {
+ int32_t err;
+ CHECK(msg->findInt32("err", &err));
+ onSecureDecodersInstantiated(err);
+ break;
+ }
+
+ case kWhatStopWidevine:
+ {
+ // mStopRead is only used for Widevine to prevent the video source
+ // from being read while the associated video decoder is shutting down.
+ mStopRead = true;
+ if (mVideoTrack.mSource != NULL) {
+ mVideoTrack.mPackets->clear();
+ }
+ sp<AMessage> response = new AMessage;
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+ break;
+ }
+ default:
+ Source::onMessageReceived(msg);
+ break;
+ }
+}
+
+void NuPlayer::GenericSource::fetchTextData(
+ uint32_t sendWhat,
+ media_track_type type,
+ int32_t curGen,
+ sp<AnotherPacketSource> packets,
+ sp<AMessage> msg) {
+ int32_t msgGeneration;
+ CHECK(msg->findInt32("generation", &msgGeneration));
+ if (msgGeneration != curGen) {
+ // stale
+ return;
+ }
+
+ int32_t avail;
+ if (packets->hasBufferAvailable(&avail)) {
+ return;
+ }
+
+ int64_t timeUs;
+ CHECK(msg->findInt64("timeUs", &timeUs));
+
+ int64_t subTimeUs;
+ readBuffer(type, timeUs, &subTimeUs);
+
+ int64_t delayUs = subTimeUs - timeUs;
+ if (msg->what() == kWhatFetchSubtitleData) {
+ const int64_t oneSecUs = 1000000ll;
+ delayUs -= oneSecUs;
+ }
+ sp<AMessage> msg2 = new AMessage(sendWhat, id());
+ msg2->setInt32("generation", msgGeneration);
+ msg2->post(delayUs < 0 ? 0 : delayUs);
+}
+
+void NuPlayer::GenericSource::sendTextData(
+ uint32_t what,
+ media_track_type type,
+ int32_t curGen,
+ sp<AnotherPacketSource> packets,
+ sp<AMessage> msg) {
+ int32_t msgGeneration;
+ CHECK(msg->findInt32("generation", &msgGeneration));
+ if (msgGeneration != curGen) {
+ // stale
+ return;
+ }
+
+ int64_t subTimeUs;
+ if (packets->nextBufferTime(&subTimeUs) != OK) {
+ return;
+ }
+
+ int64_t nextSubTimeUs;
+ readBuffer(type, -1, &nextSubTimeUs);
+
+ sp<ABuffer> buffer;
+ status_t dequeueStatus = packets->dequeueAccessUnit(&buffer);
+ if (dequeueStatus == OK) {
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", what);
+ notify->setBuffer("buffer", buffer);
+ notify->post();
+
+ const int64_t delayUs = nextSubTimeUs - subTimeUs;
+ msg->post(delayUs < 0 ? 0 : delayUs);
+ }
+}
+
sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) {
+ sp<AMessage> msg = new AMessage(kWhatGetFormat, id());
+ msg->setInt32("audio", audio);
+
+ sp<AMessage> response;
+ void *format;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findPointer("format", &format));
+ return (MetaData *)format;
+ } else {
+ return NULL;
+ }
+}
+
+void NuPlayer::GenericSource::onGetFormatMeta(sp<AMessage> msg) const {
+ int32_t audio;
+ CHECK(msg->findInt32("audio", &audio));
+
+ sp<AMessage> response = new AMessage;
+ sp<MetaData> format = doGetFormatMeta(audio);
+ response->setPointer("format", format.get());
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+sp<MetaData> NuPlayer::GenericSource::doGetFormatMeta(bool audio) const {
sp<MediaSource> source = audio ? mAudioTrack.mSource : mVideoTrack.mSource;
if (source == NULL) {
@@ -170,14 +1052,63 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(
return -EWOULDBLOCK;
}
+ if (mIsWidevine && !audio) {
+        // try to read a buffer, as we may not have been able to last time
+ postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
+ }
+
status_t finalResult;
if (!track->mPackets->hasBufferAvailable(&finalResult)) {
- return finalResult == OK ? -EWOULDBLOCK : finalResult;
+ if (finalResult == OK) {
+ postReadBuffer(
+ audio ? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+ return -EWOULDBLOCK;
+ }
+ return finalResult;
}
status_t result = track->mPackets->dequeueAccessUnit(accessUnit);
- readBuffer(audio, -1ll);
+ if (!track->mPackets->hasBufferAvailable(&finalResult)) {
+ postReadBuffer(audio? MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);
+ }
+
+ if (result != OK) {
+ if (mSubtitleTrack.mSource != NULL) {
+ mSubtitleTrack.mPackets->clear();
+ mFetchSubtitleDataGeneration++;
+ }
+ if (mTimedTextTrack.mSource != NULL) {
+ mTimedTextTrack.mPackets->clear();
+ mFetchTimedTextDataGeneration++;
+ }
+ return result;
+ }
+
+ int64_t timeUs;
+ status_t eosResult; // ignored
+ CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
+ if (audio) {
+ mAudioLastDequeueTimeUs = timeUs;
+ } else {
+ mVideoLastDequeueTimeUs = timeUs;
+ }
+
+ if (mSubtitleTrack.mSource != NULL
+ && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchSubtitleDataGeneration);
+ msg->post();
+ }
+
+ if (mTimedTextTrack.mSource != NULL
+ && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchTimedTextDataGeneration);
+ msg->post();
+ }
return result;
}
@@ -187,25 +1118,426 @@ status_t NuPlayer::GenericSource::getDuration(int64_t *durationUs) {
return OK;
}
+size_t NuPlayer::GenericSource::getTrackCount() const {
+ return mSources.size();
+}
+
+sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {
+ size_t trackCount = mSources.size();
+ if (trackIndex >= trackCount) {
+ return NULL;
+ }
+
+ sp<AMessage> format = new AMessage();
+ sp<MetaData> meta = mSources.itemAt(trackIndex)->getFormat();
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ int32_t trackType;
+ if (!strncasecmp(mime, "video/", 6)) {
+ trackType = MEDIA_TRACK_TYPE_VIDEO;
+ } else if (!strncasecmp(mime, "audio/", 6)) {
+ trackType = MEDIA_TRACK_TYPE_AUDIO;
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+ trackType = MEDIA_TRACK_TYPE_TIMEDTEXT;
+ } else {
+ trackType = MEDIA_TRACK_TYPE_UNKNOWN;
+ }
+ format->setInt32("type", trackType);
+
+ const char *lang;
+ if (!meta->findCString(kKeyMediaLanguage, &lang)) {
+ lang = "und";
+ }
+ format->setString("language", lang);
+
+ if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ format->setString("mime", mime);
+
+ int32_t isAutoselect = 1, isDefault = 0, isForced = 0;
+ meta->findInt32(kKeyTrackIsAutoselect, &isAutoselect);
+ meta->findInt32(kKeyTrackIsDefault, &isDefault);
+ meta->findInt32(kKeyTrackIsForced, &isForced);
+
+ format->setInt32("auto", !!isAutoselect);
+ format->setInt32("default", !!isDefault);
+ format->setInt32("forced", !!isForced);
+ }
+
+ return format;
+}
+
+ssize_t NuPlayer::GenericSource::getSelectedTrack(media_track_type type) const {
+ sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id());
+ msg->setInt32("type", type);
+
+ sp<AMessage> response;
+ int32_t index;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("index", &index));
+ return index;
+ } else {
+ return -1;
+ }
+}
+
+void NuPlayer::GenericSource::onGetSelectedTrack(sp<AMessage> msg) const {
+ int32_t tmpType;
+ CHECK(msg->findInt32("type", &tmpType));
+ media_track_type type = (media_track_type)tmpType;
+
+ sp<AMessage> response = new AMessage;
+ ssize_t index = doGetSelectedTrack(type);
+ response->setInt32("index", index);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+ssize_t NuPlayer::GenericSource::doGetSelectedTrack(media_track_type type) const {
+ const Track *track = NULL;
+ switch (type) {
+ case MEDIA_TRACK_TYPE_VIDEO:
+ track = &mVideoTrack;
+ break;
+ case MEDIA_TRACK_TYPE_AUDIO:
+ track = &mAudioTrack;
+ break;
+ case MEDIA_TRACK_TYPE_TIMEDTEXT:
+ track = &mTimedTextTrack;
+ break;
+ case MEDIA_TRACK_TYPE_SUBTITLE:
+ track = &mSubtitleTrack;
+ break;
+ default:
+ break;
+ }
+
+ if (track != NULL && track->mSource != NULL) {
+ return track->mIndex;
+ }
+
+ return -1;
+}
+
+status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
+ ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex);
+ sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
+ msg->setInt32("trackIndex", trackIndex);
+ msg->setInt32("select", select);
+ msg->setInt64("timeUs", timeUs);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+
+ return err;
+}
+
+void NuPlayer::GenericSource::onSelectTrack(sp<AMessage> msg) {
+ int32_t trackIndex, select;
+ int64_t timeUs;
+ CHECK(msg->findInt32("trackIndex", &trackIndex));
+ CHECK(msg->findInt32("select", &select));
+ CHECK(msg->findInt64("timeUs", &timeUs));
+
+ sp<AMessage> response = new AMessage;
+ status_t err = doSelectTrack(trackIndex, select, timeUs);
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select, int64_t timeUs) {
+ if (trackIndex >= mSources.size()) {
+ return BAD_INDEX;
+ }
+
+ if (!select) {
+ Track* track = NULL;
+ if (mSubtitleTrack.mSource != NULL && trackIndex == mSubtitleTrack.mIndex) {
+ track = &mSubtitleTrack;
+ mFetchSubtitleDataGeneration++;
+ } else if (mTimedTextTrack.mSource != NULL && trackIndex == mTimedTextTrack.mIndex) {
+ track = &mTimedTextTrack;
+ mFetchTimedTextDataGeneration++;
+ }
+ if (track == NULL) {
+ return INVALID_OPERATION;
+ }
+ track->mSource->stop();
+ track->mSource = NULL;
+ track->mPackets->clear();
+ return OK;
+ }
+
+ const sp<MediaSource> source = mSources.itemAt(trackIndex);
+ sp<MetaData> meta = source->getFormat();
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+ if (!strncasecmp(mime, "text/", 5)) {
+ bool isSubtitle = strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP);
+ Track *track = isSubtitle ? &mSubtitleTrack : &mTimedTextTrack;
+ if (track->mSource != NULL && track->mIndex == trackIndex) {
+ return OK;
+ }
+ track->mIndex = trackIndex;
+ if (track->mSource != NULL) {
+ track->mSource->stop();
+ }
+ track->mSource = mSources.itemAt(trackIndex);
+ track->mSource->start();
+ if (track->mPackets == NULL) {
+ track->mPackets = new AnotherPacketSource(track->mSource->getFormat());
+ } else {
+ track->mPackets->clear();
+ track->mPackets->setFormat(track->mSource->getFormat());
+
+ }
+
+ if (isSubtitle) {
+ mFetchSubtitleDataGeneration++;
+ } else {
+ mFetchTimedTextDataGeneration++;
+ }
+
+ status_t eosResult; // ignored
+ if (mSubtitleTrack.mSource != NULL
+ && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchSubtitleDataGeneration);
+ msg->post();
+ }
+
+ if (mTimedTextTrack.mSource != NULL
+ && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) {
+ sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id());
+ msg->setInt64("timeUs", timeUs);
+ msg->setInt32("generation", mFetchTimedTextDataGeneration);
+ msg->post();
+ }
+
+ return OK;
+ } else if (!strncasecmp(mime, "audio/", 6) || !strncasecmp(mime, "video/", 6)) {
+ bool audio = !strncasecmp(mime, "audio/", 6);
+ Track *track = audio ? &mAudioTrack : &mVideoTrack;
+ if (track->mSource != NULL && track->mIndex == trackIndex) {
+ return OK;
+ }
+
+ sp<AMessage> msg = new AMessage(kWhatChangeAVSource, id());
+ msg->setInt32("trackIndex", trackIndex);
+ msg->post();
+ return OK;
+ }
+
+ return INVALID_OPERATION;
+}
+
status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs) {
+ sp<AMessage> msg = new AMessage(kWhatSeek, id());
+ msg->setInt64("seekTimeUs", seekTimeUs);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+
+ return err;
+}
+
+void NuPlayer::GenericSource::onSeek(sp<AMessage> msg) {
+ int64_t seekTimeUs;
+ CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+
+ sp<AMessage> response = new AMessage;
+ status_t err = doSeek(seekTimeUs);
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+status_t NuPlayer::GenericSource::doSeek(int64_t seekTimeUs) {
+ // If the Widevine source is stopped, do not attempt to read any
+ // more buffers.
+ if (mStopRead) {
+ return INVALID_OPERATION;
+ }
if (mVideoTrack.mSource != NULL) {
int64_t actualTimeUs;
- readBuffer(false /* audio */, seekTimeUs, &actualTimeUs);
+ readBuffer(MEDIA_TRACK_TYPE_VIDEO, seekTimeUs, &actualTimeUs);
seekTimeUs = actualTimeUs;
+ mVideoLastDequeueTimeUs = seekTimeUs;
}
if (mAudioTrack.mSource != NULL) {
- readBuffer(true /* audio */, seekTimeUs);
+ readBuffer(MEDIA_TRACK_TYPE_AUDIO, seekTimeUs);
+ mAudioLastDequeueTimeUs = seekTimeUs;
+ }
+
+ setDrmPlaybackStatusIfNeeded(Playback::START, seekTimeUs / 1000);
+ if (!mStarted) {
+ setDrmPlaybackStatusIfNeeded(Playback::PAUSE, 0);
}
+ // If currently buffering, post kWhatBufferingEnd first, so that
+ // NuPlayer resumes. Otherwise, if cache hits high watermark
+ // before new polling happens, no one will resume the playback.
+ stopBufferingIfNecessary();
+ restartPollBuffering();
+
return OK;
}
+sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(
+ MediaBuffer* mb,
+ media_track_type trackType,
+ int64_t /* seekTimeUs */,
+ int64_t *actualTimeUs) {
+ bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO;
+ size_t outLength = mb->range_length();
+
+ if (audio && mAudioIsVorbis) {
+ outLength += sizeof(int32_t);
+ }
+
+ sp<ABuffer> ab;
+ if (mIsSecure && !audio) {
+ // data is already provided in the buffer
+ ab = new ABuffer(NULL, mb->range_length());
+ mb->add_ref();
+ ab->setMediaBufferBase(mb);
+ } else {
+ ab = new ABuffer(outLength);
+ memcpy(ab->data(),
+ (const uint8_t *)mb->data() + mb->range_offset(),
+ mb->range_length());
+ }
+
+ if (audio && mAudioIsVorbis) {
+ int32_t numPageSamples;
+ if (!mb->meta_data()->findInt32(kKeyValidSamples, &numPageSamples)) {
+ numPageSamples = -1;
+ }
+
+ uint8_t* abEnd = ab->data() + mb->range_length();
+ memcpy(abEnd, &numPageSamples, sizeof(numPageSamples));
+ }
+
+ sp<AMessage> meta = ab->meta();
+
+ int64_t timeUs;
+ CHECK(mb->meta_data()->findInt64(kKeyTime, &timeUs));
+ meta->setInt64("timeUs", timeUs);
+
+#if 0
+ // Temporarily disable pre-roll till we have a full solution to handle
+    // both single seek and continuous seek gracefully.
+ if (seekTimeUs > timeUs) {
+ sp<AMessage> extra = new AMessage;
+ extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
+ meta->setMessage("extra", extra);
+ }
+#endif
+
+ if (trackType == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+ const char *mime;
+ CHECK(mTimedTextTrack.mSource != NULL
+ && mTimedTextTrack.mSource->getFormat()->findCString(kKeyMIMEType, &mime));
+ meta->setString("mime", mime);
+ }
+
+ int64_t durationUs;
+ if (mb->meta_data()->findInt64(kKeyDuration, &durationUs)) {
+ meta->setInt64("durationUs", durationUs);
+ }
+
+ if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ meta->setInt32("trackIndex", mSubtitleTrack.mIndex);
+ }
+
+ if (actualTimeUs) {
+ *actualTimeUs = timeUs;
+ }
+
+ mb->release();
+ mb = NULL;
+
+ return ab;
+}
+
+void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) {
+ Mutex::Autolock _l(mReadBufferLock);
+
+ if ((mPendingReadBufferTypes & (1 << trackType)) == 0) {
+ mPendingReadBufferTypes |= (1 << trackType);
+ sp<AMessage> msg = new AMessage(kWhatReadBuffer, id());
+ msg->setInt32("trackType", trackType);
+ msg->post();
+ }
+}
+
+void NuPlayer::GenericSource::onReadBuffer(sp<AMessage> msg) {
+ int32_t tmpType;
+ CHECK(msg->findInt32("trackType", &tmpType));
+ media_track_type trackType = (media_track_type)tmpType;
+ readBuffer(trackType);
+ {
+ // only protect the variable change, as readBuffer may
+ // take considerable time.
+ Mutex::Autolock _l(mReadBufferLock);
+ mPendingReadBufferTypes &= ~(1 << trackType);
+ }
+}
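
postReadBuffer()/onReadBuffer() above coalesce read requests per track type: a bit in mPendingReadBufferTypes keeps at most one kWhatReadBuffer message in flight, and the bit is cleared only after the (possibly slow) read has finished, with the lock held just for the flag updates. A simplified sketch of that bookkeeping, without the looper and with hypothetical names:

    #include <cstdint>
    #include <mutex>

    enum TrackType { TRACK_VIDEO = 0, TRACK_AUDIO = 1, TRACK_SUBTITLE = 2 };

    struct ReadScheduler {
        std::mutex lock;
        uint32_t pending = 0;   // one bit per track type

        // Returns true if a new read request should be posted; duplicates are dropped.
        bool post(TrackType type) {
            std::lock_guard<std::mutex> guard(lock);
            if (pending & (1u << type)) {
                return false;            // a read for this track is already queued
            }
            pending |= (1u << type);
            return true;
        }

        // Called after the read completes; only the flag update is done under
        // the lock, mirroring onReadBuffer().
        void done(TrackType type) {
            std::lock_guard<std::mutex> guard(lock);
            pending &= ~(1u << type);
        }
    };
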
+
void NuPlayer::GenericSource::readBuffer(
- bool audio, int64_t seekTimeUs, int64_t *actualTimeUs) {
- Track *track = audio ? &mAudioTrack : &mVideoTrack;
- CHECK(track->mSource != NULL);
+ media_track_type trackType, int64_t seekTimeUs, int64_t *actualTimeUs, bool formatChange) {
+ // Do not read data if Widevine source is stopped
+ if (mStopRead) {
+ return;
+ }
+ Track *track;
+ size_t maxBuffers = 1;
+ switch (trackType) {
+ case MEDIA_TRACK_TYPE_VIDEO:
+ track = &mVideoTrack;
+ if (mIsWidevine) {
+ maxBuffers = 2;
+ }
+ break;
+ case MEDIA_TRACK_TYPE_AUDIO:
+ track = &mAudioTrack;
+ if (mIsWidevine) {
+ maxBuffers = 8;
+ } else {
+ maxBuffers = 64;
+ }
+ break;
+ case MEDIA_TRACK_TYPE_SUBTITLE:
+ track = &mSubtitleTrack;
+ break;
+ case MEDIA_TRACK_TYPE_TIMEDTEXT:
+ track = &mTimedTextTrack;
+ break;
+ default:
+ TRESPASS();
+ }
+
+ if (track->mSource == NULL) {
+ return;
+ }
if (actualTimeUs) {
*actualTimeUs = seekTimeUs;
@@ -216,64 +1548,55 @@ void NuPlayer::GenericSource::readBuffer(
bool seeking = false;
if (seekTimeUs >= 0) {
- options.setSeekTo(seekTimeUs);
+ options.setSeekTo(seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
seeking = true;
}
- for (;;) {
+ if (mIsWidevine) {
+ options.setNonBlocking();
+ }
+
+ for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
MediaBuffer *mbuf;
status_t err = track->mSource->read(&mbuf, &options);
options.clearSeekTo();
if (err == OK) {
- size_t outLength = mbuf->range_length();
-
- if (audio && mAudioIsVorbis) {
- outLength += sizeof(int32_t);
- }
-
- sp<ABuffer> buffer = new ABuffer(outLength);
-
- memcpy(buffer->data(),
- (const uint8_t *)mbuf->data() + mbuf->range_offset(),
- mbuf->range_length());
-
- if (audio && mAudioIsVorbis) {
- int32_t numPageSamples;
- if (!mbuf->meta_data()->findInt32(
- kKeyValidSamples, &numPageSamples)) {
- numPageSamples = -1;
- }
-
- memcpy(buffer->data() + mbuf->range_length(),
- &numPageSamples,
- sizeof(numPageSamples));
- }
-
int64_t timeUs;
CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
-
- buffer->meta()->setInt64("timeUs", timeUs);
-
- if (actualTimeUs) {
- *actualTimeUs = timeUs;
+ if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
+ mAudioTimeUs = timeUs;
+ } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
+ mVideoTimeUs = timeUs;
}
- mbuf->release();
- mbuf = NULL;
-
- if (seeking) {
- track->mPackets->queueDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK, NULL);
+ // formatChange && seeking: track whose source is changed during selection
+ // formatChange && !seeking: track whose source is not changed during selection
+ // !formatChange: normal seek
+ if ((seeking || formatChange)
+ && (trackType == MEDIA_TRACK_TYPE_AUDIO
+ || trackType == MEDIA_TRACK_TYPE_VIDEO)) {
+ ATSParser::DiscontinuityType type = (formatChange && seeking)
+ ? ATSParser::DISCONTINUITY_FORMATCHANGE
+ : ATSParser::DISCONTINUITY_NONE;
+ track->mPackets->queueDiscontinuity(type, NULL, true /* discard */);
}
+ sp<ABuffer> buffer = mediaBufferToABuffer(
+ mbuf, trackType, seekTimeUs, actualTimeUs);
track->mPackets->queueAccessUnit(buffer);
+ formatChange = false;
+ seeking = false;
+ ++numBuffers;
+ } else if (err == WOULD_BLOCK) {
break;
} else if (err == INFO_FORMAT_CHANGED) {
#if 0
track->mPackets->queueDiscontinuity(
- ATSParser::DISCONTINUITY_FORMATCHANGE, NULL);
+ ATSParser::DISCONTINUITY_FORMATCHANGE,
+ NULL,
+ false /* discard */);
#endif
} else {
track->mPackets->signalEOS(err);
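
The rewritten readBuffer() above drains up to maxBuffers access units per call (64 for non-Widevine audio, 2 for Widevine video) and, when non-blocking reads are enabled, returns early on WOULD_BLOCK instead of stalling the looper. A compact sketch of that control flow; the types here are stubs, not the real MediaSource interface:

    #include <cstdio>

    enum Status { OK_STATUS, WOULD_BLOCK_STATUS, EOS_STATUS };

    // Stub standing in for a single non-blocking extractor read.
    Status readOne(int &unitsLeft) {
        if (unitsLeft == 0) return WOULD_BLOCK_STATUS;
        --unitsLeft;
        return OK_STATUS;
    }

    // Drain up to maxBuffers access units, stopping early when the source would block.
    int drain(int maxBuffers, int unitsAvailable) {
        int queued = 0;
        for (int n = 0; n < maxBuffers; ) {
            Status err = readOne(unitsAvailable);
            if (err == OK_STATUS) {
                ++queued;       // queueAccessUnit() in the real code
                ++n;
            } else if (err == WOULD_BLOCK_STATUS) {
                break;          // try again on the next postReadBuffer()
            } else {
                break;          // EOS or error: signalEOS() in the real code
            }
        }
        return queued;
    }

    int main() {
        printf("queued %d of 64\n", drain(64, 10));   // prints "queued 10 of 64"
        return 0;
    }
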
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 2da680c..2d73ea9 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -23,58 +23,192 @@
#include "ATSParser.h"
+#include <media/mediaplayer.h>
+
namespace android {
+class DecryptHandle;
+class DrmManagerClient;
struct AnotherPacketSource;
struct ARTSPController;
struct DataSource;
+struct IMediaHTTPService;
struct MediaSource;
+class MediaBuffer;
+struct NuCachedSource2;
+struct WVMExtractor;
struct NuPlayer::GenericSource : public NuPlayer::Source {
- GenericSource(
- const sp<AMessage> &notify,
+ GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid);
+
+ status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid = false,
- uid_t uid = 0);
+ const KeyedVector<String8, String8> *headers);
- GenericSource(
- const sp<AMessage> &notify,
- int fd, int64_t offset, int64_t length);
+ status_t setDataSource(int fd, int64_t offset, int64_t length);
virtual void prepareAsync();
virtual void start();
+ virtual void stop();
+ virtual void pause();
+ virtual void resume();
+
+ virtual void disconnect();
virtual status_t feedMoreTSData();
+ virtual sp<MetaData> getFileFormatMeta() const;
+
virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
virtual status_t getDuration(int64_t *durationUs);
+ virtual size_t getTrackCount() const;
+ virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
+ virtual ssize_t getSelectedTrack(media_track_type type) const;
+ virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
virtual status_t seekTo(int64_t seekTimeUs);
+ virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers);
+
protected:
virtual ~GenericSource();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
virtual sp<MetaData> getFormatMeta(bool audio);
private:
+ enum {
+ kWhatPrepareAsync,
+ kWhatFetchSubtitleData,
+ kWhatFetchTimedTextData,
+ kWhatSendSubtitleData,
+ kWhatSendTimedTextData,
+ kWhatChangeAVSource,
+ kWhatPollBuffering,
+ kWhatGetFormat,
+ kWhatGetSelectedTrack,
+ kWhatSelectTrack,
+ kWhatSeek,
+ kWhatReadBuffer,
+ kWhatStopWidevine,
+ kWhatStart,
+ kWhatResume,
+ kWhatSecureDecodersInstantiated,
+ };
+
struct Track {
+ size_t mIndex;
sp<MediaSource> mSource;
sp<AnotherPacketSource> mPackets;
};
+ Vector<sp<MediaSource> > mSources;
Track mAudioTrack;
+ int64_t mAudioTimeUs;
+ int64_t mAudioLastDequeueTimeUs;
Track mVideoTrack;
+ int64_t mVideoTimeUs;
+ int64_t mVideoLastDequeueTimeUs;
+ Track mSubtitleTrack;
+ Track mTimedTextTrack;
+ int32_t mFetchSubtitleDataGeneration;
+ int32_t mFetchTimedTextDataGeneration;
int64_t mDurationUs;
bool mAudioIsVorbis;
-
- void initFromDataSource(const sp<DataSource> &dataSource);
-
+ bool mIsWidevine;
+ bool mIsSecure;
+ bool mIsStreaming;
+ bool mUIDValid;
+ uid_t mUID;
+ sp<IMediaHTTPService> mHTTPService;
+ AString mUri;
+ KeyedVector<String8, String8> mUriHeaders;
+ int mFd;
+ int64_t mOffset;
+ int64_t mLength;
+
+ sp<DataSource> mDataSource;
+ sp<NuCachedSource2> mCachedSource;
+ sp<DataSource> mHttpSource;
+ sp<WVMExtractor> mWVMExtractor;
+ sp<MetaData> mFileMeta;
+ DrmManagerClient *mDrmManagerClient;
+ sp<DecryptHandle> mDecryptHandle;
+ bool mStarted;
+ bool mStopRead;
+ String8 mContentType;
+ AString mSniffedMIME;
+ off64_t mMetaDataSize;
+ int64_t mBitrate;
+ int32_t mPollBufferingGeneration;
+ uint32_t mPendingReadBufferTypes;
+ bool mBuffering;
+ bool mPrepareBuffering;
+ mutable Mutex mReadBufferLock;
+
+ sp<ALooper> mLooper;
+
+ void resetDataSource();
+
+ status_t initFromDataSource();
+ void checkDrmStatus(const sp<DataSource>& dataSource);
+ int64_t getLastReadPosition();
+ void setDrmPlaybackStatusIfNeeded(int playbackStatus, int64_t position);
+
+ status_t prefillCacheIfNecessary();
+
+ void notifyPreparedAndCleanup(status_t err);
+ void onSecureDecodersInstantiated(status_t err);
+ void finishPrepareAsync();
+ status_t startSources();
+
+ void onGetFormatMeta(sp<AMessage> msg) const;
+ sp<MetaData> doGetFormatMeta(bool audio) const;
+
+ void onGetSelectedTrack(sp<AMessage> msg) const;
+ ssize_t doGetSelectedTrack(media_track_type type) const;
+
+ void onSelectTrack(sp<AMessage> msg);
+ status_t doSelectTrack(size_t trackIndex, bool select, int64_t timeUs);
+
+ void onSeek(sp<AMessage> msg);
+ status_t doSeek(int64_t seekTimeUs);
+
+ void onPrepareAsync();
+
+ void fetchTextData(
+ uint32_t what, media_track_type type,
+ int32_t curGen, sp<AnotherPacketSource> packets, sp<AMessage> msg);
+
+ void sendTextData(
+ uint32_t what, media_track_type type,
+ int32_t curGen, sp<AnotherPacketSource> packets, sp<AMessage> msg);
+
+ sp<ABuffer> mediaBufferToABuffer(
+ MediaBuffer *mbuf,
+ media_track_type trackType,
+ int64_t seekTimeUs,
+ int64_t *actualTimeUs = NULL);
+
+ void postReadBuffer(media_track_type trackType);
+ void onReadBuffer(sp<AMessage> msg);
void readBuffer(
- bool audio,
- int64_t seekTimeUs = -1ll, int64_t *actualTimeUs = NULL);
+ media_track_type trackType,
+ int64_t seekTimeUs = -1ll, int64_t *actualTimeUs = NULL, bool formatChange = false);
+
+ void schedulePollBuffering();
+ void cancelPollBuffering();
+ void restartPollBuffering();
+ void onPollBuffering();
+ void notifyBufferingUpdate(int percentage);
+ void startBufferingIfNecessary();
+ void stopBufferingIfNecessary();
+ void sendCacheStats();
+ void ensureCacheIsFetching();
DISALLOW_EVIL_CONSTRUCTORS(GenericSource);
};
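
Several of the counters declared above (mFetchSubtitleDataGeneration, mFetchTimedTextDataGeneration, mPollBufferingGeneration) follow the same stale-message guard that NuPlayer.cpp applies to decoder and renderer notifications further down: each posted message records the generation it was issued under, and a reply is dropped if the generation has since moved on. A minimal sketch of the idea, names hypothetical:

    #include <cstdio>

    struct PollSession {
        int generation = 0;

        // Invalidate all messages issued under older generations.
        void restart() { ++generation; }

        // A delayed message carries the generation it was posted with; handle it
        // only if nothing restarted or cancelled polling in the meantime.
        bool shouldHandle(int messageGeneration) const {
            return messageGeneration == generation;
        }
    };

    int main() {
        PollSession poll;
        int posted = poll.generation;   // message posted with generation 0
        poll.restart();                 // e.g. a seek restarts buffering polls
        printf("stale message handled: %s\n",
               poll.shouldHandle(posted) ? "yes" : "no");   // prints "no"
        return 0;
    }
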
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index f1782cc..a26ef9e 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -24,6 +24,7 @@
#include "LiveDataSource.h"
#include "LiveSession.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -34,13 +35,12 @@ namespace android {
NuPlayer::HTTPLiveSource::HTTPLiveSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid, uid_t uid)
+ const KeyedVector<String8, String8> *headers)
: Source(notify),
+ mHTTPService(httpService),
mURL(url),
- mUIDValid(uidValid),
- mUID(uid),
mFlags(0),
mFinalResult(OK),
mOffset(0),
@@ -62,25 +62,31 @@ NuPlayer::HTTPLiveSource::HTTPLiveSource(
NuPlayer::HTTPLiveSource::~HTTPLiveSource() {
if (mLiveSession != NULL) {
mLiveSession->disconnect();
- mLiveSession.clear();
+ mLiveLooper->unregisterHandler(mLiveSession->id());
+ mLiveLooper->unregisterHandler(id());
mLiveLooper->stop();
+
+ mLiveSession.clear();
mLiveLooper.clear();
}
}
void NuPlayer::HTTPLiveSource::prepareAsync() {
- mLiveLooper = new ALooper;
- mLiveLooper->setName("http live");
- mLiveLooper->start();
+ if (mLiveLooper == NULL) {
+ mLiveLooper = new ALooper;
+ mLiveLooper->setName("http live");
+ mLiveLooper->start();
+
+ mLiveLooper->registerHandler(this);
+ }
sp<AMessage> notify = new AMessage(kWhatSessionNotify, id());
mLiveSession = new LiveSession(
notify,
(mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
- mUIDValid,
- mUID);
+ mHTTPService);
mLiveLooper->registerHandler(mLiveSession);
@@ -92,6 +98,10 @@ void NuPlayer::HTTPLiveSource::start() {
}
sp<AMessage> NuPlayer::HTTPLiveSource::getFormat(bool audio) {
+ if (mLiveSession == NULL) {
+ return NULL;
+ }
+
sp<AMessage> format;
status_t err = mLiveSession->getStreamFormat(
audio ? LiveSession::STREAMTYPE_AUDIO
@@ -121,11 +131,23 @@ status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) {
return mLiveSession->getDuration(durationUs);
}
-status_t NuPlayer::HTTPLiveSource::getTrackInfo(Parcel *reply) const {
- return mLiveSession->getTrackInfo(reply);
+size_t NuPlayer::HTTPLiveSource::getTrackCount() const {
+ return mLiveSession->getTrackCount();
+}
+
+sp<AMessage> NuPlayer::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {
+ return mLiveSession->getTrackInfo(trackIndex);
+}
+
+ssize_t NuPlayer::HTTPLiveSource::getSelectedTrack(media_track_type type) const {
+ if (mLiveSession == NULL) {
+ return -1;
+ } else {
+ return mLiveSession->getSelectedTrack(type);
+ }
}
-status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select) {
+status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select, int64_t /*timeUs*/) {
status_t err = mLiveSession->selectTrack(trackIndex, select);
if (err == OK) {
@@ -140,7 +162,7 @@ status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select) {
// LiveSession::selectTrack returns BAD_VALUE when selecting the currently
// selected track, or unselecting a non-selected track. In this case it's a
// no-op so we return OK.
- return (err == OK || err == BAD_VALUE) ? OK : err;
+ return (err == OK || err == BAD_VALUE) ? (status_t)OK : err;
}
status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) {
@@ -207,9 +229,9 @@ void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
int32_t height;
if (format != NULL &&
format->findInt32("width", &width) && format->findInt32("height", &height)) {
- notifyVideoSizeChanged(width, height);
+ notifyVideoSizeChanged(format);
} else {
- notifyVideoSizeChanged(0, 0);
+ notifyVideoSizeChanged();
}
uint32_t flags = FLAG_CAN_PAUSE;
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index bcc3f8b..bbb8981 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -28,10 +28,9 @@ struct LiveSession;
struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
HTTPLiveSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid = false,
- uid_t uid = 0);
+ const KeyedVector<String8, String8> *headers);
virtual void prepareAsync();
virtual void start();
@@ -41,8 +40,10 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
virtual status_t feedMoreTSData();
virtual status_t getDuration(int64_t *durationUs);
- virtual status_t getTrackInfo(Parcel *reply) const;
- virtual status_t selectTrack(size_t trackIndex, bool select);
+ virtual size_t getTrackCount() const;
+ virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
+ virtual ssize_t getSelectedTrack(media_track_type /* type */) const;
+ virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
virtual status_t seekTo(int64_t seekTimeUs);
protected:
@@ -61,10 +62,9 @@ private:
kWhatFetchSubtitleData,
};
+ sp<IMediaHTTPService> mHTTPService;
AString mURL;
KeyedVector<String8, String8> mExtraHeaders;
- bool mUIDValid;
- uid_t mUID;
uint32_t mFlags;
status_t mFinalResult;
off64_t mOffset;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 3669a5b..aeea204 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -21,25 +21,27 @@
#include "NuPlayer.h"
#include "HTTPLiveSource.h"
+#include "NuPlayerCCDecoder.h"
#include "NuPlayerDecoder.h"
+#include "NuPlayerDecoderBase.h"
+#include "NuPlayerDecoderPassThrough.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"
-#include "mp4/MP4Source.h"
+#include "TextDescriptions.h"
#include "ATSParser.h"
-#include "SoftwareRenderer.h"
+#include <cutils/properties.h>
-#include <cutils/properties.h> // for property_get
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -62,20 +64,37 @@ private:
};
struct NuPlayer::SeekAction : public Action {
- SeekAction(int64_t seekTimeUs)
- : mSeekTimeUs(seekTimeUs) {
+ SeekAction(int64_t seekTimeUs, bool needNotify)
+ : mSeekTimeUs(seekTimeUs),
+ mNeedNotify(needNotify) {
}
virtual void execute(NuPlayer *player) {
- player->performSeek(mSeekTimeUs);
+ player->performSeek(mSeekTimeUs, mNeedNotify);
}
private:
int64_t mSeekTimeUs;
+ bool mNeedNotify;
DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
};
+struct NuPlayer::ResumeDecoderAction : public Action {
+ ResumeDecoderAction(bool needNotify)
+ : mNeedNotify(needNotify) {
+ }
+
+ virtual void execute(NuPlayer *player) {
+ player->performResumeDecoders(mNeedNotify);
+ }
+
+private:
+ bool mNeedNotify;
+
+ DISALLOW_EVIL_CONSTRUCTORS(ResumeDecoderAction);
+};
+
struct NuPlayer::SetSurfaceAction : public Action {
SetSurfaceAction(const sp<NativeWindowWrapper> &wrapper)
: mWrapper(wrapper) {
@@ -91,21 +110,21 @@ private:
DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
};
-struct NuPlayer::ShutdownDecoderAction : public Action {
- ShutdownDecoderAction(bool audio, bool video)
+struct NuPlayer::FlushDecoderAction : public Action {
+ FlushDecoderAction(FlushCommand audio, FlushCommand video)
: mAudio(audio),
mVideo(video) {
}
virtual void execute(NuPlayer *player) {
- player->performDecoderShutdown(mAudio, mVideo);
+ player->performDecoderFlush(mAudio, mVideo);
}
private:
- bool mAudio;
- bool mVideo;
+ FlushCommand mAudio;
+ FlushCommand mVideo;
- DISALLOW_EVIL_CONSTRUCTORS(ShutdownDecoderAction);
+ DISALLOW_EVIL_CONSTRUCTORS(FlushDecoderAction);
};
struct NuPlayer::PostMessageAction : public Action {
@@ -147,23 +166,24 @@ private:
NuPlayer::NuPlayer()
: mUIDValid(false),
mSourceFlags(0),
- mVideoIsAVC(false),
- mNeedsSwRenderer(false),
+ mOffloadAudio(false),
+ mAudioDecoderGeneration(0),
+ mVideoDecoderGeneration(0),
+ mRendererGeneration(0),
mAudioEOS(false),
mVideoEOS(false),
mScanSourcesPending(false),
mScanSourcesGeneration(0),
mPollDurationGeneration(0),
- mTimeDiscontinuityPending(false),
+ mTimedTextGeneration(0),
mFlushingAudio(NONE),
mFlushingVideo(NONE),
- mSkipRenderingAudioUntilMediaTimeUs(-1ll),
- mSkipRenderingVideoUntilMediaTimeUs(-1ll),
- mVideoLateByUs(0ll),
- mNumFramesTotal(0ll),
- mNumFramesDropped(0ll),
+ mResumePending(false),
mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
- mStarted(false) {
+ mStarted(false),
+ mPaused(false),
+ mPausedByClient(false) {
+ clearFlushComplete();
}
NuPlayer::~NuPlayer() {
@@ -183,14 +203,7 @@ void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) {
sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());
- char prop[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.use-mp4source", prop, NULL)
- && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) {
- msg->setObject("source", new MP4Source(notify, source));
- } else {
- msg->setObject("source", new StreamingSource(notify, source));
- }
-
+ msg->setObject("source", new StreamingSource(notify, source));
msg->post();
}
@@ -212,7 +225,10 @@ static bool IsHTTPLiveURL(const char *url) {
}
void NuPlayer::setDataSourceAsync(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+
sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
size_t len = strlen(url);
@@ -220,18 +236,31 @@ void NuPlayer::setDataSourceAsync(
sp<Source> source;
if (IsHTTPLiveURL(url)) {
- source = new HTTPLiveSource(notify, url, headers, mUIDValid, mUID);
+ source = new HTTPLiveSource(notify, httpService, url, headers);
} else if (!strncasecmp(url, "rtsp://", 7)) {
- source = new RTSPSource(notify, url, headers, mUIDValid, mUID);
+ source = new RTSPSource(
+ notify, httpService, url, headers, mUIDValid, mUID);
} else if ((!strncasecmp(url, "http://", 7)
|| !strncasecmp(url, "https://", 8))
&& ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
|| strstr(url, ".sdp?"))) {
- source = new RTSPSource(notify, url, headers, mUIDValid, mUID, true);
+ source = new RTSPSource(
+ notify, httpService, url, headers, mUIDValid, mUID, true);
} else {
- source = new GenericSource(notify, url, headers, mUIDValid, mUID);
+ sp<GenericSource> genericSource =
+ new GenericSource(notify, mUIDValid, mUID);
+ // Don't set FLAG_SECURE on mSourceFlags here for widevine.
+ // The correct flags will be updated in Source::kWhatFlagsChanged
+ // handler when GenericSource is prepared.
+
+ status_t err = genericSource->setDataSource(httpService, url, headers);
+
+ if (err == OK) {
+ source = genericSource;
+ } else {
+ ALOGE("Failed to set data source!");
+ }
}
-
msg->setObject("source", source);
msg->post();
}
@@ -241,7 +270,16 @@ void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());
- sp<Source> source = new GenericSource(notify, fd, offset, length);
+ sp<GenericSource> source =
+ new GenericSource(notify, mUIDValid, mUID);
+
+ status_t err = source->setDataSource(fd, offset, length);
+
+ if (err != OK) {
+ ALOGE("Failed to set data source!");
+ source = NULL;
+ }
+
msg->setObject("source", source);
msg->post();
}
@@ -260,7 +298,7 @@ void NuPlayer::setVideoSurfaceTextureAsync(
msg->setObject(
"native-window",
new NativeWindowWrapper(
- new Surface(bufferProducer)));
+ new Surface(bufferProducer, true /* controlledByApp */)));
}
msg->post();
@@ -280,37 +318,53 @@ void NuPlayer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
-void NuPlayer::resume() {
- (new AMessage(kWhatResume, id()))->post();
-}
-
void NuPlayer::resetAsync() {
+ if (mSource != NULL) {
+ // During a reset, the data source might be unresponsive already, we need to
+ // disconnect explicitly so that reads exit promptly.
+ // We can't queue the disconnect request to the looper, as it might be
+ // queued behind a stuck read and never gets processed.
+ // Doing the disconnect outside the looper allows the pending reads to exit
+ // (either successfully or with error).
+ mSource->disconnect();
+ }
+
(new AMessage(kWhatReset, id()))->post();
}
-void NuPlayer::seekToAsync(int64_t seekTimeUs) {
+void NuPlayer::seekToAsync(int64_t seekTimeUs, bool needNotify) {
sp<AMessage> msg = new AMessage(kWhatSeek, id());
msg->setInt64("seekTimeUs", seekTimeUs);
+ msg->setInt32("needNotify", needNotify);
msg->post();
}
-// static
-bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
- switch (state) {
- case FLUSHING_DECODER:
- if (needShutdown != NULL) {
- *needShutdown = false;
- }
- return true;
- case FLUSHING_DECODER_SHUTDOWN:
- if (needShutdown != NULL) {
- *needShutdown = true;
- }
- return true;
+void NuPlayer::writeTrackInfo(
+ Parcel* reply, const sp<AMessage> format) const {
+ int32_t trackType;
+ CHECK(format->findInt32("type", &trackType));
- default:
- return false;
+ AString lang;
+ CHECK(format->findString("language", &lang));
+
+ reply->writeInt32(2); // write something non-zero
+ reply->writeInt32(trackType);
+ reply->writeString16(String16(lang.c_str()));
+
+ if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ int32_t isAuto, isDefault, isForced;
+ CHECK(format->findInt32("auto", &isAuto));
+ CHECK(format->findInt32("default", &isDefault));
+ CHECK(format->findInt32("forced", &isForced));
+
+ reply->writeString16(String16(mime.c_str()));
+ reply->writeInt32(isAuto);
+ reply->writeInt32(isDefault);
+ reply->writeInt32(isForced);
}
}
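
writeTrackInfo() emits one record per track: a non-zero field-count marker, the track type, the language, and, only for subtitle tracks, the MIME type followed by the auto/default/forced flags. A flattened sketch of that record order, using a hypothetical string vector rather than the Parcel API:

    #include <cstdint>
    #include <string>
    #include <vector>

    // Hypothetical flattened form of one track record as written to the reply.
    std::vector<std::string> packTrackRecord(int32_t type, const std::string &lang,
                                             const std::string &mime,
                                             bool isSubtitle, bool isAuto,
                                             bool isDefault, bool isForced) {
        std::vector<std::string> fields;
        fields.push_back("2");                  // field count marker (non-zero)
        fields.push_back(std::to_string(type)); // MEDIA_TRACK_TYPE_*
        fields.push_back(lang);                 // language code
        if (isSubtitle) {                       // subtitle tracks carry extra fields
            fields.push_back(mime);
            fields.push_back(isAuto ? "1" : "0");
            fields.push_back(isDefault ? "1" : "0");
            fields.push_back(isForced ? "1" : "0");
        }
        return fields;
    }
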
@@ -322,17 +376,19 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
CHECK(mSource == NULL);
+ status_t err = OK;
sp<RefBase> obj;
CHECK(msg->findObject("source", &obj));
-
- mSource = static_cast<Source *>(obj.get());
-
- looper()->registerHandler(mSource);
+ if (obj != NULL) {
+ mSource = static_cast<Source *>(obj.get());
+ } else {
+ err = UNKNOWN_ERROR;
+ }
CHECK(mDriver != NULL);
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
- driver->notifySetDataSourceCompleted(OK);
+ driver->notifySetDataSourceCompleted(err);
}
break;
}
@@ -348,16 +404,58 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ Parcel* reply;
+ CHECK(msg->findPointer("reply", (void**)&reply));
+
+ size_t inbandTracks = 0;
+ if (mSource != NULL) {
+ inbandTracks = mSource->getTrackCount();
+ }
+
+ size_t ccTracks = 0;
+ if (mCCDecoder != NULL) {
+ ccTracks = mCCDecoder->getTrackCount();
+ }
+
+ // total track count
+ reply->writeInt32(inbandTracks + ccTracks);
+
+ // write inband tracks
+ for (size_t i = 0; i < inbandTracks; ++i) {
+ writeTrackInfo(reply, mSource->getTrackInfo(i));
+ }
+
+ // write CC track
+ for (size_t i = 0; i < ccTracks; ++i) {
+ writeTrackInfo(reply, mCCDecoder->getTrackInfo(i));
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatGetSelectedTrack:
+ {
status_t err = INVALID_OPERATION;
if (mSource != NULL) {
+ err = OK;
+
+ int32_t type32;
+ CHECK(msg->findInt32("type", (int32_t*)&type32));
+ media_track_type type = (media_track_type)type32;
+ ssize_t selectedTrack = mSource->getSelectedTrack(type);
+
Parcel* reply;
CHECK(msg->findPointer("reply", (void**)&reply));
- err = mSource->getTrackInfo(reply);
+ reply->writeInt32(selectedTrack);
}
sp<AMessage> response = new AMessage;
response->setInt32("err", err);
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
response->postReply(replyID);
break;
}
@@ -367,13 +465,42 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ size_t trackIndex;
+ int32_t select;
+ int64_t timeUs;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+ CHECK(msg->findInt32("select", &select));
+ CHECK(msg->findInt64("timeUs", &timeUs));
+
status_t err = INVALID_OPERATION;
+
+ size_t inbandTracks = 0;
if (mSource != NULL) {
- size_t trackIndex;
- int32_t select;
- CHECK(msg->findSize("trackIndex", &trackIndex));
- CHECK(msg->findInt32("select", &select));
- err = mSource->selectTrack(trackIndex, select);
+ inbandTracks = mSource->getTrackCount();
+ }
+ size_t ccTracks = 0;
+ if (mCCDecoder != NULL) {
+ ccTracks = mCCDecoder->getTrackCount();
+ }
+
+ if (trackIndex < inbandTracks) {
+ err = mSource->selectTrack(trackIndex, select, timeUs);
+
+ if (!select && err == OK) {
+ int32_t type;
+ sp<AMessage> info = mSource->getTrackInfo(trackIndex);
+ if (info != NULL
+ && info->findInt32("type", &type)
+ && type == MEDIA_TRACK_TYPE_TIMEDTEXT) {
+ ++mTimedTextGeneration;
+ }
+ }
+ } else {
+ trackIndex -= inbandTracks;
+
+ if (trackIndex < ccTracks) {
+ err = mCCDecoder->selectTrack(trackIndex, select);
+ }
}
sp<AMessage> response = new AMessage;
@@ -409,24 +536,47 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
{
ALOGV("kWhatSetVideoNativeWindow");
- mDeferredActions.push_back(
- new ShutdownDecoderAction(
- false /* audio */, true /* video */));
-
sp<RefBase> obj;
CHECK(msg->findObject("native-window", &obj));
+ if (mSource == NULL || mSource->getFormat(false /* audio */) == NULL) {
+ performSetSurface(static_cast<NativeWindowWrapper *>(obj.get()));
+ break;
+ }
+
+ mDeferredActions.push_back(
+ new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+ FLUSH_CMD_SHUTDOWN /* video */));
+
mDeferredActions.push_back(
new SetSurfaceAction(
static_cast<NativeWindowWrapper *>(obj.get())));
if (obj != NULL) {
+ if (mStarted) {
+ // Issue a seek to refresh the video screen only if started; otherwise
+ // the extractor may not yet be started and will assert.
+ // If the video decoder is not set (perhaps audio only in this case),
+ // do not perform a seek as it is not needed.
+ int64_t currentPositionUs = 0;
+ if (getCurrentPosition(&currentPositionUs) == OK) {
+ mDeferredActions.push_back(
+ new SeekAction(currentPositionUs, false /* needNotify */));
+ }
+ }
+
// If there is a new surface texture, instantiate decoders
// again if possible.
mDeferredActions.push_back(
new SimpleAction(&NuPlayer::performScanSources));
}
+ // After a flush without shutdown, decoder is paused.
+ // Don't resume it until source seek is done, otherwise it could
+ // start pulling stale data too soon.
+ mDeferredActions.push_back(
+ new ResumeDecoderAction(false /* needNotify */));
+
processDeferredActions();
break;
}
@@ -445,34 +595,12 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatStart:
{
ALOGV("kWhatStart");
-
- mVideoIsAVC = false;
- mNeedsSwRenderer = false;
- mAudioEOS = false;
- mVideoEOS = false;
- mSkipRenderingAudioUntilMediaTimeUs = -1;
- mSkipRenderingVideoUntilMediaTimeUs = -1;
- mVideoLateByUs = 0;
- mNumFramesTotal = 0;
- mNumFramesDropped = 0;
- mStarted = true;
-
- mSource->start();
-
- uint32_t flags = 0;
-
- if (mSource->isRealTime()) {
- flags |= Renderer::FLAG_REAL_TIME;
+ if (mStarted) {
+ onResume();
+ } else {
+ onStart();
}
-
- mRenderer = new Renderer(
- mAudioSink,
- new AMessage(kWhatRendererNotify, id()),
- flags);
-
- looper()->registerHandler(mRenderer);
-
- postScanSources();
+ mPausedByClient = false;
break;
}
@@ -493,11 +621,28 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
bool mHadAnySourcesBefore =
(mAudioDecoder != NULL) || (mVideoDecoder != NULL);
+ // Initialize video before audio because successful initialization of
+ // video may change the deep buffer mode of audio.
if (mNativeWindow != NULL) {
instantiateDecoder(false, &mVideoDecoder);
}
- if (mAudioSink != NULL) {
+ // Don't try to re-open audio sink if there's an existing decoder.
+ if (mAudioSink != NULL && mAudioDecoder == NULL) {
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+ audio_stream_type_t streamType = mAudioSink->getAudioStreamType();
+ const bool hasVideo = (videoFormat != NULL);
+ const bool canOffload = canOffloadStream(
+ audioMeta, hasVideo, true /* is_streaming */, streamType);
+ if (canOffload) {
+ if (!mOffloadAudio) {
+ mRenderer->signalEnableOffloadAudio();
+ }
+ // open audio sink early under offload mode.
+ sp<AMessage> format = mSource->getFormat(true /*audio*/);
+ tryOpenAudioSinkForOffload(format, hasVideo);
+ }
instantiateDecoder(true, &mAudioDecoder);
}
@@ -538,24 +683,50 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
{
bool audio = msg->what() == kWhatAudioNotify;
- sp<AMessage> codecRequest;
- CHECK(msg->findMessage("codec-request", &codecRequest));
+ int32_t currentDecoderGeneration =
+ (audio? mAudioDecoderGeneration : mVideoDecoderGeneration);
+ int32_t requesterGeneration = currentDecoderGeneration - 1;
+ CHECK(msg->findInt32("generation", &requesterGeneration));
+
+ if (requesterGeneration != currentDecoderGeneration) {
+ ALOGV("got message from old %s decoder, generation(%d:%d)",
+ audio ? "audio" : "video", requesterGeneration,
+ currentDecoderGeneration);
+ sp<AMessage> reply;
+ if (!(msg->findMessage("reply", &reply))) {
+ return;
+ }
+
+ reply->setInt32("err", INFO_DISCONTINUITY);
+ reply->post();
+ return;
+ }
int32_t what;
- CHECK(codecRequest->findInt32("what", &what));
+ CHECK(msg->findInt32("what", &what));
- if (what == ACodec::kWhatFillThisBuffer) {
- status_t err = feedDecoderInputData(
- audio, codecRequest);
+ if (what == DecoderBase::kWhatInputDiscontinuity) {
+ int32_t formatChange;
+ CHECK(msg->findInt32("formatChange", &formatChange));
- if (err == -EWOULDBLOCK) {
- if (mSource->feedMoreTSData() == OK) {
- msg->post(10000ll);
- }
+ ALOGV("%s discontinuity: formatChange %d",
+ audio ? "audio" : "video", formatChange);
+
+ if (formatChange) {
+ mDeferredActions.push_back(
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
}
- } else if (what == ACodec::kWhatEOS) {
+
+ mDeferredActions.push_back(
+ new SimpleAction(
+ &NuPlayer::performScanSources));
+
+ processDeferredActions();
+ } else if (what == DecoderBase::kWhatEOS) {
int32_t err;
- CHECK(codecRequest->findInt32("err", &err));
+ CHECK(msg->findInt32("err", &err));
if (err == ERROR_END_OF_STREAM) {
ALOGV("got %s decoder EOS", audio ? "audio" : "video");
@@ -566,170 +737,90 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
mRenderer->queueEOS(audio, err);
- } else if (what == ACodec::kWhatFlushCompleted) {
- bool needShutdown;
-
- if (audio) {
- CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
- mFlushingAudio = FLUSHED;
- } else {
- CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
- mFlushingVideo = FLUSHED;
-
- mVideoLateByUs = 0;
- }
-
+ } else if (what == DecoderBase::kWhatFlushCompleted) {
ALOGV("decoder %s flush completed", audio ? "audio" : "video");
- if (needShutdown) {
- ALOGV("initiating %s decoder shutdown",
- audio ? "audio" : "video");
-
- (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();
-
- if (audio) {
- mFlushingAudio = SHUTTING_DOWN_DECODER;
- } else {
- mFlushingVideo = SHUTTING_DOWN_DECODER;
- }
- }
-
+ handleFlushComplete(audio, true /* isDecoder */);
finishFlushIfPossible();
- } else if (what == ACodec::kWhatOutputFormatChanged) {
- if (audio) {
- int32_t numChannels;
- CHECK(codecRequest->findInt32(
- "channel-count", &numChannels));
-
- int32_t sampleRate;
- CHECK(codecRequest->findInt32("sample-rate", &sampleRate));
-
- ALOGV("Audio output format changed to %d Hz, %d channels",
- sampleRate, numChannels);
-
- mAudioSink->close();
-
- audio_output_flags_t flags;
- int64_t durationUs;
- // FIXME: we should handle the case where the video decoder
- // is created after we receive the format change indication.
- // Current code will just make that we select deep buffer
- // with video which should not be a problem as it should
- // not prevent from keeping A/V sync.
- if (mVideoDecoder == NULL &&
- mSource->getDuration(&durationUs) == OK &&
- durationUs
- > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
- flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
- } else {
- flags = AUDIO_OUTPUT_FLAG_NONE;
- }
-
- int32_t channelMask;
- if (!codecRequest->findInt32("channel-mask", &channelMask)) {
- channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
- }
+ } else if (what == DecoderBase::kWhatVideoSizeChanged) {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
- CHECK_EQ(mAudioSink->open(
- sampleRate,
- numChannels,
- (audio_channel_mask_t)channelMask,
- AUDIO_FORMAT_PCM_16_BIT,
- 8 /* bufferCount */,
- NULL,
- NULL,
- flags),
- (status_t)OK);
- mAudioSink->start();
-
- mRenderer->signalAudioSinkChanged();
- } else {
- // video
-
- int32_t width, height;
- CHECK(codecRequest->findInt32("width", &width));
- CHECK(codecRequest->findInt32("height", &height));
-
- int32_t cropLeft, cropTop, cropRight, cropBottom;
- CHECK(codecRequest->findRect(
- "crop",
- &cropLeft, &cropTop, &cropRight, &cropBottom));
-
- int32_t displayWidth = cropRight - cropLeft + 1;
- int32_t displayHeight = cropBottom - cropTop + 1;
-
- ALOGV("Video output format changed to %d x %d "
- "(crop: %d x %d @ (%d, %d))",
- width, height,
- displayWidth,
- displayHeight,
- cropLeft, cropTop);
-
- sp<AMessage> videoInputFormat =
+ sp<AMessage> inputFormat =
mSource->getFormat(false /* audio */);
- // Take into account sample aspect ratio if necessary:
- int32_t sarWidth, sarHeight;
- if (videoInputFormat->findInt32("sar-width", &sarWidth)
- && videoInputFormat->findInt32(
- "sar-height", &sarHeight)) {
- ALOGV("Sample aspect ratio %d : %d",
- sarWidth, sarHeight);
-
- displayWidth = (displayWidth * sarWidth) / sarHeight;
-
- ALOGV("display dimensions %d x %d",
- displayWidth, displayHeight);
- }
-
- notifyListener(
- MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight);
-
- if (mNeedsSwRenderer && mNativeWindow != NULL) {
- int32_t colorFormat;
- CHECK(codecRequest->findInt32("color-format", &colorFormat));
-
- sp<MetaData> meta = new MetaData;
- meta->setInt32(kKeyWidth, width);
- meta->setInt32(kKeyHeight, height);
- meta->setRect(kKeyCropRect, cropLeft, cropTop, cropRight, cropBottom);
- meta->setInt32(kKeyColorFormat, colorFormat);
-
- mRenderer->setSoftRenderer(
- new SoftwareRenderer(mNativeWindow->getNativeWindow(), meta));
- }
- }
- } else if (what == ACodec::kWhatShutdownCompleted) {
+ updateVideoSize(inputFormat, format);
+ } else if (what == DecoderBase::kWhatShutdownCompleted) {
ALOGV("%s shutdown completed", audio ? "audio" : "video");
if (audio) {
mAudioDecoder.clear();
+ ++mAudioDecoderGeneration;
CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
mFlushingAudio = SHUT_DOWN;
} else {
mVideoDecoder.clear();
+ ++mVideoDecoderGeneration;
CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
mFlushingVideo = SHUT_DOWN;
}
finishFlushIfPossible();
- } else if (what == ACodec::kWhatError) {
- ALOGE("Received error from %s decoder, aborting playback.",
- audio ? "audio" : "video");
-
- mRenderer->queueEOS(audio, UNKNOWN_ERROR);
- } else if (what == ACodec::kWhatDrainThisBuffer) {
- renderBuffer(audio, codecRequest);
- } else if (what == ACodec::kWhatComponentAllocated) {
- if (!audio) {
- AString name;
- CHECK(codecRequest->findString("componentName", &name));
- mNeedsSwRenderer = name.startsWith("OMX.google.");
+ } else if (what == DecoderBase::kWhatResumeCompleted) {
+ finishResume();
+ } else if (what == DecoderBase::kWhatError) {
+ status_t err;
+ if (!msg->findInt32("err", &err) || err == OK) {
+ err = UNKNOWN_ERROR;
}
- } else if (what != ACodec::kWhatComponentConfigured
- && what != ACodec::kWhatBuffersAllocated) {
- ALOGV("Unhandled codec notification %d '%c%c%c%c'.",
+
+ // Decoder errors can be due to Source (e.g. from streaming),
+ // or from decoding corrupted bitstreams, or from other decoder
+ // MediaCodec operations (e.g. from an ongoing reset or seek).
+ // They may also be due to openAudioSink failure at
+ // decoder start or after a format change.
+ //
+ // We try to gracefully shut down the affected decoder if possible,
+ // rather than trying to force the shutdown with something
+ // similar to performReset(). This method can lead to a hang
+ // if MediaCodec functions block after an error, but they should
+ // typically return INVALID_OPERATION instead of blocking.
+
+ FlushStatus *flushing = audio ? &mFlushingAudio : &mFlushingVideo;
+ ALOGE("received error(%#x) from %s decoder, flushing(%d), now shutting down",
+ err, audio ? "audio" : "video", *flushing);
+
+ switch (*flushing) {
+ case NONE:
+ mDeferredActions.push_back(
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ audio ? FLUSH_CMD_NONE : FLUSH_CMD_SHUTDOWN));
+ processDeferredActions();
+ break;
+ case FLUSHING_DECODER:
+ *flushing = FLUSHING_DECODER_SHUTDOWN; // initiate shutdown after flush.
+ break; // Wait for flush to complete.
+ case FLUSHING_DECODER_SHUTDOWN:
+ break; // Wait for flush to complete.
+ case SHUTTING_DOWN_DECODER:
+ break; // Wait for shutdown to complete.
+ case FLUSHED:
+ // Widevine source reads must stop before releasing the video decoder.
+ if (!audio && mSource != NULL && mSourceFlags & Source::FLAG_SECURE) {
+ mSource->stop();
+ }
+ getDecoder(audio)->initiateShutdown(); // In the middle of a seek.
+ *flushing = SHUTTING_DOWN_DECODER; // Shut down.
+ break;
+ case SHUT_DOWN:
+ finishFlushIfPossible(); // Should not occur.
+ break; // Finish anyway.
+ }
+ notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+ } else {
+ ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
what,
what >> 24,
(what >> 16) & 0xff,
@@ -742,6 +833,14 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatRendererNotify:
{
+ int32_t requesterGeneration = mRendererGeneration - 1;
+ CHECK(msg->findInt32("generation", &requesterGeneration));
+ if (requesterGeneration != mRendererGeneration) {
+ ALOGV("got message from old renderer, generation(%d:%d)",
+ requesterGeneration, mRendererGeneration);
+ return;
+ }
+
int32_t what;
CHECK(msg->findInt32("what", &what));
@@ -772,31 +871,40 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
&& (mVideoEOS || mVideoDecoder == NULL)) {
notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
}
- } else if (what == Renderer::kWhatPosition) {
- int64_t positionUs;
- CHECK(msg->findInt64("positionUs", &positionUs));
-
- CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));
-
- if (mDriver != NULL) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
- driver->notifyPosition(positionUs);
-
- driver->notifyFrameStats(
- mNumFramesTotal, mNumFramesDropped);
- }
- }
} else if (what == Renderer::kWhatFlushComplete) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
+ handleFlushComplete(audio, false /* isDecoder */);
+ finishFlushIfPossible();
} else if (what == Renderer::kWhatVideoRenderingStart) {
notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
} else if (what == Renderer::kWhatMediaRenderingStart) {
ALOGV("media rendering started");
notifyListener(MEDIA_STARTED, 0, 0);
+ } else if (what == Renderer::kWhatAudioOffloadTearDown) {
+ ALOGV("Tear down audio offload, fall back to s/w path if due to error.");
+ int64_t positionUs;
+ CHECK(msg->findInt64("positionUs", &positionUs));
+ int32_t reason;
+ CHECK(msg->findInt32("reason", &reason));
+ closeAudioSink();
+ mAudioDecoder.clear();
+ ++mAudioDecoderGeneration;
+ mRenderer->flush(
+ true /* audio */, false /* notifyComplete */);
+ if (mVideoDecoder != NULL) {
+ mRenderer->flush(
+ false /* audio */, false /* notifyComplete */);
+ }
+
+ performSeek(positionUs, false /* needNotify */);
+ if (reason == Renderer::kDueToError) {
+ mRenderer->signalDisableOffloadAudio();
+ mOffloadAudio = false;
+ instantiateDecoder(true /* audio */, &mAudioDecoder);
+ }
}
break;
}
@@ -811,8 +919,9 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("kWhatReset");
mDeferredActions.push_back(
- new ShutdownDecoderAction(
- true /* audio */, true /* video */));
+ new FlushDecoderAction(
+ FLUSH_CMD_SHUTDOWN /* audio */,
+ FLUSH_CMD_SHUTDOWN /* video */));
mDeferredActions.push_back(
new SimpleAction(&NuPlayer::performReset));
@@ -824,14 +933,25 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatSeek:
{
int64_t seekTimeUs;
+ int32_t needNotify;
CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));
+ CHECK(msg->findInt32("needNotify", &needNotify));
- ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs);
+ ALOGV("kWhatSeek seekTimeUs=%lld us, needNotify=%d",
+ seekTimeUs, needNotify);
mDeferredActions.push_back(
- new SimpleAction(&NuPlayer::performDecoderFlush));
+ new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,
+ FLUSH_CMD_FLUSH /* video */));
- mDeferredActions.push_back(new SeekAction(seekTimeUs));
+ mDeferredActions.push_back(
+ new SeekAction(seekTimeUs, needNotify));
+
+ // After a flush without shutdown, decoder is paused.
+ // Don't resume it until source seek is done, otherwise it could
+ // start pulling stale data too soon.
+ mDeferredActions.push_back(
+ new ResumeDecoderAction(needNotify));
processDeferredActions();
break;
@@ -839,23 +959,20 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatPause:
{
- CHECK(mRenderer != NULL);
- mSource->pause();
- mRenderer->pause();
+ onPause();
+ mPausedByClient = true;
break;
}
- case kWhatResume:
+ case kWhatSourceNotify:
{
- CHECK(mRenderer != NULL);
- mSource->resume();
- mRenderer->resume();
+ onSourceNotify(msg);
break;
}
- case kWhatSourceNotify:
+ case kWhatClosedCaptionNotify:
{
- onSourceNotify(msg);
+ onClosedCaptionNotify(msg);
break;
}
@@ -865,33 +982,192 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
}
-void NuPlayer::finishFlushIfPossible() {
- if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
+void NuPlayer::onResume() {
+ if (!mPaused) {
return;
}
+ mPaused = false;
+ if (mSource != NULL) {
+ mSource->resume();
+ } else {
+ ALOGW("resume called when source is gone or not set");
+ }
+ // |mAudioDecoder| may have been released due to the pause timeout, so re-create it if
+ // needed.
+ if (audioDecoderStillNeeded() && mAudioDecoder == NULL) {
+ instantiateDecoder(true /* audio */, &mAudioDecoder);
+ }
+ if (mRenderer != NULL) {
+ mRenderer->resume();
+ } else {
+ ALOGW("resume called when renderer is gone or not set");
+ }
+}
- if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
- return;
+status_t NuPlayer::onInstantiateSecureDecoders() {
+ status_t err;
+ if (!(mSourceFlags & Source::FLAG_SECURE)) {
+ return BAD_TYPE;
}
- ALOGV("both audio and video are flushed now.");
+ if (mRenderer != NULL) {
+ ALOGE("renderer should not be set when instantiating secure decoders");
+ return UNKNOWN_ERROR;
+ }
- if (mTimeDiscontinuityPending) {
- mRenderer->signalTimeDiscontinuity();
- mTimeDiscontinuityPending = false;
+ // TRICKY: We rely on mRenderer being null, so that decoder does not start requesting
+ // data on instantiation.
+ if (mNativeWindow != NULL) {
+ err = instantiateDecoder(false, &mVideoDecoder);
+ if (err != OK) {
+ return err;
+ }
}
- if (mAudioDecoder != NULL) {
- mAudioDecoder->signalResume();
+ if (mAudioSink != NULL) {
+ err = instantiateDecoder(true, &mAudioDecoder);
+ if (err != OK) {
+ return err;
+ }
+ }
+ return OK;
+}
+
+void NuPlayer::onStart() {
+ mOffloadAudio = false;
+ mAudioEOS = false;
+ mVideoEOS = false;
+ mStarted = true;
+
+ mSource->start();
+
+ uint32_t flags = 0;
+
+ if (mSource->isRealTime()) {
+ flags |= Renderer::FLAG_REAL_TIME;
+ }
+
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+
+ sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
+
+ mOffloadAudio =
+ canOffloadStream(audioMeta, (videoFormat != NULL),
+ true /* is_streaming */, streamType);
+ if (mOffloadAudio) {
+ flags |= Renderer::FLAG_OFFLOAD_AUDIO;
+ }
+
+ sp<AMessage> notify = new AMessage(kWhatRendererNotify, id());
+ ++mRendererGeneration;
+ notify->setInt32("generation", mRendererGeneration);
+ mRenderer = new Renderer(mAudioSink, notify, flags);
+
+ mRendererLooper = new ALooper;
+ mRendererLooper->setName("NuPlayerRenderer");
+ mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ mRendererLooper->registerHandler(mRenderer);
+
+ sp<MetaData> meta = getFileMeta();
+ int32_t rate;
+ if (meta != NULL
+ && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) {
+ mRenderer->setVideoFrameRate(rate);
}
if (mVideoDecoder != NULL) {
- mVideoDecoder->signalResume();
+ mVideoDecoder->setRenderer(mRenderer);
+ }
+ if (mAudioDecoder != NULL) {
+ mAudioDecoder->setRenderer(mRenderer);
+ }
+
+ postScanSources();
+}
+
+void NuPlayer::onPause() {
+ if (mPaused) {
+ return;
+ }
+ mPaused = true;
+ if (mSource != NULL) {
+ mSource->pause();
+ } else {
+ ALOGW("pause called when source is gone or not set");
+ }
+ if (mRenderer != NULL) {
+ mRenderer->pause();
+ } else {
+ ALOGW("pause called when renderer is gone or not set");
+ }
+}
+
+bool NuPlayer::audioDecoderStillNeeded() {
+ // Audio decoder is no longer needed if it's in shut/shutting down status.
+ return ((mFlushingAudio != SHUT_DOWN) && (mFlushingAudio != SHUTTING_DOWN_DECODER));
+}
+
+void NuPlayer::handleFlushComplete(bool audio, bool isDecoder) {
+ // We wait for both the decoder flush and the renderer flush to complete
+ // before entering either the FLUSHED or the SHUTTING_DOWN_DECODER state.
+
+ mFlushComplete[audio][isDecoder] = true;
+ if (!mFlushComplete[audio][!isDecoder]) {
+ return;
+ }
+
+ FlushStatus *state = audio ? &mFlushingAudio : &mFlushingVideo;
+ switch (*state) {
+ case FLUSHING_DECODER:
+ {
+ *state = FLUSHED;
+ break;
+ }
+
+ case FLUSHING_DECODER_SHUTDOWN:
+ {
+ *state = SHUTTING_DOWN_DECODER;
+
+ ALOGV("initiating %s decoder shutdown", audio ? "audio" : "video");
+ if (!audio) {
+ // Widevine source reads must stop before releasing the video decoder.
+ if (mSource != NULL && mSourceFlags & Source::FLAG_SECURE) {
+ mSource->stop();
+ }
+ }
+ getDecoder(audio)->initiateShutdown();
+ break;
+ }
+
+ default:
+ // Decoder flush completion should only occur while in a flushing state.
+ LOG_ALWAYS_FATAL_IF(isDecoder, "decoder flush in invalid state %d", *state);
+ break;
}
+}
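
handleFlushComplete() above tracks completion in a 2x2 table indexed by [audio][isDecoder]; the flush state only advances once both the decoder and the renderer half of the same stream have reported in (flushDecoder() pre-marks the renderer half when there is no renderer). A small sketch of that bookkeeping with hypothetical names:

    #include <cstdio>

    struct FlushTracker {
        // [audio][isDecoder]: renderer completion in column 0, decoder in column 1.
        bool complete[2][2] = {{false, false}, {false, false}};

        // Returns true once both halves (decoder and renderer) of a stream are done.
        bool markComplete(bool audio, bool isDecoder) {
            complete[audio][isDecoder] = true;
            return complete[audio][!isDecoder];   // has the other half finished?
        }
    };

    int main() {
        FlushTracker t;
        printf("%d\n", t.markComplete(true /* audio */, true /* decoder */));   // 0
        printf("%d\n", t.markComplete(true /* audio */, false /* renderer */)); // 1
        return 0;
    }
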
+
+void NuPlayer::finishFlushIfPossible() {
+ if (mFlushingAudio != NONE && mFlushingAudio != FLUSHED
+ && mFlushingAudio != SHUT_DOWN) {
+ return;
+ }
+
+ if (mFlushingVideo != NONE && mFlushingVideo != FLUSHED
+ && mFlushingVideo != SHUT_DOWN) {
+ return;
+ }
+
+ ALOGV("both audio and video are flushed now.");
mFlushingAudio = NONE;
mFlushingVideo = NONE;
+ clearFlushComplete();
+
processDeferredActions();
}
@@ -907,7 +1183,27 @@ void NuPlayer::postScanSources() {
mScanSourcesPending = true;
}
-status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
+void NuPlayer::tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo) {
+ // Note: This is called early in NuPlayer to determine whether offloading
+ // is possible; otherwise the decoders call the renderer openAudioSink directly.
+
+ status_t err = mRenderer->openAudioSink(
+ format, true /* offloadOnly */, hasVideo, AUDIO_OUTPUT_FLAG_NONE, &mOffloadAudio);
+ if (err != OK) {
+ // On any failure, turn off mOffloadAudio.
+ mOffloadAudio = false;
+ } else if (mOffloadAudio) {
+ sp<MetaData> audioMeta =
+ mSource->getFormatMeta(true /* audio */);
+ sendMetaDataToHal(mAudioSink, audioMeta);
+ }
+}
+
+void NuPlayer::closeAudioSink() {
+ mRenderer->closeAudioSink();
+}
+
+status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {
if (*decoder != NULL) {
return OK;
}
@@ -921,191 +1217,142 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
if (!audio) {
AString mime;
CHECK(format->findString("mime", &mime));
- mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
- }
-
- sp<AMessage> notify =
- new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
- id());
- *decoder = audio ? new Decoder(notify) :
- new Decoder(notify, mNativeWindow);
- looper()->registerHandler(*decoder);
-
- (*decoder)->configure(format);
-
- return OK;
-}
+ sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id());
+ if (mCCDecoder == NULL) {
+ mCCDecoder = new CCDecoder(ccNotify);
+ }
-status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
+ if (mSourceFlags & Source::FLAG_SECURE) {
+ format->setInt32("secure", true);
+ }
- if ((audio && IsFlushingState(mFlushingAudio))
- || (!audio && IsFlushingState(mFlushingVideo))) {
- reply->setInt32("err", INFO_DISCONTINUITY);
- reply->post();
- return OK;
+ if (mSourceFlags & Source::FLAG_PROTECTED) {
+ format->setInt32("protected", true);
+ }
}
- sp<ABuffer> accessUnit;
-
- bool dropAccessUnit;
- do {
- status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);
-
- if (err == -EWOULDBLOCK) {
- return err;
- } else if (err != OK) {
- if (err == INFO_DISCONTINUITY) {
- int32_t type;
- CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
-
- bool formatChange =
- (audio &&
- (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
- || (!audio &&
- (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));
-
- bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;
-
- ALOGI("%s discontinuity (formatChange=%d, time=%d)",
- audio ? "audio" : "video", formatChange, timeChange);
-
- if (audio) {
- mSkipRenderingAudioUntilMediaTimeUs = -1;
- } else {
- mSkipRenderingVideoUntilMediaTimeUs = -1;
- }
-
- if (timeChange) {
- sp<AMessage> extra;
- if (accessUnit->meta()->findMessage("extra", &extra)
- && extra != NULL) {
- int64_t resumeAtMediaTimeUs;
- if (extra->findInt64(
- "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
- ALOGI("suppressing rendering of %s until %lld us",
- audio ? "audio" : "video", resumeAtMediaTimeUs);
-
- if (audio) {
- mSkipRenderingAudioUntilMediaTimeUs =
- resumeAtMediaTimeUs;
- } else {
- mSkipRenderingVideoUntilMediaTimeUs =
- resumeAtMediaTimeUs;
- }
- }
- }
- }
-
- mTimeDiscontinuityPending =
- mTimeDiscontinuityPending || timeChange;
-
- if (formatChange || timeChange) {
- if (mFlushingAudio == NONE && mFlushingVideo == NONE) {
- // And we'll resume scanning sources once we're done
- // flushing.
- mDeferredActions.push_front(
- new SimpleAction(
- &NuPlayer::performScanSources));
- }
-
- flushDecoder(audio, formatChange);
- } else {
- // This stream is unaffected by the discontinuity
-
- if (audio) {
- mFlushingAudio = FLUSHED;
- } else {
- mFlushingVideo = FLUSHED;
- }
+ if (audio) {
+ sp<AMessage> notify = new AMessage(kWhatAudioNotify, id());
+ ++mAudioDecoderGeneration;
+ notify->setInt32("generation", mAudioDecoderGeneration);
+
+ if (mOffloadAudio) {
+ *decoder = new DecoderPassThrough(notify, mSource, mRenderer);
+ } else {
+ *decoder = new Decoder(notify, mSource, mRenderer);
+ }
+ } else {
+ sp<AMessage> notify = new AMessage(kWhatVideoNotify, id());
+ ++mVideoDecoderGeneration;
+ notify->setInt32("generation", mVideoDecoderGeneration);
- finishFlushIfPossible();
+ *decoder = new Decoder(
+ notify, mSource, mRenderer, mNativeWindow, mCCDecoder);
- return -EWOULDBLOCK;
- }
+ // Enable FRC if high-quality AV sync is requested, even if not
+ // queuing to the native window, as this also improves TextureView
+ // playback.
+ {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("persist.sys.media.avsync", value, NULL) &&
+ (!strcmp("1", value) || !strcasecmp("true", value))) {
+ format->setInt32("auto-frc", 1);
}
-
- reply->setInt32("err", err);
- reply->post();
- return OK;
}
+ }
+ (*decoder)->init();
+ (*decoder)->configure(format);
- if (!audio) {
- ++mNumFramesTotal;
- }
+ // allocate buffers to decrypt widevine source buffers
+ if (!audio && (mSourceFlags & Source::FLAG_SECURE)) {
+ Vector<sp<ABuffer> > inputBufs;
+ CHECK_EQ((*decoder)->getInputBuffers(&inputBufs), (status_t)OK);
- dropAccessUnit = false;
- if (!audio
- && mVideoLateByUs > 100000ll
- && mVideoIsAVC
- && !IsAVCReferenceFrame(accessUnit)) {
- dropAccessUnit = true;
- ++mNumFramesDropped;
+ Vector<MediaBuffer *> mediaBufs;
+ for (size_t i = 0; i < inputBufs.size(); i++) {
+ const sp<ABuffer> &buffer = inputBufs[i];
+ MediaBuffer *mbuf = new MediaBuffer(buffer->data(), buffer->size());
+ mediaBufs.push(mbuf);
}
- } while (dropAccessUnit);
-
- // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video");
-
-#if 0
- int64_t mediaTimeUs;
- CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
- ALOGV("feeding %s input buffer at media time %.2f secs",
- audio ? "audio" : "video",
- mediaTimeUs / 1E6);
-#endif
-
- reply->setBuffer("buffer", accessUnit);
- reply->post();
+ status_t err = mSource->setBuffers(audio, mediaBufs);
+ if (err != OK) {
+ for (size_t i = 0; i < mediaBufs.size(); ++i) {
+ mediaBufs[i]->release();
+ }
+ mediaBufs.clear();
+ ALOGE("Secure source didn't support secure mediaBufs.");
+ return err;
+ }
+ }
return OK;
}
-void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
- // ALOGV("renderBuffer %s", audio ? "audio" : "video");
+void NuPlayer::updateVideoSize(
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat) {
+ if (inputFormat == NULL) {
+ ALOGW("Unknown video size, reporting 0x0!");
+ notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
+ return;
+ }
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
+ int32_t displayWidth, displayHeight;
+ int32_t cropLeft, cropTop, cropRight, cropBottom;
- if (IsFlushingState(audio ? mFlushingAudio : mFlushingVideo)) {
- // We're currently attempting to flush the decoder, in order
- // to complete this, the decoder wants all its buffers back,
- // so we don't want any output buffers it sent us (from before
- // we initiated the flush) to be stuck in the renderer's queue.
+ if (outputFormat != NULL) {
+ int32_t width, height;
+ CHECK(outputFormat->findInt32("width", &width));
+ CHECK(outputFormat->findInt32("height", &height));
- ALOGV("we're still flushing the %s decoder, sending its output buffer"
- " right back.", audio ? "audio" : "video");
+ int32_t cropLeft, cropTop, cropRight, cropBottom;
+ CHECK(outputFormat->findRect(
+ "crop",
+ &cropLeft, &cropTop, &cropRight, &cropBottom));
- reply->post();
- return;
- }
+ displayWidth = cropRight - cropLeft + 1;
+ displayHeight = cropBottom - cropTop + 1;
- sp<ABuffer> buffer;
- CHECK(msg->findBuffer("buffer", &buffer));
+ ALOGV("Video output format changed to %d x %d "
+ "(crop: %d x %d @ (%d, %d))",
+ width, height,
+ displayWidth,
+ displayHeight,
+ cropLeft, cropTop);
+ } else {
+ CHECK(inputFormat->findInt32("width", &displayWidth));
+ CHECK(inputFormat->findInt32("height", &displayHeight));
- int64_t &skipUntilMediaTimeUs =
- audio
- ? mSkipRenderingAudioUntilMediaTimeUs
- : mSkipRenderingVideoUntilMediaTimeUs;
+ ALOGV("Video input format %d x %d", displayWidth, displayHeight);
+ }
- if (skipUntilMediaTimeUs >= 0) {
- int64_t mediaTimeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
+ // Take into account sample aspect ratio if necessary:
+ int32_t sarWidth, sarHeight;
+ if (inputFormat->findInt32("sar-width", &sarWidth)
+ && inputFormat->findInt32("sar-height", &sarHeight)) {
+ ALOGV("Sample aspect ratio %d : %d", sarWidth, sarHeight);
- if (mediaTimeUs < skipUntilMediaTimeUs) {
- ALOGV("dropping %s buffer at time %lld as requested.",
- audio ? "audio" : "video",
- mediaTimeUs);
+ displayWidth = (displayWidth * sarWidth) / sarHeight;
- reply->post();
- return;
- }
+ ALOGV("display dimensions %d x %d", displayWidth, displayHeight);
+ }
- skipUntilMediaTimeUs = -1;
+ int32_t rotationDegrees;
+ if (!inputFormat->findInt32("rotation-degrees", &rotationDegrees)) {
+ rotationDegrees = 0;
}
- mRenderer->queueBuffer(audio, buffer, reply);
+ if (rotationDegrees == 90 || rotationDegrees == 270) {
+ int32_t tmp = displayWidth;
+ displayWidth = displayHeight;
+ displayHeight = tmp;
+ }
+
+ notifyListener(
+ MEDIA_SET_VIDEO_SIZE,
+ displayWidth,
+ displayHeight);
}
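
Illustrative sketch, not part of the patch: updateVideoSize() above scales the cropped width by the sample aspect ratio and swaps the dimensions for 90/270-degree rotation before reporting them. The helper name computeDisplaySize below is hypothetical; only the arithmetic is taken from the code above.

    #include <cstdint>
    #include <cassert>

    // Mirrors the arithmetic in updateVideoSize(): SAR scaling, then rotation swap.
    static void computeDisplaySize(int32_t cropW, int32_t cropH,
                                   int32_t sarW, int32_t sarH,
                                   int32_t rotationDegrees,
                                   int32_t *outW, int32_t *outH) {
        int32_t w = (cropW * sarW) / sarH;   // stretch by the pixel aspect ratio
        int32_t h = cropH;
        if (rotationDegrees == 90 || rotationDegrees == 270) {
            int32_t tmp = w; w = h; h = tmp; // rotated content: swap reported dimensions
        }
        *outW = w;
        *outH = h;
    }

    int main() {
        int32_t w, h;
        computeDisplaySize(1920, 1080, 4, 3, 90, &w, &h);
        assert(w == 1080 && h == 2560);      // 1920 * 4 / 3 = 2560, then swapped
        return 0;
    }
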
void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) {
@@ -1126,59 +1373,50 @@ void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
ALOGV("[%s] flushDecoder needShutdown=%d",
audio ? "audio" : "video", needShutdown);
- if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) {
+ const sp<DecoderBase> &decoder = getDecoder(audio);
+ if (decoder == NULL) {
ALOGI("flushDecoder %s without decoder present",
audio ? "audio" : "video");
+ return;
}
// Make sure we don't continue to scan sources until we finish flushing.
++mScanSourcesGeneration;
mScanSourcesPending = false;
- (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
- mRenderer->flush(audio);
+ decoder->signalFlush();
FlushStatus newStatus =
needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;
+ mFlushComplete[audio][false /* isDecoder */] = (mRenderer == NULL);
+ mFlushComplete[audio][true /* isDecoder */] = false;
if (audio) {
- CHECK(mFlushingAudio == NONE
- || mFlushingAudio == AWAITING_DISCONTINUITY);
-
+ ALOGE_IF(mFlushingAudio != NONE,
+ "audio flushDecoder() is called in state %d", mFlushingAudio);
mFlushingAudio = newStatus;
-
- if (mFlushingVideo == NONE) {
- mFlushingVideo = (mVideoDecoder != NULL)
- ? AWAITING_DISCONTINUITY
- : FLUSHED;
- }
} else {
- CHECK(mFlushingVideo == NONE
- || mFlushingVideo == AWAITING_DISCONTINUITY);
-
+ ALOGE_IF(mFlushingVideo != NONE,
+ "video flushDecoder() is called in state %d", mFlushingVideo);
mFlushingVideo = newStatus;
-
- if (mFlushingAudio == NONE) {
- mFlushingAudio = (mAudioDecoder != NULL)
- ? AWAITING_DISCONTINUITY
- : FLUSHED;
- }
}
}
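
A plausible sketch, not part of the patch, of the bookkeeping that flushDecoder() primes above: mFlushComplete[audio][isDecoder] records which of the two flush acknowledgements (renderer at index false, decoder at index true) have arrived, and the renderer slot is pre-marked complete when there is no renderer. The FlushTracker type and ack() method below are hypothetical stand-ins for handleFlushComplete(), which is not shown in this hunk.

    // Hypothetical model of the two-slot flush bookkeeping; the real logic lives
    // in NuPlayer::handleFlushComplete() and may differ in detail.
    struct FlushTracker {
        bool complete[2][2] = {};                    // [audio][isDecoder]

        void start(bool audio, bool haveRenderer) {
            complete[audio][false] = !haveRenderer;  // renderer acknowledgement
            complete[audio][true] = false;           // decoder acknowledgement
        }
        // Returns true once both the renderer and the decoder acknowledged the flush.
        bool ack(bool audio, bool isDecoder) {
            complete[audio][isDecoder] = true;
            return complete[audio][false] && complete[audio][true];
        }
    };
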
-sp<AMessage> NuPlayer::Source::getFormat(bool audio) {
- sp<MetaData> meta = getFormatMeta(audio);
+void NuPlayer::queueDecoderShutdown(
+ bool audio, bool video, const sp<AMessage> &reply) {
+ ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
- if (meta == NULL) {
- return NULL;
- }
+ mDeferredActions.push_back(
+ new FlushDecoderAction(
+ audio ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE,
+ video ? FLUSH_CMD_SHUTDOWN : FLUSH_CMD_NONE));
- sp<AMessage> msg = new AMessage;
+ mDeferredActions.push_back(
+ new SimpleAction(&NuPlayer::performScanSources));
- if(convertMetaDataToMessage(meta, &msg) == OK) {
- return msg;
- }
- return NULL;
+ mDeferredActions.push_back(new PostMessageAction(reply));
+
+ processDeferredActions();
}
status_t NuPlayer::setVideoScalingMode(int32_t mode) {
@@ -1204,17 +1442,62 @@ status_t NuPlayer::getTrackInfo(Parcel* reply) const {
return err;
}
-status_t NuPlayer::selectTrack(size_t trackIndex, bool select) {
+status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const {
+ sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id());
+ msg->setPointer("reply", reply);
+ msg->setInt32("type", type);
+
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+ return err;
+}
+
+status_t NuPlayer::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {
sp<AMessage> msg = new AMessage(kWhatSelectTrack, id());
msg->setSize("trackIndex", trackIndex);
msg->setInt32("select", select);
+ msg->setInt64("timeUs", timeUs);
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
return err;
}
+status_t NuPlayer::getCurrentPosition(int64_t *mediaUs) {
+ sp<Renderer> renderer = mRenderer;
+ if (renderer == NULL) {
+ return NO_INIT;
+ }
+
+ return renderer->getCurrentPosition(mediaUs);
+}
+
+void NuPlayer::getStats(int64_t *numFramesTotal, int64_t *numFramesDropped) {
+ sp<DecoderBase> decoder = getDecoder(false /* audio */);
+ if (decoder != NULL) {
+ decoder->getStats(numFramesTotal, numFramesDropped);
+ } else {
+ *numFramesTotal = 0;
+ *numFramesDropped = 0;
+ }
+}
+
+sp<MetaData> NuPlayer::getFileMeta() {
+ return mSource->getFileFormatMeta();
+}
+
void NuPlayer::schedulePollDuration() {
sp<AMessage> msg = new AMessage(kWhatPollDuration, id());
msg->setInt32("generation", mPollDurationGeneration);
@@ -1231,18 +1514,6 @@ void NuPlayer::processDeferredActions() {
     // an intermediate state, i.e. one or more decoders are currently
// flushing or shutting down.
- if (mRenderer != NULL) {
- // There's an edge case where the renderer owns all output
- // buffers and is paused, therefore the decoder will not read
- // more input data and will never encounter the matching
- // discontinuity. To avoid this, we resume the renderer.
-
- if (mFlushingAudio == AWAITING_DISCONTINUITY
- || mFlushingVideo == AWAITING_DISCONTINUITY) {
- mRenderer->resume();
- }
- }
-
if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
// We're currently flushing, postpone the reset until that's
// completed.
@@ -1260,66 +1531,40 @@ void NuPlayer::processDeferredActions() {
}
}
-void NuPlayer::performSeek(int64_t seekTimeUs) {
- ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)",
+void NuPlayer::performSeek(int64_t seekTimeUs, bool needNotify) {
+ ALOGV("performSeek seekTimeUs=%lld us (%.2f secs), needNotify(%d)",
seekTimeUs,
- seekTimeUs / 1E6);
-
- mSource->seekTo(seekTimeUs);
-
- if (mDriver != NULL) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
- driver->notifyPosition(seekTimeUs);
- driver->notifySeekComplete();
- }
- }
-
- // everything's flushed, continue playback.
-}
-
-void NuPlayer::performDecoderFlush() {
- ALOGV("performDecoderFlush");
-
- if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
+ seekTimeUs / 1E6,
+ needNotify);
+
+ if (mSource == NULL) {
+ // This happens when reset occurs right before the loop mode
+ // asynchronously seeks to the start of the stream.
+ LOG_ALWAYS_FATAL_IF(mAudioDecoder != NULL || mVideoDecoder != NULL,
+ "mSource is NULL and decoders not NULL audio(%p) video(%p)",
+ mAudioDecoder.get(), mVideoDecoder.get());
return;
}
+ mSource->seekTo(seekTimeUs);
+ ++mTimedTextGeneration;
- mTimeDiscontinuityPending = true;
-
- if (mAudioDecoder != NULL) {
- flushDecoder(true /* audio */, false /* needShutdown */);
- }
-
- if (mVideoDecoder != NULL) {
- flushDecoder(false /* audio */, false /* needShutdown */);
- }
+ // everything's flushed, continue playback.
}
-void NuPlayer::performDecoderShutdown(bool audio, bool video) {
- ALOGV("performDecoderShutdown audio=%d, video=%d", audio, video);
+void NuPlayer::performDecoderFlush(FlushCommand audio, FlushCommand video) {
+ ALOGV("performDecoderFlush audio=%d, video=%d", audio, video);
- if ((!audio || mAudioDecoder == NULL)
- && (!video || mVideoDecoder == NULL)) {
+ if ((audio == FLUSH_CMD_NONE || mAudioDecoder == NULL)
+ && (video == FLUSH_CMD_NONE || mVideoDecoder == NULL)) {
return;
}
- mTimeDiscontinuityPending = true;
-
- if (mFlushingAudio == NONE && (!audio || mAudioDecoder == NULL)) {
- mFlushingAudio = FLUSHED;
+ if (audio != FLUSH_CMD_NONE && mAudioDecoder != NULL) {
+ flushDecoder(true /* audio */, (audio == FLUSH_CMD_SHUTDOWN));
}
- if (mFlushingVideo == NONE && (!video || mVideoDecoder == NULL)) {
- mFlushingVideo = FLUSHED;
- }
-
- if (audio && mAudioDecoder != NULL) {
- flushDecoder(true /* audio */, true /* needShutdown */);
- }
-
- if (video && mVideoDecoder != NULL) {
- flushDecoder(false /* audio */, true /* needShutdown */);
+ if (video != FLUSH_CMD_NONE && mVideoDecoder != NULL) {
+ flushDecoder(false /* audio */, (video == FLUSH_CMD_SHUTDOWN));
}
}
@@ -1334,13 +1579,19 @@ void NuPlayer::performReset() {
++mScanSourcesGeneration;
mScanSourcesPending = false;
+ if (mRendererLooper != NULL) {
+ if (mRenderer != NULL) {
+ mRendererLooper->unregisterHandler(mRenderer->id());
+ }
+ mRendererLooper->stop();
+ mRendererLooper.clear();
+ }
mRenderer.clear();
+ ++mRendererGeneration;
if (mSource != NULL) {
mSource->stop();
- looper()->unregisterHandler(mSource->id());
-
mSource.clear();
}
@@ -1382,11 +1633,64 @@ void NuPlayer::performSetSurface(const sp<NativeWindowWrapper> &wrapper) {
}
}
+void NuPlayer::performResumeDecoders(bool needNotify) {
+ if (needNotify) {
+ mResumePending = true;
+ if (mVideoDecoder == NULL) {
+ // if audio-only, we can notify seek complete now,
+ // as the resume operation will be relatively fast.
+ finishResume();
+ }
+ }
+
+ if (mVideoDecoder != NULL) {
+        // When seeks arrive back to back, MediaPlayer caches the latest seek
+        // position and sends down a new seek request once the previous one
+        // completes. Wait for at least one video output frame before
+        // notifying seek complete, so that the video thumbnail gets updated
+        // while the seekbar is dragged.
+ mVideoDecoder->signalResume(needNotify);
+ }
+
+ if (mAudioDecoder != NULL) {
+ mAudioDecoder->signalResume(false /* needNotify */);
+ }
+}
+
+void NuPlayer::finishResume() {
+ if (mResumePending) {
+ mResumePending = false;
+ if (mDriver != NULL) {
+ sp<NuPlayerDriver> driver = mDriver.promote();
+ if (driver != NULL) {
+ driver->notifySeekComplete();
+ }
+ }
+ }
+}
+
void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
int32_t what;
CHECK(msg->findInt32("what", &what));
switch (what) {
+ case Source::kWhatInstantiateSecureDecoders:
+ {
+ if (mSource == NULL) {
+ // This is a stale notification from a source that was
+ // asynchronously preparing when the client called reset().
+ // We handled the reset, the source is gone.
+ break;
+ }
+
+ sp<AMessage> reply;
+ CHECK(msg->findMessage("reply", &reply));
+ status_t err = onInstantiateSecureDecoders();
+ reply->setInt32("err", err);
+ reply->post();
+ break;
+ }
+
case Source::kWhatPrepared:
{
if (mSource == NULL) {
@@ -1399,18 +1703,25 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
int32_t err;
CHECK(msg->findInt32("err", &err));
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
- driver->notifyPrepareCompleted(err);
+ if (err != OK) {
+ // shut down potential secure codecs in case client never calls reset
+ mDeferredActions.push_back(
+ new FlushDecoderAction(FLUSH_CMD_SHUTDOWN /* audio */,
+ FLUSH_CMD_SHUTDOWN /* video */));
+ processDeferredActions();
}
- int64_t durationUs;
- if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
+ sp<NuPlayerDriver> driver = mDriver.promote();
+ if (driver != NULL) {
+ // notify duration first, so that it's definitely set when
+                // the app receives the "prepare complete" callback.
+ int64_t durationUs;
+ if (mSource->getDuration(&durationUs) == OK) {
driver->notifyDuration(durationUs);
}
+ driver->notifyPrepareCompleted(err);
}
+
break;
}
@@ -1421,6 +1732,10 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
+ if ((flags & NuPlayer::Source::FLAG_CAN_SEEK) == 0) {
+ driver->notifyListener(
+ MEDIA_INFO, MEDIA_INFO_NOT_SEEKABLE, 0);
+ }
driver->notifyFlagsChanged(flags);
}
@@ -1439,46 +1754,104 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
case Source::kWhatVideoSizeChanged:
{
- int32_t width, height;
- CHECK(msg->findInt32("width", &width));
- CHECK(msg->findInt32("height", &height));
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+
+ updateVideoSize(format);
+ break;
+ }
+
+ case Source::kWhatBufferingUpdate:
+ {
+ int32_t percentage;
+ CHECK(msg->findInt32("percentage", &percentage));
- notifyListener(MEDIA_SET_VIDEO_SIZE, width, height);
+ notifyListener(MEDIA_BUFFERING_UPDATE, percentage, 0);
break;
}
+ case Source::kWhatPauseOnBufferingStart:
+ {
+ // ignore if not playing
+ if (mStarted && !mPausedByClient) {
+ ALOGI("buffer low, pausing...");
+
+ onPause();
+ }
+ // fall-thru
+ }
+
case Source::kWhatBufferingStart:
{
notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0);
break;
}
+ case Source::kWhatResumeOnBufferingEnd:
+ {
+ // ignore if not playing
+ if (mStarted && !mPausedByClient) {
+ ALOGI("buffer ready, resuming...");
+
+ onResume();
+ }
+ // fall-thru
+ }
+
case Source::kWhatBufferingEnd:
{
notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0);
break;
}
+ case Source::kWhatCacheStats:
+ {
+ int32_t kbps;
+ CHECK(msg->findInt32("bandwidth", &kbps));
+
+ notifyListener(MEDIA_INFO, MEDIA_INFO_NETWORK_BANDWIDTH, kbps);
+ break;
+ }
+
case Source::kWhatSubtitleData:
{
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
- int32_t trackIndex;
- int64_t timeUs, durationUs;
- CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+ sendSubtitleData(buffer, 0 /* baseIndex */);
+ break;
+ }
+
+ case Source::kWhatTimedTextData:
+ {
+ int32_t generation;
+ if (msg->findInt32("generation", &generation)
+ && generation != mTimedTextGeneration) {
+ break;
+ }
+
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
+
+ sp<NuPlayerDriver> driver = mDriver.promote();
+ if (driver == NULL) {
+ break;
+ }
- Parcel in;
- in.writeInt32(trackIndex);
- in.writeInt64(timeUs);
- in.writeInt64(durationUs);
- in.writeInt32(buffer->size());
- in.writeInt32(buffer->size());
- in.write(buffer->data(), buffer->size());
+ int posMs;
+ int64_t timeUs, posUs;
+ driver->getCurrentPosition(&posMs);
+            posUs = posMs * 1000ll;  // 64-bit multiply so long positions don't overflow
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
+ if (posUs < timeUs) {
+ if (!msg->findInt32("generation", &generation)) {
+ msg->setInt32("generation", mTimedTextGeneration);
+ }
+ msg->post(timeUs - posUs);
+ } else {
+ sendTimedTextData(buffer);
+ }
break;
}
@@ -1495,13 +1868,112 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
break;
}
+ case Source::kWhatDrmNoLicense:
+ {
+ notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+void NuPlayer::onClosedCaptionNotify(const sp<AMessage> &msg) {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ switch (what) {
+ case NuPlayer::CCDecoder::kWhatClosedCaptionData:
+ {
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
+
+ size_t inbandTracks = 0;
+ if (mSource != NULL) {
+ inbandTracks = mSource->getTrackCount();
+ }
+
+ sendSubtitleData(buffer, inbandTracks);
+ break;
+ }
+
+ case NuPlayer::CCDecoder::kWhatTrackAdded:
+ {
+ notifyListener(MEDIA_INFO, MEDIA_INFO_METADATA_UPDATE, 0);
+
+ break;
+ }
+
default:
TRESPASS();
}
+
+
}
+void NuPlayer::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) {
+ int32_t trackIndex;
+ int64_t timeUs, durationUs;
+ CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+
+ Parcel in;
+ in.writeInt32(trackIndex + baseIndex);
+ in.writeInt64(timeUs);
+ in.writeInt64(durationUs);
+ in.writeInt32(buffer->size());
+ in.writeInt32(buffer->size());
+ in.write(buffer->data(), buffer->size());
+
+ notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
+}
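
Illustrative sketch, not part of the patch: the parcel assembled in sendSubtitleData() carries, in order, the re-based track index, the start time, the duration, and the payload size written twice followed by the raw bytes. readSubtitleParcel below is a hypothetical consumer shown only to document that layout.

    #include <binder/Parcel.h>
    #include <vector>

    // Hypothetical reader for the MEDIA_SUBTITLE_DATA parcel layout built above.
    static void readSubtitleParcel(const android::Parcel &in) {
        in.setDataPosition(0);
        int32_t trackIndex = in.readInt32();   // trackIndex + baseIndex
        int64_t timeUs     = in.readInt64();
        int64_t durationUs = in.readInt64();
        int32_t size       = in.readInt32();   // payload size, written twice
        (void)in.readInt32();
        std::vector<uint8_t> payload(size);
        in.read(payload.data(), payload.size());
        (void)trackIndex; (void)timeUs; (void)durationUs;
    }
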
+
+void NuPlayer::sendTimedTextData(const sp<ABuffer> &buffer) {
+ const void *data;
+ size_t size = 0;
+ int64_t timeUs;
+ int32_t flag = TextDescriptions::LOCAL_DESCRIPTIONS;
+
+ AString mime;
+ CHECK(buffer->meta()->findString("mime", &mime));
+ CHECK(strcasecmp(mime.c_str(), MEDIA_MIMETYPE_TEXT_3GPP) == 0);
+
+ data = buffer->data();
+ size = buffer->size();
+
+ Parcel parcel;
+ if (size > 0) {
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ flag |= TextDescriptions::IN_BAND_TEXT_3GPP;
+ TextDescriptions::getParcelOfDescriptions(
+ (const uint8_t *)data, size, flag, timeUs / 1000, &parcel);
+ }
+
+ if ((parcel.dataSize() > 0)) {
+ notifyListener(MEDIA_TIMED_TEXT, 0, 0, &parcel);
+ } else { // send an empty timed text
+ notifyListener(MEDIA_TIMED_TEXT, 0, 0);
+ }
+}
////////////////////////////////////////////////////////////////////////////////
+sp<AMessage> NuPlayer::Source::getFormat(bool audio) {
+ sp<MetaData> meta = getFormatMeta(audio);
+
+ if (meta == NULL) {
+ return NULL;
+ }
+
+ sp<AMessage> msg = new AMessage;
+
+ if(convertMetaDataToMessage(meta, &msg) == OK) {
+ return msg;
+ }
+ return NULL;
+}
+
void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) {
sp<AMessage> notify = dupNotify();
notify->setInt32("what", kWhatFlagsChanged);
@@ -1509,11 +1981,10 @@ void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) {
notify->post();
}
-void NuPlayer::Source::notifyVideoSizeChanged(int32_t width, int32_t height) {
+void NuPlayer::Source::notifyVideoSizeChanged(const sp<AMessage> &format) {
sp<AMessage> notify = dupNotify();
notify->setInt32("what", kWhatVideoSizeChanged);
- notify->setInt32("width", width);
- notify->setInt32("height", height);
+ notify->setMessage("format", format);
notify->post();
}
@@ -1524,23 +1995,15 @@ void NuPlayer::Source::notifyPrepared(status_t err) {
notify->post();
}
-void NuPlayer::Source::onMessageReceived(const sp<AMessage> &msg) {
- TRESPASS();
+void NuPlayer::Source::notifyInstantiateSecureDecoders(const sp<AMessage> &reply) {
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatInstantiateSecureDecoders);
+ notify->setMessage("reply", reply);
+ notify->post();
}
-void NuPlayer::queueDecoderShutdown(
- bool audio, bool video, const sp<AMessage> &reply) {
- ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video);
-
- mDeferredActions.push_back(
- new ShutdownDecoderAction(audio, video));
-
- mDeferredActions.push_back(
- new SimpleAction(&NuPlayer::performScanSources));
-
- mDeferredActions.push_back(new PostMessageAction(reply));
-
- processDeferredActions();
+void NuPlayer::Source::onMessageReceived(const sp<AMessage> & /* msg */) {
+ TRESPASS();
}
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 590e1f2..30ede1a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -24,8 +24,9 @@
namespace android {
-struct ACodec;
-struct MetaData;
+struct ABuffer;
+struct AMessage;
+class MetaData;
struct NuPlayerDriver;
struct NuPlayer : public AHandler {
@@ -38,7 +39,9 @@ struct NuPlayer : public AHandler {
void setDataSourceAsync(const sp<IStreamSource> &source);
void setDataSourceAsync(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
void setDataSourceAsync(int fd, int64_t offset, int64_t length);
@@ -51,17 +54,22 @@ struct NuPlayer : public AHandler {
void start();
void pause();
- void resume();
// Will notify the driver through "notifyResetComplete" once finished.
void resetAsync();
- // Will notify the driver through "notifySeekComplete" once finished.
- void seekToAsync(int64_t seekTimeUs);
+ // Will notify the driver through "notifySeekComplete" once finished
+ // and needNotify is true.
+ void seekToAsync(int64_t seekTimeUs, bool needNotify = false);
status_t setVideoScalingMode(int32_t mode);
status_t getTrackInfo(Parcel* reply) const;
- status_t selectTrack(size_t trackIndex, bool select);
+ status_t getSelectedTrack(int32_t type, Parcel* reply) const;
+ status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
+ status_t getCurrentPosition(int64_t *mediaUs);
+    void getStats(int64_t *numFramesTotal, int64_t *numFramesDropped);
+
+ sp<MetaData> getFileMeta();
protected:
virtual ~NuPlayer();
@@ -74,6 +82,9 @@ public:
private:
struct Decoder;
+ struct DecoderBase;
+ struct DecoderPassThrough;
+ struct CCDecoder;
struct GenericSource;
struct HTTPLiveSource;
struct Renderer;
@@ -82,7 +93,8 @@ private:
struct Action;
struct SeekAction;
struct SetSurfaceAction;
- struct ShutdownDecoderAction;
+ struct ResumeDecoderAction;
+ struct FlushDecoderAction;
struct PostMessageAction;
struct SimpleAction;
@@ -96,6 +108,7 @@ private:
kWhatScanSources = 'scan',
kWhatVideoNotify = 'vidN',
kWhatAudioNotify = 'audN',
+ kWhatClosedCaptionNotify = 'capN',
kWhatRendererNotify = 'renN',
kWhatReset = 'rset',
kWhatSeek = 'seek',
@@ -104,6 +117,7 @@ private:
kWhatPollDuration = 'polD',
kWhatSourceNotify = 'srcN',
kWhatGetTrackInfo = 'gTrI',
+ kWhatGetSelectedTrack = 'gSel',
kWhatSelectTrack = 'selT',
};
@@ -114,11 +128,15 @@ private:
uint32_t mSourceFlags;
sp<NativeWindowWrapper> mNativeWindow;
sp<MediaPlayerBase::AudioSink> mAudioSink;
- sp<Decoder> mVideoDecoder;
- bool mVideoIsAVC;
- bool mNeedsSwRenderer;
- sp<Decoder> mAudioDecoder;
+ sp<DecoderBase> mVideoDecoder;
+ bool mOffloadAudio;
+ sp<DecoderBase> mAudioDecoder;
+ sp<CCDecoder> mCCDecoder;
sp<Renderer> mRenderer;
+ sp<ALooper> mRendererLooper;
+ int32_t mAudioDecoderGeneration;
+ int32_t mVideoDecoderGeneration;
+ int32_t mRendererGeneration;
List<sp<Action> > mDeferredActions;
@@ -129,10 +147,10 @@ private:
int32_t mScanSourcesGeneration;
int32_t mPollDurationGeneration;
+ int32_t mTimedTextGeneration;
enum FlushStatus {
NONE,
- AWAITING_DISCONTINUITY,
FLUSHING_DECODER,
FLUSHING_DECODER_SHUTDOWN,
SHUTTING_DOWN_DECODER,
@@ -140,35 +158,69 @@ private:
SHUT_DOWN,
};
- // Once the current flush is complete this indicates whether the
- // notion of time has changed.
- bool mTimeDiscontinuityPending;
+ enum FlushCommand {
+ FLUSH_CMD_NONE,
+ FLUSH_CMD_FLUSH,
+ FLUSH_CMD_SHUTDOWN,
+ };
+
+ // Status of flush responses from the decoder and renderer.
+ bool mFlushComplete[2][2];
FlushStatus mFlushingAudio;
FlushStatus mFlushingVideo;
- int64_t mSkipRenderingAudioUntilMediaTimeUs;
- int64_t mSkipRenderingVideoUntilMediaTimeUs;
-
- int64_t mVideoLateByUs;
- int64_t mNumFramesTotal, mNumFramesDropped;
+    // Whether a resume (and its deferred seek-complete notification) is still pending.
+ bool mResumePending;
int32_t mVideoScalingMode;
bool mStarted;
- status_t instantiateDecoder(bool audio, sp<Decoder> *decoder);
+ // Actual pause state, either as requested by client or due to buffering.
+ bool mPaused;
+
+ // Pause state as requested by client. Note that if mPausedByClient is
+ // true, mPaused is always true; if mPausedByClient is false, mPaused could
+ // still become true, when we pause internally due to buffering.
+ bool mPausedByClient;
+
+ inline const sp<DecoderBase> &getDecoder(bool audio) {
+ return audio ? mAudioDecoder : mVideoDecoder;
+ }
+
+ inline void clearFlushComplete() {
+ mFlushComplete[0][0] = false;
+ mFlushComplete[0][1] = false;
+ mFlushComplete[1][0] = false;
+ mFlushComplete[1][1] = false;
+ }
- status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg);
- void renderBuffer(bool audio, const sp<AMessage> &msg);
+ void tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo);
+ void closeAudioSink();
+
+ status_t instantiateDecoder(bool audio, sp<DecoderBase> *decoder);
+
+ status_t onInstantiateSecureDecoders();
+
+ void updateVideoSize(
+ const sp<AMessage> &inputFormat,
+ const sp<AMessage> &outputFormat = NULL);
void notifyListener(int msg, int ext1, int ext2, const Parcel *in = NULL);
+ void handleFlushComplete(bool audio, bool isDecoder);
void finishFlushIfPossible();
+ void onStart();
+ void onResume();
+ void onPause();
+
+ bool audioDecoderStillNeeded();
+
void flushDecoder(bool audio, bool needShutdown);
- static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL);
+ void finishResume();
void postScanSources();
@@ -177,18 +229,24 @@ private:
void processDeferredActions();
- void performSeek(int64_t seekTimeUs);
- void performDecoderFlush();
- void performDecoderShutdown(bool audio, bool video);
+ void performSeek(int64_t seekTimeUs, bool needNotify);
+ void performDecoderFlush(FlushCommand audio, FlushCommand video);
void performReset();
void performScanSources();
void performSetSurface(const sp<NativeWindowWrapper> &wrapper);
+ void performResumeDecoders(bool needNotify);
void onSourceNotify(const sp<AMessage> &msg);
+ void onClosedCaptionNotify(const sp<AMessage> &msg);
void queueDecoderShutdown(
bool audio, bool video, const sp<AMessage> &reply);
+ void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
+ void sendTimedTextData(const sp<ABuffer> &buffer);
+
+ void writeTrackInfo(Parcel* reply, const sp<AMessage> format) const;
+
DISALLOW_EVIL_CONSTRUCTORS(NuPlayer);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
new file mode 100644
index 0000000..9229704
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp
@@ -0,0 +1,361 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerCCDecoder"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayerCCDecoder.h"
+
+#include <media/stagefright/foundation/ABitReader.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+struct CCData {
+ CCData(uint8_t type, uint8_t data1, uint8_t data2)
+ : mType(type), mData1(data1), mData2(data2) {
+ }
+ bool getChannel(size_t *channel) const {
+ if (mData1 >= 0x10 && mData1 <= 0x1f) {
+ *channel = (mData1 >= 0x18 ? 1 : 0) + (mType ? 2 : 0);
+ return true;
+ }
+ return false;
+ }
+
+ uint8_t mType;
+ uint8_t mData1;
+ uint8_t mData2;
+};
+
+static bool isNullPad(CCData *cc) {
+ return cc->mData1 < 0x10 && cc->mData2 < 0x10;
+}
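
Illustrative sketch, not part of the patch: CCData::getChannel() above maps a control byte in the 0x10..0x1f range to one of four CEA-608 channels, with 0x10..0x17 selecting the first channel of a field, 0x18..0x1f the second, and the field bit (mType) adding an offset of two. The standalone channelFor() helper below is hypothetical and simply repeats that arithmetic:

    #include <cstdint>
    #include <cassert>

    // Same arithmetic as CCData::getChannel(): channel = (data1 >= 0x18) + 2 * field.
    static int channelFor(uint8_t field, uint8_t data1) {
        if (data1 < 0x10 || data1 > 0x1f) return -1;   // not a control code
        return (data1 >= 0x18 ? 1 : 0) + (field ? 2 : 0);
    }

    int main() {
        assert(channelFor(0, 0x14) == 0);  // field 1, first channel  (CC1)
        assert(channelFor(0, 0x1c) == 1);  // field 1, second channel (CC2)
        assert(channelFor(1, 0x14) == 2);  // field 2, first channel  (CC3)
        assert(channelFor(1, 0x1c) == 3);  // field 2, second channel (CC4)
        assert(channelFor(0, 0x20) == -1); // printable character, no channel info
        return 0;
    }
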
+
+static void dumpBytePair(const sp<ABuffer> &ccBuf) {
+ size_t offset = 0;
+ AString out;
+
+ while (offset < ccBuf->size()) {
+ char tmp[128];
+
+ CCData *cc = (CCData *) (ccBuf->data() + offset);
+
+ if (isNullPad(cc)) {
+ // 1 null pad or XDS metadata, ignore
+ offset += sizeof(CCData);
+ continue;
+ }
+
+ if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
+ // 2 basic chars
+ sprintf(tmp, "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
+ // 1 special char
+ sprintf(tmp, "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Spanish/French char
+ sprintf(tmp, "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Portuguese/German/Danish char
+ sprintf(tmp, "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
+ // Mid-Row Codes (Table 69)
+ sprintf(tmp, "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
+ ||
+ ((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
+ && cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
+ // Misc Control Codes (Table 70)
+ sprintf(tmp, "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 & 0x70) == 0x10
+ && (cc->mData2 & 0x40) == 0x40
+ && ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
+ // Preamble Address Codes (Table 71)
+ sprintf(tmp, "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else {
+ sprintf(tmp, "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ }
+
+ if (out.size() > 0) {
+ out.append(", ");
+ }
+
+ out.append(tmp);
+
+ offset += sizeof(CCData);
+ }
+
+ ALOGI("%s", out.c_str());
+}
+
+NuPlayer::CCDecoder::CCDecoder(const sp<AMessage> &notify)
+ : mNotify(notify),
+ mCurrentChannel(0),
+ mSelectedTrack(-1) {
+ for (size_t i = 0; i < sizeof(mTrackIndices)/sizeof(mTrackIndices[0]); ++i) {
+ mTrackIndices[i] = -1;
+ }
+}
+
+size_t NuPlayer::CCDecoder::getTrackCount() const {
+ return mFoundChannels.size();
+}
+
+sp<AMessage> NuPlayer::CCDecoder::getTrackInfo(size_t index) const {
+ if (!isTrackValid(index)) {
+ return NULL;
+ }
+
+ sp<AMessage> format = new AMessage();
+
+ format->setInt32("type", MEDIA_TRACK_TYPE_SUBTITLE);
+ format->setString("language", "und");
+ format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_608);
+ //CC1, field 0 channel 0
+ bool isDefaultAuto = (mFoundChannels[index] == 0);
+ format->setInt32("auto", isDefaultAuto);
+ format->setInt32("default", isDefaultAuto);
+ format->setInt32("forced", 0);
+
+ return format;
+}
+
+status_t NuPlayer::CCDecoder::selectTrack(size_t index, bool select) {
+ if (!isTrackValid(index)) {
+ return BAD_VALUE;
+ }
+
+ if (select) {
+ if (mSelectedTrack == (ssize_t)index) {
+ ALOGE("track %zu already selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("selected track %zu", index);
+ mSelectedTrack = index;
+ } else {
+ if (mSelectedTrack != (ssize_t)index) {
+ ALOGE("track %zu is not selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("unselected track %zu", index);
+ mSelectedTrack = -1;
+ }
+
+ return OK;
+}
+
+bool NuPlayer::CCDecoder::isSelected() const {
+ return mSelectedTrack >= 0 && mSelectedTrack < (int32_t) getTrackCount();
+}
+
+bool NuPlayer::CCDecoder::isTrackValid(size_t index) const {
+ return index < getTrackCount();
+}
+
+int32_t NuPlayer::CCDecoder::getTrackIndex(size_t channel) const {
+ if (channel < sizeof(mTrackIndices)/sizeof(mTrackIndices[0])) {
+ return mTrackIndices[channel];
+ }
+ return -1;
+}
+
+// returns true if a new CC track is found
+bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ sp<ABuffer> sei;
+ if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {
+ return false;
+ }
+
+ bool trackAdded = false;
+
+ NALBitReader br(sei->data() + 1, sei->size() - 1);
+ // sei_message()
+ while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message()
+ uint32_t payload_type = 0;
+ size_t payload_size = 0;
+ uint8_t last_byte;
+
+ do {
+ last_byte = br.getBits(8);
+ payload_type += last_byte;
+ } while (last_byte == 0xFF);
+
+ do {
+ last_byte = br.getBits(8);
+ payload_size += last_byte;
+ } while (last_byte == 0xFF);
+
+ // sei_payload()
+ if (payload_type == 4) {
+ // user_data_registered_itu_t_t35()
+
+ // ATSC A/72: 6.4.2
+ uint8_t itu_t_t35_country_code = br.getBits(8);
+ uint16_t itu_t_t35_provider_code = br.getBits(16);
+ uint32_t user_identifier = br.getBits(32);
+ uint8_t user_data_type_code = br.getBits(8);
+
+ payload_size -= 1 + 2 + 4 + 1;
+
+ if (itu_t_t35_country_code == 0xB5
+ && itu_t_t35_provider_code == 0x0031
+ && user_identifier == 'GA94'
+ && user_data_type_code == 0x3) {
+ // MPEG_cc_data()
+ // ATSC A/53 Part 4: 6.2.3.1
+ br.skipBits(1); //process_em_data_flag
+ bool process_cc_data_flag = br.getBits(1);
+ br.skipBits(1); //additional_data_flag
+ size_t cc_count = br.getBits(5);
+ br.skipBits(8); // em_data;
+ payload_size -= 2;
+
+ if (process_cc_data_flag) {
+ AString out;
+
+ sp<ABuffer> ccBuf = new ABuffer(cc_count * sizeof(CCData));
+ ccBuf->setRange(0, 0);
+
+ for (size_t i = 0; i < cc_count; i++) {
+ uint8_t marker = br.getBits(5);
+ CHECK_EQ(marker, 0x1f);
+
+ bool cc_valid = br.getBits(1);
+ uint8_t cc_type = br.getBits(2);
+ // remove odd parity bit
+ uint8_t cc_data_1 = br.getBits(8) & 0x7f;
+ uint8_t cc_data_2 = br.getBits(8) & 0x7f;
+
+ if (cc_valid
+ && (cc_type == 0 || cc_type == 1)) {
+ CCData cc(cc_type, cc_data_1, cc_data_2);
+ if (!isNullPad(&cc)) {
+ size_t channel;
+ if (cc.getChannel(&channel) && getTrackIndex(channel) < 0) {
+ mTrackIndices[channel] = mFoundChannels.size();
+ mFoundChannels.push_back(channel);
+ trackAdded = true;
+ }
+ memcpy(ccBuf->data() + ccBuf->size(),
+ (void *)&cc, sizeof(cc));
+ ccBuf->setRange(0, ccBuf->size() + sizeof(CCData));
+ }
+ }
+ }
+ payload_size -= cc_count * 3;
+
+ mCCMap.add(timeUs, ccBuf);
+ break;
+ }
+ } else {
+ ALOGV("Malformed SEI payload type 4");
+ }
+ } else {
+ ALOGV("Unsupported SEI payload type %d", payload_type);
+ }
+
+ // skipping remaining bits of this payload
+ br.skipBits(payload_size * 8);
+ }
+
+ return trackAdded;
+}
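
Illustrative sketch, not part of the patch: the two do/while loops at the top of extractFromSEI() decode the SEI header's variable-length fields, where payload_type and payload_size are each the sum of consecutive bytes and every 0xFF byte means "add 255 and keep reading". readSeiValue() below is a hypothetical standalone version of that decoding:

    #include <cstdint>
    #include <cstddef>
    #include <cassert>

    // Decodes one 0xFF-extended SEI value (type or size) starting at data[*pos].
    static uint32_t readSeiValue(const uint8_t *data, size_t len, size_t *pos) {
        uint32_t value = 0;
        uint8_t lastByte;
        do {
            lastByte = (*pos < len) ? data[(*pos)++] : 0;
            value += lastByte;
        } while (lastByte == 0xFF);
        return value;
    }

    int main() {
        // 0xFF 0xFF 0x04 encodes 255 + 255 + 4 = 514.
        const uint8_t bytes[] = { 0xFF, 0xFF, 0x04 };
        size_t pos = 0;
        assert(readSeiValue(bytes, sizeof(bytes), &pos) == 514);
        assert(pos == 3);
        return 0;
    }
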
+
+sp<ABuffer> NuPlayer::CCDecoder::filterCCBuf(
+ const sp<ABuffer> &ccBuf, size_t index) {
+ sp<ABuffer> filteredCCBuf = new ABuffer(ccBuf->size());
+ filteredCCBuf->setRange(0, 0);
+
+ size_t cc_count = ccBuf->size() / sizeof(CCData);
+ const CCData* cc_data = (const CCData*)ccBuf->data();
+ for (size_t i = 0; i < cc_count; ++i) {
+ size_t channel;
+ if (cc_data[i].getChannel(&channel)) {
+ mCurrentChannel = channel;
+ }
+ if (mCurrentChannel == mFoundChannels[index]) {
+ memcpy(filteredCCBuf->data() + filteredCCBuf->size(),
+ (void *)&cc_data[i], sizeof(CCData));
+ filteredCCBuf->setRange(0, filteredCCBuf->size() + sizeof(CCData));
+ }
+ }
+
+ return filteredCCBuf;
+}
+
+void NuPlayer::CCDecoder::decode(const sp<ABuffer> &accessUnit) {
+ if (extractFromSEI(accessUnit)) {
+ ALOGI("Found CEA-608 track");
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatTrackAdded);
+ msg->post();
+ }
+ // TODO: extract CC from other sources
+}
+
+void NuPlayer::CCDecoder::display(int64_t timeUs) {
+ if (!isTrackValid(mSelectedTrack)) {
+ ALOGE("Could not find current track(index=%d)", mSelectedTrack);
+ return;
+ }
+
+ ssize_t index = mCCMap.indexOfKey(timeUs);
+ if (index < 0) {
+ ALOGV("cc for timestamp %" PRId64 " not found", timeUs);
+ return;
+ }
+
+ sp<ABuffer> ccBuf = filterCCBuf(mCCMap.valueAt(index), mSelectedTrack);
+
+ if (ccBuf->size() > 0) {
+#if 0
+ dumpBytePair(ccBuf);
+#endif
+
+ ccBuf->meta()->setInt32("trackIndex", mSelectedTrack);
+ ccBuf->meta()->setInt64("timeUs", timeUs);
+ ccBuf->meta()->setInt64("durationUs", 0ll);
+
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatClosedCaptionData);
+ msg->setBuffer("buffer", ccBuf);
+ msg->post();
+ }
+
+ // remove all entries before timeUs
+ mCCMap.removeItemsAt(0, index + 1);
+}
+
+void NuPlayer::CCDecoder::flush() {
+ mCCMap.clear();
+}
+
+} // namespace android
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h
new file mode 100644
index 0000000..5e06f4e
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_CCDECODER_H_
+
+#define NUPLAYER_CCDECODER_H_
+
+#include "NuPlayer.h"
+
+namespace android {
+
+struct NuPlayer::CCDecoder : public RefBase {
+ enum {
+ kWhatClosedCaptionData,
+ kWhatTrackAdded,
+ };
+
+ CCDecoder(const sp<AMessage> &notify);
+
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
+ status_t selectTrack(size_t index, bool select);
+ bool isSelected() const;
+ void decode(const sp<ABuffer> &accessUnit);
+ void display(int64_t timeUs);
+ void flush();
+
+private:
+ sp<AMessage> mNotify;
+ KeyedVector<int64_t, sp<ABuffer> > mCCMap;
+ size_t mCurrentChannel;
+ int32_t mSelectedTrack;
+ int32_t mTrackIndices[4];
+ Vector<size_t> mFoundChannels;
+
+ bool isTrackValid(size_t index) const;
+ int32_t getTrackIndex(size_t channel) const;
+ bool extractFromSEI(const sp<ABuffer> &accessUnit);
+ sp<ABuffer> filterCCBuf(const sp<ABuffer> &ccBuf, size_t index);
+
+ DISALLOW_EVIL_CONSTRUCTORS(CCDecoder);
+};
+
+} // namespace android
+
+#endif // NUPLAYER_CCDECODER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 22f699e..5d98d98 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010 The Android Open Source Project
+ * Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,133 +17,885 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerDecoder"
#include <utils/Log.h>
+#include <inttypes.h>
+#include "NuPlayerCCDecoder.h"
#include "NuPlayerDecoder.h"
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
+#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include "avc_utils.h"
+#include "ATSParser.h"
namespace android {
NuPlayer::Decoder::Decoder(
const sp<AMessage> &notify,
- const sp<NativeWindowWrapper> &nativeWindow)
- : mNotify(notify),
- mNativeWindow(nativeWindow) {
+ const sp<Source> &source,
+ const sp<Renderer> &renderer,
+ const sp<NativeWindowWrapper> &nativeWindow,
+ const sp<CCDecoder> &ccDecoder)
+ : DecoderBase(notify),
+ mNativeWindow(nativeWindow),
+ mSource(source),
+ mRenderer(renderer),
+ mCCDecoder(ccDecoder),
+ mSkipRenderingUntilMediaTimeUs(-1ll),
+ mNumFramesTotal(0ll),
+ mNumFramesDropped(0ll),
+ mIsAudio(true),
+ mIsVideoAVC(false),
+ mIsSecure(false),
+ mFormatChangePending(false),
+ mPaused(true),
+ mResumePending(false),
+ mComponentName("decoder") {
+ mCodecLooper = new ALooper;
+ mCodecLooper->setName("NPDecoder-CL");
+ mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
}
NuPlayer::Decoder::~Decoder() {
+ releaseAndResetMediaBuffers();
+}
+
+void NuPlayer::Decoder::getStats(
+ int64_t *numFramesTotal,
+ int64_t *numFramesDropped) const {
+ *numFramesTotal = mNumFramesTotal;
+ *numFramesDropped = mNumFramesDropped;
}
-void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
+void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+ ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
+
+ switch (msg->what()) {
+ case kWhatCodecNotify:
+ {
+ if (!isStaleReply(msg)) {
+ int32_t numInput, numOutput;
+
+ if (!msg->findInt32("input-buffers", &numInput)) {
+ numInput = INT32_MAX;
+ }
+
+ if (!msg->findInt32("output-buffers", &numOutput)) {
+ numOutput = INT32_MAX;
+ }
+
+ if (!mPaused) {
+ while (numInput-- > 0 && handleAnInputBuffer()) {}
+ }
+
+ while (numOutput-- > 0 && handleAnOutputBuffer()) {}
+ }
+
+ requestCodecNotification();
+ break;
+ }
+
+ case kWhatRenderBuffer:
+ {
+ if (!isStaleReply(msg)) {
+ onRenderBuffer(msg);
+ }
+ break;
+ }
+
+ default:
+ DecoderBase::onMessageReceived(msg);
+ break;
+ }
+}
+
+void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
CHECK(mCodec == NULL);
+ mFormatChangePending = false;
+
+ ++mBufferGeneration;
+
AString mime;
CHECK(format->findString("mime", &mime));
- sp<AMessage> notifyMsg =
- new AMessage(kWhatCodecNotify, id());
+ mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
+ mIsVideoAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
- mCSDIndex = 0;
- for (size_t i = 0;; ++i) {
- sp<ABuffer> csd;
- if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
+ sp<Surface> surface = NULL;
+ if (mNativeWindow != NULL) {
+ surface = mNativeWindow->getSurfaceTextureClient();
+ }
+
+ mComponentName = mime;
+ mComponentName.append(" decoder");
+ ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), surface.get());
+
+ mCodec = MediaCodec::CreateByType(mCodecLooper, mime.c_str(), false /* encoder */);
+ int32_t secure = 0;
+ if (format->findInt32("secure", &secure) && secure != 0) {
+ if (mCodec != NULL) {
+ mCodec->getName(&mComponentName);
+ mComponentName.append(".secure");
+ mCodec->release();
+ ALOGI("[%s] creating", mComponentName.c_str());
+ mCodec = MediaCodec::CreateByComponentName(
+ mCodecLooper, mComponentName.c_str());
+ }
+ }
+ if (mCodec == NULL) {
+ ALOGE("Failed to create %s%s decoder",
+ (secure ? "secure " : ""), mime.c_str());
+ handleError(UNKNOWN_ERROR);
+ return;
+ }
+ mIsSecure = secure;
+
+ mCodec->getName(&mComponentName);
+
+ status_t err;
+ if (mNativeWindow != NULL) {
+ // disconnect from surface as MediaCodec will reconnect
+ err = native_window_api_disconnect(
+ surface.get(), NATIVE_WINDOW_API_MEDIA);
+ // We treat this as a warning, as this is a preparatory step.
+ // Codec will try to connect to the surface, which is where
+ // any error signaling will occur.
+ ALOGW_IF(err != OK, "failed to disconnect from surface: %d", err);
+ }
+ err = mCodec->configure(
+ format, surface, NULL /* crypto */, 0 /* flags */);
+ if (err != OK) {
+ ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);
+ mCodec->release();
+ mCodec.clear();
+ handleError(err);
+ return;
+ }
+ rememberCodecSpecificData(format);
+
+ // the following should work in configured state
+ CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));
+ CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat));
+
+ err = mCodec->start();
+ if (err != OK) {
+ ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err);
+ mCodec->release();
+ mCodec.clear();
+ handleError(err);
+ return;
+ }
+
+ // the following should work after start
+ CHECK_EQ((status_t)OK, mCodec->getInputBuffers(&mInputBuffers));
+ releaseAndResetMediaBuffers();
+ CHECK_EQ((status_t)OK, mCodec->getOutputBuffers(&mOutputBuffers));
+ ALOGV("[%s] got %zu input and %zu output buffers",
+ mComponentName.c_str(),
+ mInputBuffers.size(),
+ mOutputBuffers.size());
+
+ if (mRenderer != NULL) {
+ requestCodecNotification();
+ }
+ mPaused = false;
+ mResumePending = false;
+}
+
+void NuPlayer::Decoder::onSetRenderer(const sp<Renderer> &renderer) {
+ bool hadNoRenderer = (mRenderer == NULL);
+ mRenderer = renderer;
+ if (hadNoRenderer && mRenderer != NULL) {
+ requestCodecNotification();
+ }
+}
+
+void NuPlayer::Decoder::onGetInputBuffers(
+ Vector<sp<ABuffer> > *dstBuffers) {
+ dstBuffers->clear();
+ for (size_t i = 0; i < mInputBuffers.size(); i++) {
+ dstBuffers->push(mInputBuffers[i]);
+ }
+}
+
+void NuPlayer::Decoder::onResume(bool notifyComplete) {
+ mPaused = false;
+
+ if (notifyComplete) {
+ mResumePending = true;
+ }
+}
+
+void NuPlayer::Decoder::onFlush(bool notifyComplete) {
+ if (mCCDecoder != NULL) {
+ mCCDecoder->flush();
+ }
+
+ if (mRenderer != NULL) {
+ mRenderer->flush(mIsAudio, notifyComplete);
+ mRenderer->signalTimeDiscontinuity();
+ }
+
+ status_t err = OK;
+ if (mCodec != NULL) {
+ err = mCodec->flush();
+ mCSDsToSubmit = mCSDsForCurrentFormat; // copy operator
+ ++mBufferGeneration;
+ }
+
+ if (err != OK) {
+ ALOGE("failed to flush %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ // finish with posting kWhatFlushCompleted.
+ // we attempt to release the buffers even if flush fails.
+ }
+ releaseAndResetMediaBuffers();
+
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFlushCompleted);
+ notify->post();
+ mPaused = true;
+ }
+}
+
+void NuPlayer::Decoder::onShutdown(bool notifyComplete) {
+ status_t err = OK;
+
+ // if there is a pending resume request, notify complete now
+ notifyResumeCompleteIfNecessary();
+
+ if (mCodec != NULL) {
+ err = mCodec->release();
+ mCodec = NULL;
+ ++mBufferGeneration;
+
+ if (mNativeWindow != NULL) {
+ // reconnect to surface as MediaCodec disconnected from it
+ status_t error =
+ native_window_api_connect(
+ mNativeWindow->getNativeWindow().get(),
+ NATIVE_WINDOW_API_MEDIA);
+ ALOGW_IF(error != NO_ERROR,
+ "[%s] failed to connect to native window, error=%d",
+ mComponentName.c_str(), error);
+ }
+ mComponentName = "decoder";
+ }
+
+ releaseAndResetMediaBuffers();
+
+ if (err != OK) {
+ ALOGE("failed to release %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ // finish with posting kWhatShutdownCompleted.
+ }
+
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatShutdownCompleted);
+ notify->post();
+ mPaused = true;
+ }
+}
+
+void NuPlayer::Decoder::doRequestBuffers() {
+ if (mFormatChangePending) {
+ return;
+ }
+ status_t err = OK;
+ while (!mDequeuedInputBuffers.empty()) {
+ size_t bufferIx = *mDequeuedInputBuffers.begin();
+ sp<AMessage> msg = new AMessage();
+ msg->setSize("buffer-ix", bufferIx);
+ err = fetchInputData(msg);
+ if (err != OK) {
break;
}
+ mDequeuedInputBuffers.erase(mDequeuedInputBuffers.begin());
- mCSD.push(csd);
+ if (!mPendingInputMessages.empty()
+ || !onInputBufferFetched(msg)) {
+ mPendingInputMessages.push_back(msg);
+ }
}
- if (mNativeWindow != NULL) {
- format->setObject("native-window", mNativeWindow);
+ if (err == -EWOULDBLOCK
+ && mSource->feedMoreTSData() == OK) {
+ scheduleRequestBuffers();
+ }
+}
+
+bool NuPlayer::Decoder::handleAnInputBuffer() {
+ if (mFormatChangePending) {
+ return false;
+ }
+ size_t bufferIx = -1;
+ status_t res = mCodec->dequeueInputBuffer(&bufferIx);
+ ALOGV("[%s] dequeued input: %d",
+ mComponentName.c_str(), res == OK ? (int)bufferIx : res);
+ if (res != OK) {
+ if (res != -EAGAIN) {
+ ALOGE("Failed to dequeue input buffer for %s (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ }
+ return false;
+ }
+
+ CHECK_LT(bufferIx, mInputBuffers.size());
+
+ if (mMediaBuffers[bufferIx] != NULL) {
+ mMediaBuffers[bufferIx]->release();
+ mMediaBuffers.editItemAt(bufferIx) = NULL;
}
+ mInputBufferIsDequeued.editItemAt(bufferIx) = true;
- // Current video decoders do not return from OMX_FillThisBuffer
- // quickly, violating the OpenMAX specs, until that is remedied
- // we need to invest in an extra looper to free the main event
- // queue.
- bool needDedicatedLooper = !strncasecmp(mime.c_str(), "video/", 6);
+ if (!mCSDsToSubmit.isEmpty()) {
+ sp<AMessage> msg = new AMessage();
+ msg->setSize("buffer-ix", bufferIx);
- mCodec = new ACodec;
+ sp<ABuffer> buffer = mCSDsToSubmit.itemAt(0);
+ ALOGI("[%s] resubmitting CSD", mComponentName.c_str());
+ msg->setBuffer("buffer", buffer);
+ mCSDsToSubmit.removeAt(0);
+ CHECK(onInputBufferFetched(msg));
+ return true;
+ }
- if (needDedicatedLooper && mCodecLooper == NULL) {
- mCodecLooper = new ALooper;
- mCodecLooper->setName("NuPlayerDecoder");
- mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ while (!mPendingInputMessages.empty()) {
+ sp<AMessage> msg = *mPendingInputMessages.begin();
+ if (!onInputBufferFetched(msg)) {
+ break;
+ }
+ mPendingInputMessages.erase(mPendingInputMessages.begin());
}
- (needDedicatedLooper ? mCodecLooper : looper())->registerHandler(mCodec);
+ if (!mInputBufferIsDequeued.editItemAt(bufferIx)) {
+ return true;
+ }
- mCodec->setNotificationMessage(notifyMsg);
- mCodec->initiateSetup(format);
+ mDequeuedInputBuffers.push_back(bufferIx);
+
+ onRequestInputBuffers();
+ return true;
}
-void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatCodecNotify:
- {
- int32_t what;
- CHECK(msg->findInt32("what", &what));
+bool NuPlayer::Decoder::handleAnOutputBuffer() {
+ if (mFormatChangePending) {
+ return false;
+ }
+ size_t bufferIx = -1;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ uint32_t flags;
+ status_t res = mCodec->dequeueOutputBuffer(
+ &bufferIx, &offset, &size, &timeUs, &flags);
+
+ if (res != OK) {
+ ALOGV("[%s] dequeued output: %d", mComponentName.c_str(), res);
+ } else {
+ ALOGV("[%s] dequeued output: %d (time=%lld flags=%" PRIu32 ")",
+ mComponentName.c_str(), (int)bufferIx, timeUs, flags);
+ }
+
+ if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
+ res = mCodec->getOutputBuffers(&mOutputBuffers);
+ if (res != OK) {
+ ALOGE("Failed to get output buffers for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
+ // NuPlayer ignores this
+ return true;
+ } else if (res == INFO_FORMAT_CHANGED) {
+ sp<AMessage> format = new AMessage();
+ res = mCodec->getOutputFormat(&format);
+ if (res != OK) {
+ ALOGE("Failed to get output format for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
- if (what == ACodec::kWhatFillThisBuffer) {
- onFillThisBuffer(msg);
+ if (!mIsAudio) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatVideoSizeChanged);
+ notify->setMessage("format", format);
+ notify->post();
+ } else if (mRenderer != NULL) {
+ uint32_t flags;
+ int64_t durationUs;
+ bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+ if (!hasVideo &&
+ mSource->getDuration(&durationUs) == OK &&
+ durationUs
+ > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
+ flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
} else {
- sp<AMessage> notify = mNotify->dup();
- notify->setMessage("codec-request", msg);
- notify->post();
+ flags = AUDIO_OUTPUT_FLAG_NONE;
+ }
+
+ res = mRenderer->openAudioSink(
+ format, false /* offloadOnly */, hasVideo, flags, NULL /* isOffloaded */);
+ if (res != OK) {
+ ALOGE("Failed to open AudioSink on format change for %s (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
}
- break;
}
+ return true;
+ } else if (res == INFO_DISCONTINUITY) {
+ // nothing to do
+ return true;
+ } else if (res != OK) {
+ if (res != -EAGAIN) {
+ ALOGE("Failed to dequeue output buffer for %s (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ }
+ return false;
+ }
- default:
- TRESPASS();
- break;
+ CHECK_LT(bufferIx, mOutputBuffers.size());
+ sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ buffer->setRange(offset, size);
+ buffer->meta()->clear();
+ buffer->meta()->setInt64("timeUs", timeUs);
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ buffer->meta()->setInt32("eos", true);
+ notifyResumeCompleteIfNecessary();
}
+ // we do not expect CODECCONFIG or SYNCFRAME for decoder
+
+ sp<AMessage> reply = new AMessage(kWhatRenderBuffer, id());
+ reply->setSize("buffer-ix", bufferIx);
+ reply->setInt32("generation", mBufferGeneration);
+
+ if (mSkipRenderingUntilMediaTimeUs >= 0) {
+ if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+ ALOGV("[%s] dropping buffer at time %lld as requested.",
+ mComponentName.c_str(), (long long)timeUs);
+
+ reply->post();
+ return true;
+ }
+
+ mSkipRenderingUntilMediaTimeUs = -1;
+ }
+
+ // wait until 1st frame comes out to signal resume complete
+ notifyResumeCompleteIfNecessary();
+
+ if (mRenderer != NULL) {
+ // send the buffer to renderer.
+ mRenderer->queueBuffer(mIsAudio, buffer, reply);
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
+ }
+ }
+
+ return true;
}
-void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) {
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
+void NuPlayer::Decoder::releaseAndResetMediaBuffers() {
+ for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+ if (mMediaBuffers[i] != NULL) {
+ mMediaBuffers[i]->release();
+ mMediaBuffers.editItemAt(i) = NULL;
+ }
+ }
+ mMediaBuffers.resize(mInputBuffers.size());
+ for (size_t i = 0; i < mMediaBuffers.size(); i++) {
+ mMediaBuffers.editItemAt(i) = NULL;
+ }
+ mInputBufferIsDequeued.clear();
+ mInputBufferIsDequeued.resize(mInputBuffers.size());
+ for (size_t i = 0; i < mInputBufferIsDequeued.size(); i++) {
+ mInputBufferIsDequeued.editItemAt(i) = false;
+ }
+
+ mPendingInputMessages.clear();
+ mDequeuedInputBuffers.clear();
+ mSkipRenderingUntilMediaTimeUs = -1;
+}
+void NuPlayer::Decoder::requestCodecNotification() {
+ if (mFormatChangePending) {
+ return;
+ }
+ if (mCodec != NULL) {
+ sp<AMessage> reply = new AMessage(kWhatCodecNotify, id());
+ reply->setInt32("generation", mBufferGeneration);
+ mCodec->requestActivityNotification(reply);
+ }
+}
+
+bool NuPlayer::Decoder::isStaleReply(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ return generation != mBufferGeneration;
+}
+
+status_t NuPlayer::Decoder::fetchInputData(sp<AMessage> &reply) {
+ sp<ABuffer> accessUnit;
+ bool dropAccessUnit;
+ do {
+ status_t err = mSource->dequeueAccessUnit(mIsAudio, &accessUnit);
+
+ if (err == -EWOULDBLOCK) {
+ return err;
+ } else if (err != OK) {
+ if (err == INFO_DISCONTINUITY) {
+ int32_t type;
+ CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+ bool formatChange =
+ (mIsAudio &&
+ (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
+ || (!mIsAudio &&
+ (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));
+
+ bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+ ALOGI("%s discontinuity (format=%d, time=%d)",
+ mIsAudio ? "audio" : "video", formatChange, timeChange);
+
+ bool seamlessFormatChange = false;
+ sp<AMessage> newFormat = mSource->getFormat(mIsAudio);
+ if (formatChange) {
+ seamlessFormatChange =
+ supportsSeamlessFormatChange(newFormat);
+ // treat seamless format change separately
+ formatChange = !seamlessFormatChange;
+ }
+
+ if (formatChange || timeChange) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatInputDiscontinuity);
+ msg->setInt32("formatChange", formatChange);
+ msg->post();
+ }
+
+ if (formatChange /* not seamless */) {
+ // must change decoder
+ // return EOS and wait to be killed
+ mFormatChangePending = true;
+ return ERROR_END_OF_STREAM;
+ } else if (timeChange) {
+ // need to flush
+ // TODO: Ideally we shouldn't need a flush upon time
+                    // discontinuity, since flushing causes loss of frames. We
+                    // should probably queue a time-change marker in the output
+                    // queue and handle it in the renderer instead.
+ rememberCodecSpecificData(newFormat);
+ onFlush(false /* notifyComplete */);
+ err = OK;
+ } else if (seamlessFormatChange) {
+ // reuse existing decoder and don't flush
+ rememberCodecSpecificData(newFormat);
+ err = OK;
+ } else {
+ // This stream is unaffected by the discontinuity
+ return -EWOULDBLOCK;
+ }
+ }
+
+ reply->setInt32("err", err);
+ return OK;
+ }
+
+ if (!mIsAudio) {
+ ++mNumFramesTotal;
+ }
+
+ dropAccessUnit = false;
+ if (!mIsAudio
+ && !mIsSecure
+ && mRenderer->getVideoLateByUs() > 100000ll
+ && mIsVideoAVC
+ && !IsAVCReferenceFrame(accessUnit)) {
+ dropAccessUnit = true;
+ ++mNumFramesDropped;
+ }
+ } while (dropAccessUnit);
+
+    // ALOGV("returned a valid buffer of %s data", mIsAudio ? "audio" : "video");
#if 0
- sp<ABuffer> outBuffer;
- CHECK(msg->findBuffer("buffer", &outBuffer));
-#else
- sp<ABuffer> outBuffer;
+ int64_t mediaTimeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("feeding %s input buffer at media time %.2f secs",
+ mIsAudio ? "audio" : "video",
+ mediaTimeUs / 1E6);
#endif
- if (mCSDIndex < mCSD.size()) {
- outBuffer = mCSD.editItemAt(mCSDIndex++);
- outBuffer->meta()->setInt64("timeUs", 0);
+ if (mCCDecoder != NULL) {
+ mCCDecoder->decode(accessUnit);
+ }
- reply->setBuffer("buffer", outBuffer);
- reply->post();
- return;
+ reply->setBuffer("buffer", accessUnit);
+
+ return OK;
+}
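
Illustrative summary, not part of the patch, of the discontinuity handling in fetchInputData() above. classify() below paraphrases the decision tree, assuming "seamless" means supportsSeamlessFormatChange() returned true for the new format:

    // Paraphrase of the decision tree in fetchInputData(); not the actual implementation.
    enum class DiscontinuityAction {
        kRestartDecoder,   // non-seamless format change: return EOS, decoder gets shut down
        kFlushAndReuse,    // time change: flush the decoder, keep using it
        kReuseNoFlush,     // seamless format change: keep decoder, remember new CSD
        kIgnore,           // stream unaffected by the discontinuity
    };

    static DiscontinuityAction classify(bool formatChange, bool seamless, bool timeChange) {
        if (formatChange && !seamless) return DiscontinuityAction::kRestartDecoder;
        if (timeChange)                return DiscontinuityAction::kFlushAndReuse;
        if (formatChange && seamless)  return DiscontinuityAction::kReuseNoFlush;
        return DiscontinuityAction::kIgnore;
    }
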
+
+bool NuPlayer::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {
+ size_t bufferIx;
+ CHECK(msg->findSize("buffer-ix", &bufferIx));
+ CHECK_LT(bufferIx, mInputBuffers.size());
+ sp<ABuffer> codecBuffer = mInputBuffers[bufferIx];
+
+ sp<ABuffer> buffer;
+ bool hasBuffer = msg->findBuffer("buffer", &buffer);
+
+ // handle widevine classic source - that fills an arbitrary input buffer
+ MediaBuffer *mediaBuffer = NULL;
+ if (hasBuffer) {
+ mediaBuffer = (MediaBuffer *)(buffer->getMediaBufferBase());
+ if (mediaBuffer != NULL) {
+ // the source likely filled a different buffer than the one requested: adjust the buffer index
+ size_t ix;
+ for (ix = 0; ix < mInputBuffers.size(); ix++) {
+ const sp<ABuffer> &buf = mInputBuffers[ix];
+ if (buf->data() == mediaBuffer->data()) {
+ // all input buffers are dequeued on start, hence the check
+ if (!mInputBufferIsDequeued[ix]) {
+ ALOGV("[%s] received MediaBuffer for #%zu instead of #%zu",
+ mComponentName.c_str(), ix, bufferIx);
+ mediaBuffer->release();
+ return false;
+ }
+
+ // TRICKY: we need |buffer| for its metadata, so instead of copying, set
+ // codecBuffer to that same (though technically incorrect) buffer to
+ // avoid a memcpy into the codecBuffer
+ codecBuffer = buffer;
+ codecBuffer->setRange(
+ mediaBuffer->range_offset(),
+ mediaBuffer->range_length());
+ bufferIx = ix;
+ break;
+ }
+ }
+ CHECK(ix < mInputBuffers.size());
+ }
}
- sp<AMessage> notify = mNotify->dup();
- notify->setMessage("codec-request", msg);
- notify->post();
+ if (buffer == NULL /* includes !hasBuffer */) {
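+ // No buffer attached: either the source wants us to hold on to the codec
+ // buffer (err == OK), or it reported EOS / an error, in which case an
+ // EOS-flagged empty buffer is queued so the codec can drain.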
+ int32_t streamErr = ERROR_END_OF_STREAM;
+ CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+
+ if (streamErr == OK) {
+ /* buffers are returned to hold on to */
+ return true;
+ }
+
+ // attempt to queue EOS
+ status_t err = mCodec->queueInputBuffer(
+ bufferIx,
+ 0,
+ 0,
+ 0,
+ MediaCodec::BUFFER_FLAG_EOS);
+ if (err == OK) {
+ mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+ } else if (streamErr == ERROR_END_OF_STREAM) {
+ streamErr = err;
+ // err will not be ERROR_END_OF_STREAM
+ }
+
+ if (streamErr != ERROR_END_OF_STREAM) {
+ ALOGE("Stream error for %s (err=%d), EOS %s queued",
+ mComponentName.c_str(),
+ streamErr,
+ err == OK ? "successfully" : "unsuccessfully");
+ handleError(streamErr);
+ }
+ } else {
+ sp<AMessage> extra;
+ if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+ int64_t resumeAtMediaTimeUs;
+ if (extra->findInt64(
+ "resume-at-mediaTimeUs", &resumeAtMediaTimeUs)) {
+ ALOGI("[%s] suppressing rendering until %lld us",
+ mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+ mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+ }
+ }
+
+ int64_t timeUs = 0;
+ uint32_t flags = 0;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ int32_t eos, csd;
+ // we do not expect SYNCFRAME for decoder
+ if (buffer->meta()->findInt32("eos", &eos) && eos) {
+ flags |= MediaCodec::BUFFER_FLAG_EOS;
+ } else if (buffer->meta()->findInt32("csd", &csd) && csd) {
+ flags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
+ }
+
+ // copy into codec buffer
+ if (buffer != codecBuffer) {
+ CHECK_LE(buffer->size(), codecBuffer->capacity());
+ codecBuffer->setRange(0, buffer->size());
+ memcpy(codecBuffer->data(), buffer->data(), buffer->size());
+ }
+
+ status_t err = mCodec->queueInputBuffer(
+ bufferIx,
+ codecBuffer->offset(),
+ codecBuffer->size(),
+ timeUs,
+ flags);
+ if (err != OK) {
+ if (mediaBuffer != NULL) {
+ mediaBuffer->release();
+ }
+ ALOGE("Failed to queue input buffer for %s (err=%d)",
+ mComponentName.c_str(), err);
+ handleError(err);
+ } else {
+ mInputBufferIsDequeued.editItemAt(bufferIx) = false;
+ if (mediaBuffer != NULL) {
+ CHECK(mMediaBuffers[bufferIx] == NULL);
+ mMediaBuffers.editItemAt(bufferIx) = mediaBuffer;
+ }
+ }
+ }
+ return true;
}
-void NuPlayer::Decoder::signalFlush() {
- if (mCodec != NULL) {
- mCodec->signalFlush();
+void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
+ status_t err;
+ int32_t render;
+ size_t bufferIx;
+ CHECK(msg->findSize("buffer-ix", &bufferIx));
+
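+ // For video, let the closed-caption decoder display any caption data tagged
+ // with this buffer's presentation timestamp before the frame is rendered.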
+ if (!mIsAudio) {
+ int64_t timeUs;
+ sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ buffer->meta()->findInt64("timeUs", &timeUs);
+
+ if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
+ mCCDecoder->display(timeUs);
+ }
+ }
+
+ if (msg->findInt32("render", &render) && render) {
+ int64_t timestampNs;
+ CHECK(msg->findInt64("timestampNs", &timestampNs));
+ err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
+ } else {
+ err = mCodec->releaseOutputBuffer(bufferIx);
+ }
+ if (err != OK) {
+ ALOGE("failed to release output buffer for %s (err=%d)",
+ mComponentName.c_str(), err);
+ handleError(err);
}
}
-void NuPlayer::Decoder::signalResume() {
- if (mCodec != NULL) {
- mCodec->signalResume();
+bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(
+ const sp<AMessage> &targetFormat) const {
+ if (targetFormat == NULL) {
+ return true;
+ }
+
+ AString mime;
+ if (!targetFormat->findString("mime", &mime)) {
+ return false;
+ }
+
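+ // Only AAC is currently considered: channel count, sample rate, the is-adts
+ // flag and the codec-specific data (csd-0) must all match for the change to
+ // be treated as seamless; any other mime type returns false.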
+ if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
+ // field-by-field comparison
+ const char * keys[] = { "channel-count", "sample-rate", "is-adts" };
+ for (unsigned int i = 0; i < sizeof(keys) / sizeof(keys[0]); i++) {
+ int32_t oldVal, newVal;
+ if (!mInputFormat->findInt32(keys[i], &oldVal) ||
+ !targetFormat->findInt32(keys[i], &newVal) ||
+ oldVal != newVal) {
+ return false;
+ }
+ }
+
+ sp<ABuffer> oldBuf, newBuf;
+ if (mInputFormat->findBuffer("csd-0", &oldBuf) &&
+ targetFormat->findBuffer("csd-0", &newBuf)) {
+ if (oldBuf->size() != newBuf->size()) {
+ return false;
+ }
+ return !memcmp(oldBuf->data(), newBuf->data(), oldBuf->size());
+ }
}
+ return false;
}
-void NuPlayer::Decoder::initiateShutdown() {
- if (mCodec != NULL) {
- mCodec->initiateShutdown();
+bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetFormat) const {
+ if (mInputFormat == NULL) {
+ return false;
+ }
+
+ if (targetFormat == NULL) {
+ return true;
+ }
+
+ AString oldMime, newMime;
+ if (!mInputFormat->findString("mime", &oldMime)
+ || !targetFormat->findString("mime", &newMime)
+ || !(oldMime == newMime)) {
+ return false;
+ }
+
+ bool audio = !strncasecmp(oldMime.c_str(), "audio/", strlen("audio/"));
+ bool seamless;
+ if (audio) {
+ seamless = supportsSeamlessAudioFormatChange(targetFormat);
+ } else {
+ int32_t isAdaptive;
+ seamless = (mCodec != NULL &&
+ mInputFormat->findInt32("adaptive-playback", &isAdaptive) &&
+ isAdaptive);
+ }
+
+ ALOGV("%s seamless support for %s", seamless ? "yes" : "no", oldMime.c_str());
+ return seamless;
+}
+
+void NuPlayer::Decoder::rememberCodecSpecificData(const sp<AMessage> &format) {
+ if (format == NULL) {
+ return;
+ }
+ mCSDsForCurrentFormat.clear();
+ for (int32_t i = 0; ; ++i) {
+ AString tag = "csd-";
+ tag.append(i);
+ sp<ABuffer> buffer;
+ if (!format->findBuffer(tag.c_str(), &buffer)) {
+ break;
+ }
+ mCSDsForCurrentFormat.push(buffer);
+ }
+}
+
+void NuPlayer::Decoder::notifyResumeCompleteIfNecessary() {
+ if (mResumePending) {
+ mResumePending = false;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatResumeCompleted);
+ notify->post();
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index a876148..1bfa94f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2010 The Android Open Source Project
+ * Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,49 +15,93 @@
*/
#ifndef NUPLAYER_DECODER_H_
-
#define NUPLAYER_DECODER_H_
#include "NuPlayer.h"
-#include <media/stagefright/foundation/AHandler.h>
+#include "NuPlayerDecoderBase.h"
namespace android {
-struct ABuffer;
-
-struct NuPlayer::Decoder : public AHandler {
+struct NuPlayer::Decoder : public DecoderBase {
Decoder(const sp<AMessage> &notify,
- const sp<NativeWindowWrapper> &nativeWindow = NULL);
-
- void configure(const sp<AMessage> &format);
+ const sp<Source> &source,
+ const sp<Renderer> &renderer = NULL,
+ const sp<NativeWindowWrapper> &nativeWindow = NULL,
+ const sp<CCDecoder> &ccDecoder = NULL);
- void signalFlush();
- void signalResume();
- void initiateShutdown();
+ virtual void getStats(
+ int64_t *mNumFramesTotal,
+ int64_t *mNumFramesDropped) const;
protected:
virtual ~Decoder();
virtual void onMessageReceived(const sp<AMessage> &msg);
+ virtual void onConfigure(const sp<AMessage> &format);
+ virtual void onSetRenderer(const sp<Renderer> &renderer);
+ virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);
+ virtual void onResume(bool notifyComplete);
+ virtual void onFlush(bool notifyComplete);
+ virtual void onShutdown(bool notifyComplete);
+ virtual void doRequestBuffers();
+
private:
enum {
- kWhatCodecNotify = 'cdcN',
+ kWhatCodecNotify = 'cdcN',
+ kWhatRenderBuffer = 'rndr',
};
- sp<AMessage> mNotify;
sp<NativeWindowWrapper> mNativeWindow;
- sp<ACodec> mCodec;
+ sp<Source> mSource;
+ sp<Renderer> mRenderer;
+ sp<CCDecoder> mCCDecoder;
+
+ sp<AMessage> mInputFormat;
+ sp<AMessage> mOutputFormat;
+ sp<MediaCodec> mCodec;
sp<ALooper> mCodecLooper;
- Vector<sp<ABuffer> > mCSD;
- size_t mCSDIndex;
+ List<sp<AMessage> > mPendingInputMessages;
+
+ Vector<sp<ABuffer> > mInputBuffers;
+ Vector<sp<ABuffer> > mOutputBuffers;
+ Vector<sp<ABuffer> > mCSDsForCurrentFormat;
+ Vector<sp<ABuffer> > mCSDsToSubmit;
+ Vector<bool> mInputBufferIsDequeued;
+ Vector<MediaBuffer *> mMediaBuffers;
+ Vector<size_t> mDequeuedInputBuffers;
+
+ int64_t mSkipRenderingUntilMediaTimeUs;
+ int64_t mNumFramesTotal;
+ int64_t mNumFramesDropped;
+ bool mIsAudio;
+ bool mIsVideoAVC;
+ bool mIsSecure;
+ bool mFormatChangePending;
+
+ bool mPaused;
+ bool mResumePending;
+ AString mComponentName;
+
+ bool handleAnInputBuffer();
+ bool handleAnOutputBuffer();
+
+ void releaseAndResetMediaBuffers();
+ void requestCodecNotification();
+ bool isStaleReply(const sp<AMessage> &msg);
+
+ status_t fetchInputData(sp<AMessage> &reply);
+ bool onInputBufferFetched(const sp<AMessage> &msg);
+ void onRenderBuffer(const sp<AMessage> &msg);
- sp<AMessage> makeFormat(const sp<MetaData> &meta);
+ bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+ bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;
+ void rememberCodecSpecificData(const sp<AMessage> &format);
- void onFillThisBuffer(const sp<AMessage> &msg);
+ void notifyResumeCompleteIfNecessary();
DISALLOW_EVIL_CONSTRUCTORS(Decoder);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
new file mode 100644
index 0000000..d56fc4d
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
@@ -0,0 +1,200 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDecoderBase"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayerDecoderBase.h"
+
+#include "NuPlayerRenderer.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+NuPlayer::DecoderBase::DecoderBase(const sp<AMessage> &notify)
+ : mNotify(notify),
+ mBufferGeneration(0),
+ mRequestInputBuffersPending(false) {
+ // Every decoder has its own looper because MediaCodec operations
+ // are blocking, but NuPlayer needs asynchronous operations.
+ mDecoderLooper = new ALooper;
+ mDecoderLooper->setName("NPDecoder");
+ mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+NuPlayer::DecoderBase::~DecoderBase() {
+ mDecoderLooper->unregisterHandler(id());
+ mDecoderLooper->stop();
+}
+
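+// Helper: post |msg| to its handler and block for the reply, mapping a reply
+// without an "err" field to OK.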
+static
+status_t PostAndAwaitResponse(
+ const sp<AMessage> &msg, sp<AMessage> *response) {
+ status_t err = msg->postAndAwaitResponse(response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!(*response)->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+void NuPlayer::DecoderBase::configure(const sp<AMessage> &format) {
+ sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+ msg->setMessage("format", format);
+ msg->post();
+}
+
+void NuPlayer::DecoderBase::init() {
+ mDecoderLooper->registerHandler(this);
+}
+
+void NuPlayer::DecoderBase::setRenderer(const sp<Renderer> &renderer) {
+ sp<AMessage> msg = new AMessage(kWhatSetRenderer, id());
+ msg->setObject("renderer", renderer);
+ msg->post();
+}
+
+status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {
+ sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, id());
+ msg->setPointer("buffers", buffers);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+void NuPlayer::DecoderBase::signalFlush() {
+ (new AMessage(kWhatFlush, id()))->post();
+}
+
+void NuPlayer::DecoderBase::signalResume(bool notifyComplete) {
+ sp<AMessage> msg = new AMessage(kWhatResume, id());
+ msg->setInt32("notifyComplete", notifyComplete);
+ msg->post();
+}
+
+void NuPlayer::DecoderBase::initiateShutdown() {
+ (new AMessage(kWhatShutdown, id()))->post();
+}
+
+void NuPlayer::DecoderBase::onRequestInputBuffers() {
+ if (mRequestInputBuffersPending) {
+ return;
+ }
+
+ doRequestBuffers();
+}
+
+void NuPlayer::DecoderBase::scheduleRequestBuffers() {
+ if (mRequestInputBuffersPending) {
+ return;
+ }
+ mRequestInputBuffersPending = true;
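+ // Re-poll after a short (10 ms) delay rather than spinning while the source
+ // has nothing to offer.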
+ sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, id());
+ msg->post(10 * 1000ll);
+}
+
+void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {
+
+ switch (msg->what()) {
+ case kWhatConfigure:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+ onConfigure(format);
+ break;
+ }
+
+ case kWhatSetRenderer:
+ {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("renderer", &obj));
+ onSetRenderer(static_cast<Renderer *>(obj.get()));
+ break;
+ }
+
+ case kWhatGetInputBuffers:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ Vector<sp<ABuffer> > *dstBuffers;
+ CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
+
+ onGetInputBuffers(dstBuffers);
+
+ (new AMessage)->postReply(replyID);
+ break;
+ }
+
+ case kWhatRequestInputBuffers:
+ {
+ mRequestInputBuffersPending = false;
+ onRequestInputBuffers();
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ onFlush(true);
+ break;
+ }
+
+ case kWhatResume:
+ {
+ int32_t notifyComplete;
+ CHECK(msg->findInt32("notifyComplete", &notifyComplete));
+
+ onResume(notifyComplete);
+ break;
+ }
+
+ case kWhatShutdown:
+ {
+ onShutdown(true);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+void NuPlayer::DecoderBase::handleError(int32_t err)
+{
+ // We cannot release the codec immediately because buffers are still outstanding
+ // in the renderer. We signal the error to the player so that it can shut down and
+ // release the decoder after flushing, and we increment the generation to discard stale messages.
+
+ ++mBufferGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+} // namespace android
+
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
new file mode 100644
index 0000000..6732ff4
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_DECODER_BASE_H_
+
+#define NUPLAYER_DECODER_BASE_H_
+
+#include "NuPlayer.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct MediaCodec;
+struct MediaBuffer;
+
+struct NuPlayer::DecoderBase : public AHandler {
+ DecoderBase(const sp<AMessage> &notify);
+
+ void configure(const sp<AMessage> &format);
+ void init();
+
+ void setRenderer(const sp<Renderer> &renderer);
+
+ status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;
+ void signalFlush();
+ void signalResume(bool notifyComplete);
+ void initiateShutdown();
+
+ virtual void getStats(
+ int64_t *mNumFramesTotal,
+ int64_t *mNumFramesDropped) const = 0;
+
+ enum {
+ kWhatInputDiscontinuity = 'inDi',
+ kWhatVideoSizeChanged = 'viSC',
+ kWhatFlushCompleted = 'flsC',
+ kWhatShutdownCompleted = 'shDC',
+ kWhatResumeCompleted = 'resC',
+ kWhatEOS = 'eos ',
+ kWhatError = 'err ',
+ };
+
+protected:
+
+ virtual ~DecoderBase();
+
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+ virtual void onConfigure(const sp<AMessage> &format) = 0;
+ virtual void onSetRenderer(const sp<Renderer> &renderer) = 0;
+ virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers) = 0;
+ virtual void onResume(bool notifyComplete) = 0;
+ virtual void onFlush(bool notifyComplete) = 0;
+ virtual void onShutdown(bool notifyComplete) = 0;
+
+ void onRequestInputBuffers();
+ void scheduleRequestBuffers();
+ virtual void doRequestBuffers() = 0;
+ virtual void handleError(int32_t err);
+
+ sp<AMessage> mNotify;
+ int32_t mBufferGeneration;
+
+private:
+ enum {
+ kWhatConfigure = 'conf',
+ kWhatSetRenderer = 'setR',
+ kWhatGetInputBuffers = 'gInB',
+ kWhatRequestInputBuffers = 'reqB',
+ kWhatFlush = 'flus',
+ kWhatShutdown = 'shuD',
+ };
+
+ sp<ALooper> mDecoderLooper;
+ bool mRequestInputBuffersPending;
+
+ DISALLOW_EVIL_CONSTRUCTORS(DecoderBase);
+};
+
+} // namespace android
+
+#endif // NUPLAYER_DECODER_BASE_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
new file mode 100644
index 0000000..9f7f09a
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp
@@ -0,0 +1,425 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NuPlayerDecoderPassThrough"
+#include <utils/Log.h>
+#include <inttypes.h>
+
+#include "NuPlayerDecoderPassThrough.h"
+
+#include "NuPlayerRenderer.h"
+#include "NuPlayerSource.h"
+
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include "ATSParser.h"
+
+namespace android {
+
+// TODO optimize buffer size for power consumption
+// The offload read buffer size is 32 KB but 24 KB uses less power.
+static const size_t kAggregateBufferSizeBytes = 24 * 1024;
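+// Stop fetching more input once roughly this many compressed bytes are cached
+// downstream (see isDoneFetching()).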
+static const size_t kMaxCachedBytes = 200000;
+
+NuPlayer::DecoderPassThrough::DecoderPassThrough(
+ const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer)
+ : DecoderBase(notify),
+ mSource(source),
+ mRenderer(renderer),
+ mSkipRenderingUntilMediaTimeUs(-1ll),
+ mPaused(false),
+ mReachedEOS(true),
+ mPendingAudioErr(OK),
+ mPendingBuffersToDrain(0),
+ mCachedBytes(0),
+ mComponentName("pass through decoder") {
+ ALOGW_IF(renderer == NULL, "expect a non-NULL renderer");
+}
+
+NuPlayer::DecoderPassThrough::~DecoderPassThrough() {
+}
+
+void NuPlayer::DecoderPassThrough::getStats(
+ int64_t *numFramesTotal, int64_t *numFramesDropped) const {
+ *numFramesTotal = 0;
+ *numFramesDropped = 0;
+}
+
+void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {
+ ALOGV("[%s] onConfigure", mComponentName.c_str());
+ mCachedBytes = 0;
+ mPendingBuffersToDrain = 0;
+ mReachedEOS = false;
+ ++mBufferGeneration;
+
+ onRequestInputBuffers();
+
+ // The audio sink is already opened before the PassThrough decoder is created.
+ // Opening it again is only relevant if the decoder is instantiated after a
+ // shutdown and the format is different.
+ status_t err = mRenderer->openAudioSink(
+ format, true /* offloadOnly */, false /* hasVideo */,
+ AUDIO_OUTPUT_FLAG_NONE /* flags */, NULL /* isOffloaded */);
+ if (err != OK) {
+ handleError(err);
+ }
+}
+
+void NuPlayer::DecoderPassThrough::onSetRenderer(
+ const sp<Renderer> &renderer) {
+ // renderer can't be changed during offloading
+ ALOGW_IF(renderer != mRenderer,
+ "ignoring request to change renderer");
+}
+
+void NuPlayer::DecoderPassThrough::onGetInputBuffers(
+ Vector<sp<ABuffer> > * /* dstBuffers */) {
+ ALOGE("onGetInputBuffers() called unexpectedly");
+}
+
+bool NuPlayer::DecoderPassThrough::isStaleReply(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ return generation != mBufferGeneration;
+}
+
+bool NuPlayer::DecoderPassThrough::isDoneFetching() const {
+ ALOGV("[%s] mCachedBytes = %zu, mReachedEOS = %d mPaused = %d",
+ mComponentName.c_str(), mCachedBytes, mReachedEOS, mPaused);
+
+ return mCachedBytes >= kMaxCachedBytes || mReachedEOS || mPaused;
+}
+
+void NuPlayer::DecoderPassThrough::doRequestBuffers() {
+ status_t err = OK;
+ while (!isDoneFetching()) {
+ sp<AMessage> msg = new AMessage();
+
+ err = fetchInputData(msg);
+ if (err != OK) {
+ break;
+ }
+
+ onInputBufferFetched(msg);
+ }
+
+ if (err == -EWOULDBLOCK
+ && mSource->feedMoreTSData() == OK) {
+ scheduleRequestBuffers();
+ }
+}
+
+status_t NuPlayer::DecoderPassThrough::dequeueAccessUnit(sp<ABuffer> *accessUnit) {
+ status_t err;
+
+ // Did we save an accessUnit earlier because of a discontinuity?
+ if (mPendingAudioAccessUnit != NULL) {
+ *accessUnit = mPendingAudioAccessUnit;
+ mPendingAudioAccessUnit.clear();
+ err = mPendingAudioErr;
+ ALOGV("feedDecoderInputData() use mPendingAudioAccessUnit");
+ } else {
+ err = mSource->dequeueAccessUnit(true /* audio */, accessUnit);
+ }
+
+ if (err == INFO_DISCONTINUITY || err == ERROR_END_OF_STREAM) {
+ if (mAggregateBuffer != NULL) {
+ // We already have some data so save this for later.
+ mPendingAudioErr = err;
+ mPendingAudioAccessUnit = *accessUnit;
+ (*accessUnit).clear();
+ ALOGD("return aggregated buffer and save err(=%d) for later", err);
+ err = OK;
+ }
+ }
+
+ return err;
+}
+
+sp<ABuffer> NuPlayer::DecoderPassThrough::aggregateBuffer(
+ const sp<ABuffer> &accessUnit) {
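+ // Combine small access units into one larger buffer (up to
+ // kAggregateBufferSizeBytes) so fewer, larger buffers are queued to the
+ // renderer. Returns NULL while still accumulating, or a buffer that is
+ // ready to be queued.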
+ sp<ABuffer> aggregate;
+
+ if (accessUnit == NULL) {
+ // accessUnit is saved to mPendingAudioAccessUnit
+ // return current mAggregateBuffer
+ aggregate = mAggregateBuffer;
+ mAggregateBuffer.clear();
+ return aggregate;
+ }
+
+ size_t smallSize = accessUnit->size();
+ if ((mAggregateBuffer == NULL)
+ // Don't bother aggregating if there is only room for a few small buffers.
+ && (smallSize < (kAggregateBufferSizeBytes / 3))) {
+ // Create a larger buffer for combining smaller buffers from the extractor.
+ mAggregateBuffer = new ABuffer(kAggregateBufferSizeBytes);
+ mAggregateBuffer->setRange(0, 0); // start empty
+ }
+
+ if (mAggregateBuffer != NULL) {
+ int64_t timeUs;
+ int64_t dummy;
+ bool smallTimestampValid = accessUnit->meta()->findInt64("timeUs", &timeUs);
+ bool bigTimestampValid = mAggregateBuffer->meta()->findInt64("timeUs", &dummy);
+ // Will the smaller buffer fit?
+ size_t bigSize = mAggregateBuffer->size();
+ size_t roomLeft = mAggregateBuffer->capacity() - bigSize;
+ // Should we save this small buffer for the next big buffer?
+ // If the first small buffer did not have a timestamp then save
+ // any buffer that does have a timestamp until the next big buffer.
+ if ((smallSize > roomLeft)
+ || (!bigTimestampValid && (bigSize > 0) && smallTimestampValid)) {
+ mPendingAudioErr = OK;
+ mPendingAudioAccessUnit = accessUnit;
+ aggregate = mAggregateBuffer;
+ mAggregateBuffer.clear();
+ } else {
+ // Grab time from first small buffer if available.
+ if ((bigSize == 0) && smallTimestampValid) {
+ mAggregateBuffer->meta()->setInt64("timeUs", timeUs);
+ }
+ // Append small buffer to the bigger buffer.
+ memcpy(mAggregateBuffer->base() + bigSize, accessUnit->data(), smallSize);
+ bigSize += smallSize;
+ mAggregateBuffer->setRange(0, bigSize);
+
+ ALOGV("feedDecoderInputData() smallSize = %zu, bigSize = %zu, capacity = %zu",
+ smallSize, bigSize, mAggregateBuffer->capacity());
+ }
+ } else {
+ // decided not to aggregate
+ aggregate = accessUnit;
+ }
+
+ return aggregate;
+}
+
+status_t NuPlayer::DecoderPassThrough::fetchInputData(sp<AMessage> &reply) {
+ sp<ABuffer> accessUnit;
+
+ do {
+ status_t err = dequeueAccessUnit(&accessUnit);
+
+ if (err == -EWOULDBLOCK) {
+ return err;
+ } else if (err != OK) {
+ if (err == INFO_DISCONTINUITY) {
+ int32_t type;
+ CHECK(accessUnit->meta()->findInt32("discontinuity", &type));
+
+ bool formatChange =
+ (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT) != 0;
+
+ bool timeChange =
+ (type & ATSParser::DISCONTINUITY_TIME) != 0;
+
+ ALOGI("audio discontinuity (formatChange=%d, time=%d)",
+ formatChange, timeChange);
+
+ if (formatChange || timeChange) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatInputDiscontinuity);
+ // the format change will be handled seamlessly;
+ // only notify NuPlayer so it can scan sources
+ msg->setInt32("formatChange", false);
+ msg->post();
+ }
+
+ if (timeChange) {
+ onFlush(false /* notifyComplete */);
+ err = OK;
+ } else if (formatChange) {
+ // do seamless format change
+ err = OK;
+ } else {
+ // This stream is unaffected by the discontinuity
+ return -EWOULDBLOCK;
+ }
+ }
+
+ reply->setInt32("err", err);
+ return OK;
+ }
+
+ accessUnit = aggregateBuffer(accessUnit);
+ } while (accessUnit == NULL);
+
+#if 0
+ int64_t mediaTimeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("feeding audio input buffer at media time %.2f secs",
+ mediaTimeUs / 1E6);
+#endif
+
+ reply->setBuffer("buffer", accessUnit);
+
+ return OK;
+}
+
+void NuPlayer::DecoderPassThrough::onInputBufferFetched(
+ const sp<AMessage> &msg) {
+ if (mReachedEOS) {
+ return;
+ }
+
+ sp<ABuffer> buffer;
+ bool hasBuffer = msg->findBuffer("buffer", &buffer);
+ if (buffer == NULL) {
+ int32_t streamErr = ERROR_END_OF_STREAM;
+ CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+ if (streamErr == OK) {
+ return;
+ }
+
+ mReachedEOS = true;
+ if (mRenderer != NULL) {
+ mRenderer->queueEOS(true /* audio */, ERROR_END_OF_STREAM);
+ }
+ return;
+ }
+
+ sp<AMessage> extra;
+ if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
+ int64_t resumeAtMediaTimeUs;
+ if (extra->findInt64(
+ "resume-at-mediaTimeUs", &resumeAtMediaTimeUs)) {
+ ALOGI("[%s] suppressing rendering until %lld us",
+ mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
+ mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
+ }
+ }
+
+ int32_t bufferSize = buffer->size();
+ mCachedBytes += bufferSize;
+
+ if (mSkipRenderingUntilMediaTimeUs >= 0) {
+ int64_t timeUs = 0;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ if (timeUs < mSkipRenderingUntilMediaTimeUs) {
+ ALOGV("[%s] dropping buffer at time %lld as requested.",
+ mComponentName.c_str(), (long long)timeUs);
+
+ onBufferConsumed(bufferSize);
+ return;
+ }
+
+ mSkipRenderingUntilMediaTimeUs = -1;
+ }
+
+ if (mRenderer == NULL) {
+ onBufferConsumed(bufferSize);
+ return;
+ }
+
+ sp<AMessage> reply = new AMessage(kWhatBufferConsumed, id());
+ reply->setInt32("generation", mBufferGeneration);
+ reply->setInt32("size", bufferSize);
+
+ mRenderer->queueBuffer(true /* audio */, buffer, reply);
+
+ ++mPendingBuffersToDrain;
+ ALOGV("onInputBufferFilled: #ToDrain = %zu, cachedBytes = %zu",
+ mPendingBuffersToDrain, mCachedBytes);
+}
+
+void NuPlayer::DecoderPassThrough::onBufferConsumed(int32_t size) {
+ --mPendingBuffersToDrain;
+ mCachedBytes -= size;
+ ALOGV("onBufferConsumed: #ToDrain = %zu, cachedBytes = %zu",
+ mPendingBuffersToDrain, mCachedBytes);
+ onRequestInputBuffers();
+}
+
+void NuPlayer::DecoderPassThrough::onResume(bool notifyComplete) {
+ mPaused = false;
+
+ onRequestInputBuffers();
+
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatResumeCompleted);
+ notify->post();
+ }
+}
+
+void NuPlayer::DecoderPassThrough::onFlush(bool notifyComplete) {
+ ++mBufferGeneration;
+ mSkipRenderingUntilMediaTimeUs = -1;
+ mPendingAudioAccessUnit.clear();
+ mPendingAudioErr = OK;
+ mAggregateBuffer.clear();
+
+ if (mRenderer != NULL) {
+ mRenderer->flush(true /* audio */, notifyComplete);
+ mRenderer->signalTimeDiscontinuity();
+ }
+
+ if (notifyComplete) {
+ mPaused = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFlushCompleted);
+ notify->post();
+ }
+
+ mPendingBuffersToDrain = 0;
+ mCachedBytes = 0;
+ mReachedEOS = false;
+}
+
+void NuPlayer::DecoderPassThrough::onShutdown(bool notifyComplete) {
+ ++mBufferGeneration;
+ mSkipRenderingUntilMediaTimeUs = -1;
+
+ if (notifyComplete) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatShutdownCompleted);
+ notify->post();
+ }
+
+ mReachedEOS = true;
+}
+
+void NuPlayer::DecoderPassThrough::onMessageReceived(const sp<AMessage> &msg) {
+ ALOGV("[%s] onMessage: %s", mComponentName.c_str(),
+ msg->debugString().c_str());
+
+ switch (msg->what()) {
+ case kWhatBufferConsumed:
+ {
+ if (!isStaleReply(msg)) {
+ int32_t size;
+ CHECK(msg->findInt32("size", &size));
+ onBufferConsumed(size);
+ }
+ break;
+ }
+
+ default:
+ DecoderBase::onMessageReceived(msg);
+ break;
+ }
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
new file mode 100644
index 0000000..a6e1faf
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NUPLAYER_DECODER_PASS_THROUGH_H_
+
+#define NUPLAYER_DECODER_PASS_THROUGH_H_
+
+#include "NuPlayer.h"
+
+#include "NuPlayerDecoderBase.h"
+
+namespace android {
+
+struct NuPlayer::DecoderPassThrough : public DecoderBase {
+ DecoderPassThrough(const sp<AMessage> &notify,
+ const sp<Source> &source,
+ const sp<Renderer> &renderer);
+
+ virtual void getStats(
+ int64_t *mNumFramesTotal,
+ int64_t *mNumFramesDropped) const;
+
+protected:
+
+ virtual ~DecoderPassThrough();
+
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+ virtual void onConfigure(const sp<AMessage> &format);
+ virtual void onSetRenderer(const sp<Renderer> &renderer);
+ virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);
+ virtual void onResume(bool notifyComplete);
+ virtual void onFlush(bool notifyComplete);
+ virtual void onShutdown(bool notifyComplete);
+ virtual void doRequestBuffers();
+
+private:
+ enum {
+ kWhatBufferConsumed = 'bufC',
+ };
+
+ sp<Source> mSource;
+ sp<Renderer> mRenderer;
+ int64_t mSkipRenderingUntilMediaTimeUs;
+ bool mPaused;
+
+ bool mReachedEOS;
+
+ // Used by feedDecoderInputData to aggregate small buffers into
+ // one large buffer.
+ sp<ABuffer> mPendingAudioAccessUnit;
+ status_t mPendingAudioErr;
+ sp<ABuffer> mAggregateBuffer;
+
+ // mPendingBuffersToDrain is only used for debugging. It can be removed
+ // when the power investigation is done.
+ size_t mPendingBuffersToDrain;
+ size_t mCachedBytes;
+ AString mComponentName;
+
+ bool isStaleReply(const sp<AMessage> &msg);
+ bool isDoneFetching() const;
+
+ status_t dequeueAccessUnit(sp<ABuffer> *accessUnit);
+ sp<ABuffer> aggregateBuffer(const sp<ABuffer> &accessUnit);
+ status_t fetchInputData(sp<AMessage> &reply);
+
+ void onInputBufferFetched(const sp<AMessage> &msg);
+ void onBufferConsumed(int32_t size);
+
+ DISALLOW_EVIL_CONSTRUCTORS(DecoderPassThrough);
+};
+
+} // namespace android
+
+#endif // NUPLAYER_DECODER_PASS_THROUGH_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 239296e..bc79fdb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -26,7 +26,9 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
namespace android {
@@ -37,12 +39,14 @@ NuPlayerDriver::NuPlayerDriver()
mSetSurfaceInProgress(false),
mDurationUs(-1),
mPositionUs(-1),
- mNumFramesTotal(0),
- mNumFramesDropped(0),
+ mSeekInProgress(false),
mLooper(new ALooper),
mPlayerFlags(0),
mAtEOS(false),
+ mLooping(false),
+ mAutoLoop(false),
mStartupSeekTimeUs(-1) {
+ ALOGV("NuPlayerDriver(%p)", this);
mLooper->setName("NuPlayerDriver Looper");
mLooper->start(
@@ -57,6 +61,7 @@ NuPlayerDriver::NuPlayerDriver()
}
NuPlayerDriver::~NuPlayerDriver() {
+ ALOGV("~NuPlayerDriver(%p)", this);
mLooper->stop();
}
@@ -71,7 +76,10 @@ status_t NuPlayerDriver::setUID(uid_t uid) {
}
status_t NuPlayerDriver::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+ ALOGV("setDataSource(%p) url(%s)", this, uriDebugString(url, false).c_str());
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
@@ -80,7 +88,7 @@ status_t NuPlayerDriver::setDataSource(
mState = STATE_SET_DATASOURCE_PENDING;
- mPlayer->setDataSourceAsync(url, headers);
+ mPlayer->setDataSourceAsync(httpService, url, headers);
while (mState == STATE_SET_DATASOURCE_PENDING) {
mCondition.wait(mLock);
@@ -90,6 +98,7 @@ status_t NuPlayerDriver::setDataSource(
}
status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
+ ALOGV("setDataSource(%p) file(%d)", this, fd);
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
@@ -108,6 +117,7 @@ status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
}
status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
+ ALOGV("setDataSource(%p) stream source", this);
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
@@ -127,6 +137,7 @@ status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
status_t NuPlayerDriver::setVideoSurfaceTexture(
const sp<IGraphicBufferProducer> &bufferProducer) {
+ ALOGV("setVideoSurfaceTexture(%p)", this);
Mutex::Autolock autoLock(mLock);
if (mSetSurfaceInProgress) {
@@ -154,6 +165,7 @@ status_t NuPlayerDriver::setVideoSurfaceTexture(
}
status_t NuPlayerDriver::prepare() {
+ ALOGV("prepare(%p)", this);
Mutex::Autolock autoLock(mLock);
return prepare_l();
}
@@ -172,12 +184,23 @@ status_t NuPlayerDriver::prepare_l() {
mCondition.wait(mLock);
}
return (mState == STATE_PREPARED) ? OK : UNKNOWN_ERROR;
+ case STATE_STOPPED:
+ // this is really just paused; handle it as a seek to the start
+ mAtEOS = false;
+ mState = STATE_STOPPED_AND_PREPARING;
+ mIsAsyncPrepare = false;
+ mPlayer->seekToAsync(0, true /* needNotify */);
+ while (mState == STATE_STOPPED_AND_PREPARING) {
+ mCondition.wait(mLock);
+ }
+ return (mState == STATE_STOPPED_AND_PREPARED) ? OK : UNKNOWN_ERROR;
default:
return INVALID_OPERATION;
};
}
status_t NuPlayerDriver::prepareAsync() {
+ ALOGV("prepareAsync(%p)", this);
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -186,12 +209,20 @@ status_t NuPlayerDriver::prepareAsync() {
mIsAsyncPrepare = true;
mPlayer->prepareAsync();
return OK;
+ case STATE_STOPPED:
+ // this is really just paused; handle it as a seek to the start
+ mAtEOS = false;
+ mState = STATE_STOPPED_AND_PREPARING;
+ mIsAsyncPrepare = true;
+ mPlayer->seekToAsync(0, true /* needNotify */);
+ return OK;
default:
return INVALID_OPERATION;
};
}
status_t NuPlayerDriver::start() {
+ ALOGD("start(%p)", this);
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -208,29 +239,36 @@ status_t NuPlayerDriver::start() {
// fall through
}
+ case STATE_PAUSED:
+ case STATE_STOPPED_AND_PREPARED:
+ {
+ if (mAtEOS && mStartupSeekTimeUs < 0) {
+ mStartupSeekTimeUs = 0;
+ mPositionUs = -1;
+ }
+
+ // fall through
+ }
+
case STATE_PREPARED:
{
mAtEOS = false;
mPlayer->start();
if (mStartupSeekTimeUs >= 0) {
- if (mStartupSeekTimeUs == 0) {
- notifySeekComplete();
- } else {
- mPlayer->seekToAsync(mStartupSeekTimeUs);
- }
-
+ mPlayer->seekToAsync(mStartupSeekTimeUs);
mStartupSeekTimeUs = -1;
}
break;
}
case STATE_RUNNING:
- break;
-
- case STATE_PAUSED:
{
- mPlayer->resume();
+ if (mAtEOS) {
+ mPlayer->seekToAsync(0);
+ mAtEOS = false;
+ mPositionUs = -1;
+ }
break;
}
@@ -244,10 +282,41 @@ status_t NuPlayerDriver::start() {
}
status_t NuPlayerDriver::stop() {
- return pause();
+ ALOGD("stop(%p)", this);
+ Mutex::Autolock autoLock(mLock);
+
+ switch (mState) {
+ case STATE_RUNNING:
+ mPlayer->pause();
+ // fall through
+
+ case STATE_PAUSED:
+ mState = STATE_STOPPED;
+ notifyListener_l(MEDIA_STOPPED);
+ break;
+
+ case STATE_PREPARED:
+ case STATE_STOPPED:
+ case STATE_STOPPED_AND_PREPARING:
+ case STATE_STOPPED_AND_PREPARED:
+ mState = STATE_STOPPED;
+ break;
+
+ default:
+ return INVALID_OPERATION;
+ }
+
+ return OK;
}
status_t NuPlayerDriver::pause() {
+ // The NuPlayerRenderer may get flushed if paused for long enough, e.g. by the pause-timeout
+ // tear-down in audio offload mode. If that happens, the NuPlayerRenderer will no longer know the
+ // current position. So, similar to seekTo, update |mPositionUs| to the pause position by calling
+ // getCurrentPosition here.
+ int msec;
+ getCurrentPosition(&msec);
+
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -256,7 +325,8 @@ status_t NuPlayerDriver::pause() {
return OK;
case STATE_RUNNING:
- notifyListener(MEDIA_PAUSED);
+ mState = STATE_PAUSED;
+ notifyListener_l(MEDIA_PAUSED);
mPlayer->pause();
break;
@@ -264,8 +334,6 @@ status_t NuPlayerDriver::pause() {
return INVALID_OPERATION;
}
- mState = STATE_PAUSED;
-
return OK;
}
@@ -274,14 +342,20 @@ bool NuPlayerDriver::isPlaying() {
}
status_t NuPlayerDriver::seekTo(int msec) {
+ ALOGD("seekTo(%p) %d ms", this, msec);
Mutex::Autolock autoLock(mLock);
int64_t seekTimeUs = msec * 1000ll;
switch (mState) {
case STATE_PREPARED:
+ case STATE_STOPPED_AND_PREPARED:
{
mStartupSeekTimeUs = seekTimeUs;
+ // Pretend that the seek completed; it will actually happen when playback starts.
+ // TODO: actually perform the seek here, so that the player is ready to go at the new
+ // location
+ notifySeekComplete_l();
break;
}
@@ -289,9 +363,10 @@ status_t NuPlayerDriver::seekTo(int msec) {
case STATE_PAUSED:
{
mAtEOS = false;
+ mSeekInProgress = true;
// seeks can take a while, so we are essentially paused
- notifyListener(MEDIA_PAUSED);
- mPlayer->seekToAsync(seekTimeUs);
+ notifyListener_l(MEDIA_PAUSED);
+ mPlayer->seekToAsync(seekTimeUs, true /* needNotify */);
break;
}
@@ -299,18 +374,33 @@ status_t NuPlayerDriver::seekTo(int msec) {
return INVALID_OPERATION;
}
+ mPositionUs = seekTimeUs;
return OK;
}
status_t NuPlayerDriver::getCurrentPosition(int *msec) {
- Mutex::Autolock autoLock(mLock);
+ int64_t tempUs = 0;
+ {
+ Mutex::Autolock autoLock(mLock);
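+ // While a seek is in flight or we are paused, report the locally cached
+ // position; the renderer's notion of the position may be stale or changing.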
+ if (mSeekInProgress || mState == STATE_PAUSED) {
+ tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
+ *msec = (int)divRound(tempUs, (int64_t)(1000));
+ return OK;
+ }
+ }
- if (mPositionUs < 0) {
- *msec = 0;
+ status_t ret = mPlayer->getCurrentPosition(&tempUs);
+
+ Mutex::Autolock autoLock(mLock);
+ // We need to check mSeekInProgress here because mPlayer->seekToAsync is an async call, which
+ // means getCurrentPosition can be called before the seek completes. In other words, the renderer
+ // may return a position value that differs from the seek-to position.
+ if (ret != OK) {
+ tempUs = (mPositionUs <= 0) ? 0 : mPositionUs;
} else {
- *msec = (mPositionUs + 500ll) / 1000;
+ mPositionUs = tempUs;
}
-
+ *msec = (int)divRound(tempUs, (int64_t)(1000));
return OK;
}
@@ -327,6 +417,7 @@ status_t NuPlayerDriver::getDuration(int *msec) {
}
status_t NuPlayerDriver::reset() {
+ ALOGD("reset(%p)", this);
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -341,7 +432,7 @@ status_t NuPlayerDriver::reset() {
{
CHECK(mIsAsyncPrepare);
- notifyListener(MEDIA_PREPARED);
+ notifyListener_l(MEDIA_PREPARED);
break;
}
@@ -349,7 +440,9 @@ status_t NuPlayerDriver::reset() {
break;
}
- notifyListener(MEDIA_STOPPED);
+ if (mState != STATE_STOPPED) {
+ notifyListener_l(MEDIA_STOPPED);
+ }
mState = STATE_RESET_IN_PROGRESS;
mPlayer->resetAsync();
@@ -361,12 +454,14 @@ status_t NuPlayerDriver::reset() {
mDurationUs = -1;
mPositionUs = -1;
mStartupSeekTimeUs = -1;
+ mLooping = false;
return OK;
}
status_t NuPlayerDriver::setLooping(int loop) {
- return INVALID_OPERATION;
+ mLooping = loop != 0;
+ return OK;
}
player_type NuPlayerDriver::playerType() {
@@ -401,13 +496,22 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) {
case INVOKE_ID_SELECT_TRACK:
{
int trackIndex = request.readInt32();
- return mPlayer->selectTrack(trackIndex, true /* select */);
+ int msec = 0;
+ // getCurrentPosition should always return OK
+ getCurrentPosition(&msec);
+ return mPlayer->selectTrack(trackIndex, true /* select */, msec * 1000ll);
}
case INVOKE_ID_UNSELECT_TRACK:
{
int trackIndex = request.readInt32();
- return mPlayer->selectTrack(trackIndex, false /* select */);
+ return mPlayer->selectTrack(trackIndex, false /* select */, 0xdeadbeef /* not used */);
+ }
+
+ case INVOKE_ID_GET_SELECTED_TRACK:
+ {
+ int32_t type = request.readInt32();
+ return mPlayer->getSelectedTrack(type, reply);
}
default:
@@ -419,18 +523,20 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) {
void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) {
mPlayer->setAudioSink(audioSink);
+ mAudioSink = audioSink;
}
-status_t NuPlayerDriver::setParameter(int key, const Parcel &request) {
+status_t NuPlayerDriver::setParameter(
+ int /* key */, const Parcel & /* request */) {
return INVALID_OPERATION;
}
-status_t NuPlayerDriver::getParameter(int key, Parcel *reply) {
+status_t NuPlayerDriver::getParameter(int /* key */, Parcel * /* reply */) {
return INVALID_OPERATION;
}
status_t NuPlayerDriver::getMetadata(
- const media::Metadata::Filter& ids, Parcel *records) {
+ const media::Metadata::Filter& /* ids */, Parcel *records) {
Mutex::Autolock autoLock(mLock);
using media::Metadata;
@@ -457,6 +563,7 @@ status_t NuPlayerDriver::getMetadata(
}
void NuPlayerDriver::notifyResetComplete() {
+ ALOGD("notifyResetComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
CHECK_EQ(mState, STATE_RESET_IN_PROGRESS);
@@ -465,6 +572,7 @@ void NuPlayerDriver::notifyResetComplete() {
}
void NuPlayerDriver::notifySetSurfaceComplete() {
+ ALOGV("notifySetSurfaceComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
CHECK(mSetSurfaceInProgress);
@@ -478,34 +586,45 @@ void NuPlayerDriver::notifyDuration(int64_t durationUs) {
mDurationUs = durationUs;
}
-void NuPlayerDriver::notifyPosition(int64_t positionUs) {
- Mutex::Autolock autoLock(mLock);
- mPositionUs = positionUs;
-}
-
void NuPlayerDriver::notifySeekComplete() {
- notifyListener(MEDIA_SEEK_COMPLETE);
-}
-
-void NuPlayerDriver::notifyFrameStats(
- int64_t numFramesTotal, int64_t numFramesDropped) {
+ ALOGV("notifySeekComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
- mNumFramesTotal = numFramesTotal;
- mNumFramesDropped = numFramesDropped;
+ mSeekInProgress = false;
+ notifySeekComplete_l();
+}
+
+void NuPlayerDriver::notifySeekComplete_l() {
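+ // A seek-complete may also be the tail end of a stop-and-prepare sequence;
+ // in that case it is reported to the client as MEDIA_PREPARED (for an async
+ // prepare) rather than MEDIA_SEEK_COMPLETE.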
+ bool wasSeeking = true;
+ if (mState == STATE_STOPPED_AND_PREPARING) {
+ wasSeeking = false;
+ mState = STATE_STOPPED_AND_PREPARED;
+ mCondition.broadcast();
+ if (!mIsAsyncPrepare) {
+ // if we are preparing synchronously, no need to notify listener
+ return;
+ }
+ } else if (mState == STATE_STOPPED) {
+ // no need to notify listener
+ return;
+ }
+ notifyListener_l(wasSeeking ? MEDIA_SEEK_COMPLETE : MEDIA_PREPARED);
}
-status_t NuPlayerDriver::dump(int fd, const Vector<String16> &args) const {
- Mutex::Autolock autoLock(mLock);
+status_t NuPlayerDriver::dump(
+ int fd, const Vector<String16> & /* args */) const {
+ int64_t numFramesTotal;
+ int64_t numFramesDropped;
+ mPlayer->getStats(&numFramesTotal, &numFramesDropped);
FILE *out = fdopen(dup(fd), "w");
fprintf(out, " NuPlayer\n");
fprintf(out, " numFramesTotal(%" PRId64 "), numFramesDropped(%" PRId64 "), "
"percentageDropped(%.2f)\n",
- mNumFramesTotal,
- mNumFramesDropped,
- mNumFramesTotal == 0
- ? 0.0 : (double)mNumFramesDropped / mNumFramesTotal);
+ numFramesTotal,
+ numFramesDropped,
+ numFramesTotal == 0
+ ? 0.0 : (double)numFramesDropped / numFramesTotal);
fclose(out);
out = NULL;
@@ -515,11 +634,55 @@ status_t NuPlayerDriver::dump(int fd, const Vector<String16> &args) const {
void NuPlayerDriver::notifyListener(
int msg, int ext1, int ext2, const Parcel *in) {
- if (msg == MEDIA_PLAYBACK_COMPLETE || msg == MEDIA_ERROR) {
- mAtEOS = true;
+ Mutex::Autolock autoLock(mLock);
+ notifyListener_l(msg, ext1, ext2, in);
+}
+
+void NuPlayerDriver::notifyListener_l(
+ int msg, int ext1, int ext2, const Parcel *in) {
+ switch (msg) {
+ case MEDIA_PLAYBACK_COMPLETE:
+ {
+ if (mState != STATE_RESET_IN_PROGRESS) {
+ if (mAutoLoop) {
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+ if (streamType == AUDIO_STREAM_NOTIFICATION) {
+ ALOGW("disabling auto-loop for notification");
+ mAutoLoop = false;
+ }
+ }
+ if (mLooping || mAutoLoop) {
+ mPlayer->seekToAsync(0);
+ if (mAudioSink != NULL) {
+ // The renderer has stopped the sink at the end in order to play out
+ // the last little bit of audio. If we're looping, we need to restart it.
+ mAudioSink->start();
+ }
+ break;
+ }
+
+ mPlayer->pause();
+ mState = STATE_PAUSED;
+ }
+ // fall through
+ }
+
+ case MEDIA_ERROR:
+ {
+ mAtEOS = true;
+ break;
+ }
+
+ default:
+ break;
}
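+ // Drop the lock around the callout: the client's listener may call back
+ // into NuPlayerDriver, which would otherwise deadlock on mLock.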
+ mLock.unlock();
sendEvent(msg, ext1, ext2, in);
+ mLock.lock();
}
void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) {
@@ -548,15 +711,24 @@ void NuPlayerDriver::notifyPrepareCompleted(status_t err) {
mAsyncResult = err;
if (err == OK) {
+ // update state before notifying client, so that if client calls back into NuPlayerDriver
+ // in response, NuPlayerDriver has the right state
+ mState = STATE_PREPARED;
if (mIsAsyncPrepare) {
- notifyListener(MEDIA_PREPARED);
+ notifyListener_l(MEDIA_PREPARED);
}
- mState = STATE_PREPARED;
} else {
+ mState = STATE_UNPREPARED;
if (mIsAsyncPrepare) {
- notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+ notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
}
- mState = STATE_UNPREPARED;
+ }
+
+ sp<MetaData> meta = mPlayer->getFileMeta();
+ int32_t loop;
+ if (meta != NULL
+ && meta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
+ mAutoLoop = true;
}
mCondition.broadcast();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 99f72a6..5cba7d9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -31,7 +31,9 @@ struct NuPlayerDriver : public MediaPlayerInterface {
virtual status_t setUID(uid_t uid);
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
@@ -66,9 +68,8 @@ struct NuPlayerDriver : public MediaPlayerInterface {
void notifyResetComplete();
void notifySetSurfaceComplete();
void notifyDuration(int64_t durationUs);
- void notifyPosition(int64_t positionUs);
void notifySeekComplete();
- void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped);
+ void notifySeekComplete_l();
void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
void notifyFlagsChanged(uint32_t flags);
@@ -85,6 +86,9 @@ private:
STATE_RUNNING,
STATE_PAUSED,
STATE_RESET_IN_PROGRESS,
+ STATE_STOPPED, // equivalent to PAUSED
+ STATE_STOPPED_AND_PREPARING, // equivalent to PAUSED, but seeking
+ STATE_STOPPED_AND_PREPARED, // equivalent to PAUSED, but seek complete
};
mutable Mutex mLock;
@@ -100,19 +104,22 @@ private:
bool mSetSurfaceInProgress;
int64_t mDurationUs;
int64_t mPositionUs;
- int64_t mNumFramesTotal;
- int64_t mNumFramesDropped;
+ bool mSeekInProgress;
// <<<
sp<ALooper> mLooper;
sp<NuPlayer> mPlayer;
+ sp<AudioSink> mAudioSink;
uint32_t mPlayerFlags;
bool mAtEOS;
+ bool mLooping;
+ bool mAutoLoop;
int64_t mStartupSeekTimeUs;
status_t prepare_l();
+ void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index bf5271e..25225a8 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,14 +20,34 @@
#include "NuPlayerRenderer.h"
-#include "SoftwareRenderer.h"
-
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/AWakeLock.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+#include <VideoFrameScheduler.h>
+
+#include <inttypes.h>
namespace android {
+// Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
+// is closed to allow the audio DSP to power down.
+static const int64_t kOffloadPauseMaxUs = 10000000ll;
+
+// static
+const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
+ AUDIO_CHANNEL_NONE,
+ AUDIO_OUTPUT_FLAG_NONE,
+ AUDIO_FORMAT_INVALID,
+ 0, // mNumChannels
+ 0 // mSampleRate
+};
+
// static
const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
@@ -36,7 +56,6 @@ NuPlayer::Renderer::Renderer(
const sp<AMessage> &notify,
uint32_t flags)
: mAudioSink(sink),
- mSoftRenderer(NULL),
mNotify(notify),
mFlags(flags),
mNumFramesWritten(0),
@@ -44,28 +63,42 @@ NuPlayer::Renderer::Renderer(
mDrainVideoQueuePending(false),
mAudioQueueGeneration(0),
mVideoQueueGeneration(0),
+ mAudioFirstAnchorTimeMediaUs(-1),
mAnchorTimeMediaUs(-1),
mAnchorTimeRealUs(-1),
- mFlushingAudio(false),
- mFlushingVideo(false),
+ mAnchorNumFramesWritten(-1),
+ mAnchorMaxMediaUs(-1),
+ mVideoLateByUs(0ll),
mHasAudio(false),
mHasVideo(false),
+ mPauseStartedTimeRealUs(-1),
+ mFlushingAudio(false),
+ mFlushingVideo(false),
+ mNotifyCompleteAudio(false),
+ mNotifyCompleteVideo(false),
mSyncQueues(false),
mPaused(false),
+ mPausePositionMediaTimeUs(-1),
+ mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
mAudioRenderingStartGeneration(0),
- mLastPositionUpdateUs(-1ll),
- mVideoLateByUs(0ll) {
-}
+ mAudioOffloadPauseTimeoutGeneration(0),
+ mAudioOffloadTornDown(false),
+ mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
+ mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
+ mTotalBuffersQueued(0),
+ mLastAudioBufferDrained(0),
+ mWakeLock(new AWakeLock()) {
-NuPlayer::Renderer::~Renderer() {
- delete mSoftRenderer;
}
-void NuPlayer::Renderer::setSoftRenderer(SoftwareRenderer *softRenderer) {
- delete mSoftRenderer;
- mSoftRenderer = softRenderer;
+NuPlayer::Renderer::~Renderer() {
+ if (offloadingAudio()) {
+ mAudioSink->stop();
+ mAudioSink->flush();
+ mAudioSink->close();
+ }
}
void NuPlayer::Renderer::queueBuffer(
@@ -88,14 +121,20 @@ void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
msg->post();
}
-void NuPlayer::Renderer::flush(bool audio) {
+void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
{
Mutex::Autolock autoLock(mFlushLock);
if (audio) {
- CHECK(!mFlushingAudio);
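+ // If a flush is already in flight, just absorb the notifyComplete request;
+ // the pending flush will cover it.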
+ mNotifyCompleteAudio |= notifyComplete;
+ if (mFlushingAudio) {
+ return;
+ }
mFlushingAudio = true;
} else {
- CHECK(!mFlushingVideo);
+ mNotifyCompleteVideo |= notifyComplete;
+ if (mFlushingVideo) {
+ return;
+ }
mFlushingVideo = true;
}
}
@@ -106,13 +145,27 @@ void NuPlayer::Renderer::flush(bool audio) {
}
void NuPlayer::Renderer::signalTimeDiscontinuity() {
+ Mutex::Autolock autoLock(mLock);
// CHECK(mAudioQueue.empty());
// CHECK(mVideoQueue.empty());
- mAnchorTimeMediaUs = -1;
- mAnchorTimeRealUs = -1;
+ setAudioFirstAnchorTime(-1);
+ setAnchorTime(-1, -1);
+ setVideoLateByUs(0);
mSyncQueues = false;
}
+void NuPlayer::Renderer::signalAudioSinkChanged() {
+ (new AMessage(kWhatAudioSinkChanged, id()))->post();
+}
+
+void NuPlayer::Renderer::signalDisableOffloadAudio() {
+ (new AMessage(kWhatDisableOffloadAudio, id()))->post();
+}
+
+void NuPlayer::Renderer::signalEnableOffloadAudio() {
+ (new AMessage(kWhatEnableOffloadAudio, id()))->post();
+}
+
void NuPlayer::Renderer::pause() {
(new AMessage(kWhatPause, id()))->post();
}
@@ -121,8 +174,213 @@ void NuPlayer::Renderer::resume() {
(new AMessage(kWhatResume, id()))->post();
}
+void NuPlayer::Renderer::setVideoFrameRate(float fps) {
+ sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id());
+ msg->setFloat("frame-rate", fps);
+ msg->post();
+}
+
+// Called on any thread except the renderer's thread.
+status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
+ {
+ Mutex::Autolock autoLock(mLock);
+ int64_t currentPositionUs;
+ if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
+ *mediaUs = currentPositionUs;
+ return OK;
+ }
+ }
+ return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs());
+}
+
+// Called only on the renderer's thread.
+status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) {
+ return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs());
+}
+
+// Called only on the renderer's thread.
+// Since mPaused and mPausePositionMediaTimeUs are changed only on the renderer's
+// thread, there is no need to acquire mLock.
+status_t NuPlayer::Renderer::getCurrentPositionOnLooper(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
+ int64_t currentPositionUs;
+ if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
+ *mediaUs = currentPositionUs;
+ return OK;
+ }
+ return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo);
+}
+
+// Called either with mLock acquired or on the renderer's thread.
+bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) {
+ if (!mPaused || mPausePositionMediaTimeUs < 0ll) {
+ return false;
+ }
+ *mediaUs = mPausePositionMediaTimeUs;
+ return true;
+}
+
+// Called on any thread.
+status_t NuPlayer::Renderer::getCurrentPositionFromAnchor(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
+ Mutex::Autolock autoLock(mTimeLock);
+ if (!mHasAudio && !mHasVideo) {
+ return NO_INIT;
+ }
+
+ if (mAnchorTimeMediaUs < 0) {
+ return NO_INIT;
+ }
+
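+ // Extrapolate from the last anchor: the media position advances in lockstep
+ // with the real time elapsed since the anchor, minus any time spent paused.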
+ int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
+
+ if (mPauseStartedTimeRealUs != -1) {
+ positionUs -= (nowUs - mPauseStartedTimeRealUs);
+ }
+
+ // limit the position to the last queued media time (for a video-only stream the
+ // position will be discrete, as we don't know how long each frame lasts)
+ if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
+ if (positionUs > mAnchorMaxMediaUs) {
+ positionUs = mAnchorMaxMediaUs;
+ }
+ }
+
+ if (positionUs < mAudioFirstAnchorTimeMediaUs) {
+ positionUs = mAudioFirstAnchorTimeMediaUs;
+ }
+
+ *mediaUs = (positionUs <= 0) ? 0 : positionUs;
+ return OK;
+}
+
+void NuPlayer::Renderer::setHasMedia(bool audio) {
+ Mutex::Autolock autoLock(mTimeLock);
+ if (audio) {
+ mHasAudio = true;
+ } else {
+ mHasVideo = true;
+ }
+}
+
+void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mAudioFirstAnchorTimeMediaUs = mediaUs;
+}
+
+void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ if (mAudioFirstAnchorTimeMediaUs == -1) {
+ mAudioFirstAnchorTimeMediaUs = mediaUs;
+ }
+}
+
+void NuPlayer::Renderer::setAnchorTime(
+ int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mAnchorTimeMediaUs = mediaUs;
+ mAnchorTimeRealUs = realUs;
+ mAnchorNumFramesWritten = numFramesWritten;
+ if (resume) {
+ mPauseStartedTimeRealUs = -1;
+ }
+}
+
+void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mVideoLateByUs = lateUs;
+}
+
+int64_t NuPlayer::Renderer::getVideoLateByUs() {
+ Mutex::Autolock autoLock(mTimeLock);
+ return mVideoLateByUs;
+}
+
+void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
+ Mutex::Autolock autoLock(mTimeLock);
+ mPauseStartedTimeRealUs = realUs;
+}
+
+status_t NuPlayer::Renderer::openAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags,
+ bool *isOffloaded) {
+ sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id());
+ msg->setMessage("format", format);
+ msg->setInt32("offload-only", offloadOnly);
+ msg->setInt32("has-video", hasVideo);
+ msg->setInt32("flags", flags);
+
+ sp<AMessage> response;
+ msg->postAndAwaitResponse(&response);
+
+ int32_t err;
+ if (!response->findInt32("err", &err)) {
+ err = INVALID_OPERATION;
+ } else if (err == OK && isOffloaded != NULL) {
+ int32_t offload;
+ CHECK(response->findInt32("offload", &offload));
+ *isOffloaded = (offload != 0);
+ }
+ return err;
+}
+
+void NuPlayer::Renderer::closeAudioSink() {
+ sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id());
+
+ sp<AMessage> response;
+ msg->postAndAwaitResponse(&response);
+}
+
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatOpenAudioSink:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+
+ int32_t offloadOnly;
+ CHECK(msg->findInt32("offload-only", &offloadOnly));
+
+ int32_t hasVideo;
+ CHECK(msg->findInt32("has-video", &hasVideo));
+
+ uint32_t flags;
+ CHECK(msg->findInt32("flags", (int32_t *)&flags));
+
+ status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->setInt32("offload", offloadingAudio());
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+
+ break;
+ }
+
+ case kWhatCloseAudioSink:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ onCloseAudioSink();
+
+ sp<AMessage> response = new AMessage;
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStopAudioSink:
+ {
+ mAudioSink->stop();
+ break;
+ }
+
case kWhatDrainAudioQueue:
{
int32_t generation;
@@ -149,7 +407,10 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
// Let's give it more data after about half that time
// has elapsed.
- postDrainAudioQueue(delayUs / 2);
+ // kWhatDrainAudioQueue is used for non-offloading mode,
+ // and mLock is used only for offloading mode. Therefore,
+ // no need to acquire mLock here.
+ postDrainAudioQueue_l(delayUs / 2);
}
break;
}
@@ -166,7 +427,22 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
onDrainVideoQueue();
- postDrainVideoQueue();
+ Mutex::Autolock autoLock(mLock);
+ postDrainVideoQueue_l();
+ break;
+ }
+
+ case kWhatPostDrainVideoQueue:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation != mVideoQueueGeneration) {
+ break;
+ }
+
+ mDrainVideoQueuePending = false;
+ Mutex::Autolock autoLock(mLock);
+ postDrainVideoQueue_l();
break;
}
@@ -194,6 +470,18 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatDisableOffloadAudio:
+ {
+ onDisableOffloadAudio();
+ break;
+ }
+
+ case kWhatEnableOffloadAudio:
+ {
+ onEnableOffloadAudio();
+ break;
+ }
+
case kWhatPause:
{
onPause();
@@ -206,14 +494,42 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSetVideoFrameRate:
+ {
+ float fps;
+ CHECK(msg->findFloat("frame-rate", &fps));
+ onSetVideoFrameRate(fps);
+ break;
+ }
+
+ case kWhatAudioOffloadTearDown:
+ {
+ onAudioOffloadTearDown(kDueToError);
+ break;
+ }
+
+ case kWhatAudioOffloadPauseTimeout:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation != mAudioOffloadPauseTimeoutGeneration) {
+ break;
+ }
+ ALOGV("Audio Offload tear down due to pause timeout.");
+ onAudioOffloadTearDown(kDueToTimeout);
+ mWakeLock->release();
+ break;
+ }
+
default:
TRESPASS();
break;
}
}
-void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
- if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
+void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
+ if (mDrainAudioQueuePending || mSyncQueues || mPaused
+ || offloadingAudio()) {
return;
}
@@ -227,10 +543,6 @@ void NuPlayer::Renderer::postDrainAudioQueue(int64_t delayUs) {
msg->post(delayUs);
}
-void NuPlayer::Renderer::signalAudioSinkChanged() {
- (new AMessage(kWhatAudioSinkChanged, id()))->post();
-}
-
void NuPlayer::Renderer::prepareForMediaRenderingStart() {
mAudioRenderingStartGeneration = mAudioQueueGeneration;
mVideoRenderingStartGeneration = mVideoQueueGeneration;
@@ -248,6 +560,102 @@ void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
}
}
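+// Dispatches AudioSink callback events (used in offload mode) to the Renderer
+// instance passed in via the cookie pointer.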
+// static
+size_t NuPlayer::Renderer::AudioSinkCallback(
+ MediaPlayerBase::AudioSink * /* audioSink */,
+ void *buffer,
+ size_t size,
+ void *cookie,
+ MediaPlayerBase::AudioSink::cb_event_t event) {
+ NuPlayer::Renderer *me = (NuPlayer::Renderer *)cookie;
+
+ switch (event) {
+ case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
+ {
+ return me->fillAudioBuffer(buffer, size);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
+ {
+ me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);
+ break;
+ }
+
+ case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
+ {
+ me->notifyAudioOffloadTearDown();
+ break;
+ }
+ }
+
+ return 0;
+}
+
+size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (!offloadingAudio() || mPaused) {
+ return 0;
+ }
+
+ bool hasEOS = false;
+
+ size_t sizeCopied = 0;
+ bool firstEntry = true;
+ while (sizeCopied < size && !mAudioQueue.empty()) {
+ QueueEntry *entry = &*mAudioQueue.begin();
+
+ if (entry->mBuffer == NULL) { // EOS
+ hasEOS = true;
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ break;
+ }
+
+ if (firstEntry && entry->mOffset == 0) {
+ firstEntry = false;
+ int64_t mediaTimeUs;
+ CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
+ ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
+ setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+ }
+
+ size_t copy = entry->mBuffer->size() - entry->mOffset;
+ size_t sizeRemaining = size - sizeCopied;
+ if (copy > sizeRemaining) {
+ copy = sizeRemaining;
+ }
+
+ memcpy((char *)buffer + sizeCopied,
+ entry->mBuffer->data() + entry->mOffset,
+ copy);
+
+ entry->mOffset += copy;
+ if (entry->mOffset == entry->mBuffer->size()) {
+ entry->mNotifyConsumed->post();
+ mAudioQueue.erase(mAudioQueue.begin());
+ entry = NULL;
+ }
+ sizeCopied += copy;
+ notifyIfMediaRenderingStarted();
+ }
+
+ if (mAudioFirstAnchorTimeMediaUs >= 0) {
+ int64_t nowUs = ALooper::GetNowUs();
+ setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
+ }
+
+ // we don't know how much data we are queueing for offloaded tracks
+ mAnchorMaxMediaUs = -1;
+
+ if (hasEOS) {
+ (new AMessage(kWhatStopAudioSink, id()))->post();
+ }
+
+ return sizeCopied;
+}
+
bool NuPlayer::Renderer::onDrainAudioQueue() {
uint32_t numFramesPlayed;
if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
@@ -272,39 +680,33 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) {
QueueEntry *entry = &*mAudioQueue.begin();
+ mLastAudioBufferDrained = entry->mBufferOrdinal;
+
if (entry->mBuffer == NULL) {
// EOS
-
- notifyEOS(true /* audio */, entry->mFinalResult);
+ int64_t postEOSDelayUs = 0;
+ if (mAudioSink->needsTrailingPadding()) {
+ postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
+ }
+ notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
mAudioQueue.erase(mAudioQueue.begin());
entry = NULL;
+ if (mAudioSink->needsTrailingPadding()) {
+ // If we're not in gapless playback (i.e. through setNextPlayer), we
+ // need to stop the track here, because that will play out the last
+ // little bit at the end of the file. Otherwise short files won't play.
+ mAudioSink->stop();
+ mNumFramesWritten = 0;
+ }
return false;
}
if (entry->mOffset == 0) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
-
- mAnchorTimeMediaUs = mediaTimeUs;
-
- uint32_t numFramesPlayed;
- CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);
-
- uint32_t numFramesPendingPlayout =
- mNumFramesWritten - numFramesPlayed;
-
- int64_t realTimeOffsetUs =
- (mAudioSink->latency() / 2 /* XXX */
- + numFramesPendingPlayout
- * mAudioSink->msecsPerFrame()) * 1000ll;
-
- // ALOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);
-
- mAnchorTimeRealUs =
- ALooper::GetNowUs() + realTimeOffsetUs;
+ onNewAudioMediaTime(mediaTimeUs);
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -312,11 +714,14 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
copy = numBytesAvailableToWrite;
}
- CHECK_EQ(mAudioSink->write(
- entry->mBuffer->data() + entry->mOffset, copy),
- (ssize_t)copy);
+ ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy);
+ if (written < 0) {
+ // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
+ ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ break;
+ }
- entry->mOffset += copy;
+ entry->mOffset += written;
if (entry->mOffset == entry->mBuffer->size()) {
entry->mNotifyConsumed->post();
mAudioQueue.erase(mAudioQueue.begin());
@@ -324,20 +729,74 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
entry = NULL;
}
- numBytesAvailableToWrite -= copy;
- size_t copiedFrames = copy / mAudioSink->frameSize();
+ numBytesAvailableToWrite -= written;
+ size_t copiedFrames = written / mAudioSink->frameSize();
mNumFramesWritten += copiedFrames;
notifyIfMediaRenderingStarted();
- }
- notifyPosition();
+ if (written != (ssize_t)copy) {
+ // A short count was received from AudioSink::write()
+ //
+ // AudioSink write should block until exactly the number of bytes are delivered.
+ // But it may return with a short count (without an error) when:
+ //
+ // 1) Size to be copied is not a multiple of the frame size. We consider this fatal.
+ // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
+
+ // (Case 1)
+ // Must be a multiple of the frame size. If it is not a multiple of a frame size, it
+ // needs to fail, as we should not carry over fractional frames between calls.
+ CHECK_EQ(copy % mAudioSink->frameSize(), 0);
+
+ // (Case 2)
+ // Return early to the caller.
+ // Beware of calling immediately again as this may busy-loop if you are not careful.
+ ALOGW("AudioSink write short frame count %zd < %zu", written, copy);
+ break;
+ }
+ }
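+    // Estimate the farthest queued media time from the number of frames written
+    // past the anchor.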
+ mAnchorMaxMediaUs =
+ mAnchorTimeMediaUs +
+ (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+ * 1000LL * mAudioSink->msecsPerFrame());
return !mAudioQueue.empty();
}
-void NuPlayer::Renderer::postDrainVideoQueue() {
- if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
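+// Returns the duration of audio that has been written to the sink but not yet played out.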
+int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
+ int64_t writtenAudioDurationUs =
+ mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame();
+ return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);
+}
+
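+// Maps a media timestamp to the real time at which it should be rendered, based on
+// the current playback position.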
+int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
+ int64_t currentPositionUs;
+ if (mPaused || getCurrentPositionOnLooper(
+ &currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
+        // If we fail to get the current position, e.g. because the audio clock is not
+        // ready, just play out video immediately without delay.
+ return nowUs;
+ }
+ return (mediaTimeUs - currentPositionUs) + nowUs;
+}
+
+void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
+ // TRICKY: vorbis decoder generates multiple frames with the same
+ // timestamp, so only update on the first frame with a given timestamp
+ if (mediaTimeUs == mAnchorTimeMediaUs) {
+ return;
+ }
+ setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+ int64_t nowUs = ALooper::GetNowUs();
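+    // Anchor at the new media time; the real-time anchor accounts for audio written
+    // but not yet played out.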
+ setAnchorTime(
+ mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
+}
+
+void NuPlayer::Renderer::postDrainVideoQueue_l() {
+ if (mDrainVideoQueuePending
+ || mSyncQueues
+ || (mPaused && mVideoSampleReceived)) {
return;
}
@@ -350,36 +809,64 @@ void NuPlayer::Renderer::postDrainVideoQueue() {
sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
msg->setInt32("generation", mVideoQueueGeneration);
- int64_t delayUs;
-
if (entry.mBuffer == NULL) {
// EOS doesn't carry a timestamp.
- delayUs = 0;
- } else if (mFlags & FLAG_REAL_TIME) {
+ msg->post();
+ mDrainVideoQueuePending = true;
+ return;
+ }
+
+ int64_t delayUs;
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t realTimeUs;
+ if (mFlags & FLAG_REAL_TIME) {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
-
- delayUs = mediaTimeUs - ALooper::GetNowUs();
+ realTimeUs = mediaTimeUs;
} else {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
if (mAnchorTimeMediaUs < 0) {
- delayUs = 0;
-
- if (!mHasAudio) {
- mAnchorTimeMediaUs = mediaTimeUs;
- mAnchorTimeRealUs = ALooper::GetNowUs();
- }
+ setAnchorTime(mediaTimeUs, nowUs);
+ mPausePositionMediaTimeUs = mediaTimeUs;
+ mAnchorMaxMediaUs = mediaTimeUs;
+ realTimeUs = nowUs;
} else {
- int64_t realTimeUs =
- (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;
+ realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+ }
+ if (!mHasAudio) {
+ mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
+ }
- delayUs = realTimeUs - ALooper::GetNowUs();
+        // Heuristic to handle the situation where the media time changed without a
+        // discontinuity. If we have not yet drained an audio buffer that was
+        // received after this buffer, repost in 10 msec; otherwise repost
+        // in 500 msec.
+ delayUs = realTimeUs - nowUs;
+ if (delayUs > 500000) {
+ int64_t postDelayUs = 500000;
+ if (mHasAudio && (mLastAudioBufferDrained - entry.mBufferOrdinal) <= 0) {
+ postDelayUs = 10000;
+ }
+ msg->setWhat(kWhatPostDrainVideoQueue);
+ msg->post(postDelayUs);
+ mVideoScheduler->restart();
+ ALOGI("possible video time jump of %dms, retrying in %dms",
+ (int)(delayUs / 1000), (int)(postDelayUs / 1000));
+ mDrainVideoQueuePending = true;
+ return;
}
}
- msg->post(delayUs);
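+    // Snap the target render time to the display's vsync via the frame scheduler
+    // (argument and result are in ns).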
+ realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
+ int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
+
+ delayUs = realTimeUs - nowUs;
+
+ ALOGW_IF(delayUs > 500000, "unusually high delayUs: %" PRId64, delayUs);
+ // post 2 display refreshes before rendering is due
+ msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
mDrainVideoQueuePending = true;
}
@@ -399,12 +886,11 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
- mVideoLateByUs = 0ll;
-
- notifyPosition();
+ setVideoLateByUs(0);
return;
}
+ int64_t nowUs = -1;
int64_t realTimeUs;
if (mFlags & FLAG_REAL_TIME) {
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
@@ -412,37 +898,51 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
- realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs;
+ nowUs = ALooper::GetNowUs();
+ realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
}
- mVideoLateByUs = ALooper::GetNowUs() - realTimeUs;
- bool tooLate = (mVideoLateByUs > 40000);
+ bool tooLate = false;
+
+ if (!mPaused) {
+ if (nowUs == -1) {
+ nowUs = ALooper::GetNowUs();
+ }
+ setVideoLateByUs(nowUs - realTimeUs);
+ tooLate = (mVideoLateByUs > 40000);
- if (tooLate) {
- ALOGV("video late by %lld us (%.2f secs)",
- mVideoLateByUs, mVideoLateByUs / 1E6);
+ if (tooLate) {
+ ALOGV("video late by %lld us (%.2f secs)",
+ mVideoLateByUs, mVideoLateByUs / 1E6);
+ } else {
+ ALOGV("rendering video at media time %.2f secs",
+ (mFlags & FLAG_REAL_TIME ? realTimeUs :
+ (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
+ }
} else {
- ALOGV("rendering video at media time %.2f secs",
- (mFlags & FLAG_REAL_TIME ? realTimeUs :
- (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
- if (mSoftRenderer != NULL) {
- mSoftRenderer->render(entry->mBuffer->data(), entry->mBuffer->size(), NULL);
+ setVideoLateByUs(0);
+ if (!mVideoSampleReceived && !mHasAudio) {
+        // This ensures that the first frame after a flush won't be used as an anchor
+        // while the renderer is paused, because resume can happen at any time after a seek.
+ setAnchorTime(-1, -1);
}
}
+ entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
entry->mNotifyConsumed->setInt32("render", !tooLate);
entry->mNotifyConsumed->post();
mVideoQueue.erase(mVideoQueue.begin());
entry = NULL;
- if (!mVideoRenderingStarted) {
- mVideoRenderingStarted = true;
- notifyVideoRenderingStart();
- }
-
- notifyIfMediaRenderingStarted();
+ mVideoSampleReceived = true;
- notifyPosition();
+ if (!mPaused) {
+ if (!mVideoRenderingStarted) {
+ mVideoRenderingStarted = true;
+ notifyVideoRenderingStart();
+ }
+ notifyIfMediaRenderingStarted();
+ }
}
void NuPlayer::Renderer::notifyVideoRenderingStart() {
@@ -451,22 +951,29 @@ void NuPlayer::Renderer::notifyVideoRenderingStart() {
notify->post();
}
-void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult) {
+void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatEOS);
notify->setInt32("audio", static_cast<int32_t>(audio));
notify->setInt32("finalResult", finalResult);
- notify->post();
+ notify->post(delayUs);
+}
+
+void NuPlayer::Renderer::notifyAudioOffloadTearDown() {
+ (new AMessage(kWhatAudioOffloadTearDown, id()))->post();
}
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
- if (audio) {
- mHasAudio = true;
- } else {
- mHasVideo = true;
+ setHasMedia(audio);
+
+ if (mHasVideo) {
+ if (mVideoScheduler == NULL) {
+ mVideoScheduler = new VideoFrameScheduler();
+ mVideoScheduler->init();
+ }
}
if (dropBufferWhileFlushing(audio, msg)) {
@@ -484,13 +991,15 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
entry.mNotifyConsumed = notifyConsumed;
entry.mOffset = 0;
entry.mFinalResult = OK;
+ entry.mBufferOrdinal = ++mTotalBuffersQueued;
+ Mutex::Autolock autoLock(mLock);
if (audio) {
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
mVideoQueue.push_back(entry);
- postDrainVideoQueue();
+ postDrainVideoQueue_l();
}
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
@@ -502,7 +1011,7 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
// EOS signalled on either queue.
- syncQueuesDone();
+ syncQueuesDone_l();
return;
}
@@ -526,10 +1035,10 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
return;
}
- syncQueuesDone();
+ syncQueuesDone_l();
}
-void NuPlayer::Renderer::syncQueuesDone() {
+void NuPlayer::Renderer::syncQueuesDone_l() {
if (!mSyncQueues) {
return;
}
@@ -537,11 +1046,11 @@ void NuPlayer::Renderer::syncQueuesDone() {
mSyncQueues = false;
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
- postDrainVideoQueue();
+ postDrainVideoQueue_l();
}
}
@@ -560,25 +1069,39 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
entry.mOffset = 0;
entry.mFinalResult = finalResult;
+ Mutex::Autolock autoLock(mLock);
if (audio) {
if (mAudioQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ syncQueuesDone_l();
}
mAudioQueue.push_back(entry);
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
} else {
if (mVideoQueue.empty() && mSyncQueues) {
- syncQueuesDone();
+ syncQueuesDone_l();
}
mVideoQueue.push_back(entry);
- postDrainVideoQueue();
+ postDrainVideoQueue_l();
}
}
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
- int32_t audio;
+ int32_t audio, notifyComplete;
CHECK(msg->findInt32("audio", &audio));
+ {
+ Mutex::Autolock autoLock(mFlushLock);
+ if (audio) {
+ mFlushingAudio = false;
+ notifyComplete = mNotifyCompleteAudio;
+ mNotifyCompleteAudio = false;
+ } else {
+ mFlushingVideo = false;
+ notifyComplete = mNotifyCompleteVideo;
+ mNotifyCompleteVideo = false;
+ }
+ }
+
// If we're currently syncing the queues, i.e. dropping audio while
// aligning the first audio/video buffer times and only one of the
// two queues has data, we may starve that queue by not requesting
@@ -587,32 +1110,52 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
// corresponding discontinuity on the other queue.
// Therefore we'll stop syncing the queues if at least one of them
// is flushed.
- syncQueuesDone();
+ {
+ Mutex::Autolock autoLock(mLock);
+ syncQueuesDone_l();
+ setPauseStartedTimeRealUs(-1);
+ setAnchorTime(-1, -1);
+ }
ALOGV("flushing %s", audio ? "audio" : "video");
if (audio) {
- flushQueue(&mAudioQueue);
+ {
+ Mutex::Autolock autoLock(mLock);
+ flushQueue(&mAudioQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingAudio = false;
+ ++mAudioQueueGeneration;
+ prepareForMediaRenderingStart();
+
+ if (offloadingAudio()) {
+ setAudioFirstAnchorTime(-1);
+ }
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
- prepareForMediaRenderingStart();
+ if (offloadingAudio()) {
+ mAudioSink->pause();
+ mAudioSink->flush();
+ mAudioSink->start();
+ }
} else {
flushQueue(&mVideoQueue);
- Mutex::Autolock autoLock(mFlushLock);
- mFlushingVideo = false;
-
mDrainVideoQueuePending = false;
++mVideoQueueGeneration;
+ if (mVideoScheduler != NULL) {
+ mVideoScheduler->restart();
+ }
+
prepareForMediaRenderingStart();
}
- notifyFlushComplete(audio);
+ mVideoSampleReceived = false;
+
+ if (notifyComplete) {
+ notifyFlushComplete(audio);
+ }
}
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
@@ -661,55 +1204,67 @@ bool NuPlayer::Renderer::dropBufferWhileFlushing(
}
void NuPlayer::Renderer::onAudioSinkChanged() {
+ if (offloadingAudio()) {
+ return;
+ }
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
+ mAnchorNumFramesWritten = -1;
uint32_t written;
if (mAudioSink->getFramesWritten(&written) == OK) {
mNumFramesWritten = written;
}
}
-void NuPlayer::Renderer::notifyPosition() {
- if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
- return;
- }
-
- int64_t nowUs = ALooper::GetNowUs();
-
- if (mLastPositionUpdateUs >= 0
- && nowUs < mLastPositionUpdateUs + kMinPositionUpdateDelayUs) {
- return;
- }
- mLastPositionUpdateUs = nowUs;
-
- int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
+void NuPlayer::Renderer::onDisableOffloadAudio() {
+ Mutex::Autolock autoLock(mLock);
+ mFlags &= ~FLAG_OFFLOAD_AUDIO;
+ ++mAudioQueueGeneration;
+}
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatPosition);
- notify->setInt64("positionUs", positionUs);
- notify->setInt64("videoLateByUs", mVideoLateByUs);
- notify->post();
+void NuPlayer::Renderer::onEnableOffloadAudio() {
+ Mutex::Autolock autoLock(mLock);
+ mFlags |= FLAG_OFFLOAD_AUDIO;
+ ++mAudioQueueGeneration;
}
void NuPlayer::Renderer::onPause() {
- CHECK(!mPaused);
+ if (mPaused) {
+ ALOGW("Renderer::onPause() called while already paused!");
+ return;
+ }
+ int64_t currentPositionUs;
+ int64_t pausePositionMediaTimeUs;
+ if (getCurrentPositionFromAnchor(
+ &currentPositionUs, ALooper::GetNowUs()) == OK) {
+ pausePositionMediaTimeUs = currentPositionUs;
+ } else {
+        // Set the paused position to -1 (unavailable) if we don't have an anchor time.
+        // This can happen if the client does a seekTo() immediately followed by
+        // pause(): the renderer is flushed with its anchor time cleared, and we don't
+        // want to leave a stale value in mPausePositionMediaTimeUs.
+ pausePositionMediaTimeUs = -1;
+ }
+ {
+ Mutex::Autolock autoLock(mLock);
+ mPausePositionMediaTimeUs = pausePositionMediaTimeUs;
+ ++mAudioQueueGeneration;
+ ++mVideoQueueGeneration;
+ prepareForMediaRenderingStart();
+ mPaused = true;
+ setPauseStartedTimeRealUs(ALooper::GetNowUs());
+ }
mDrainAudioQueuePending = false;
- ++mAudioQueueGeneration;
-
mDrainVideoQueuePending = false;
- ++mVideoQueueGeneration;
-
- prepareForMediaRenderingStart();
if (mHasAudio) {
mAudioSink->pause();
+ startAudioOffloadPauseTimeout();
}
ALOGV("now paused audio queue has %d entries, video has %d entries",
mAudioQueue.size(), mVideoQueue.size());
-
- mPaused = true;
}
void NuPlayer::Renderer::onResume() {
@@ -718,19 +1273,291 @@ void NuPlayer::Renderer::onResume() {
}
if (mHasAudio) {
+ cancelAudioOffloadPauseTimeout();
mAudioSink->start();
}
+ Mutex::Autolock autoLock(mLock);
mPaused = false;
+ if (mPauseStartedTimeRealUs != -1) {
+ int64_t newAnchorRealUs =
+ mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
+ setAnchorTime(
+ mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
+ }
if (!mAudioQueue.empty()) {
- postDrainAudioQueue();
+ postDrainAudioQueue_l();
}
if (!mVideoQueue.empty()) {
- postDrainVideoQueue();
+ postDrainVideoQueue_l();
}
}
+void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
+ if (mVideoScheduler == NULL) {
+ mVideoScheduler = new VideoFrameScheduler();
+ }
+ mVideoScheduler->init(fps);
+}
+
+// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
+// as it acquires locks and may query the audio driver.
+//
+// Some calls could conceivably retrieve extrapolated data instead of
+// accessing getTimestamp() or getPosition() every time a data buffer with
+// a media time is received.
+//
+int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {
+ uint32_t numFramesPlayed;
+ int64_t numFramesPlayedAt;
+ AudioTimestamp ts;
+ static const int64_t kStaleTimestamp100ms = 100000;
+
+ status_t res = mAudioSink->getTimestamp(ts);
+ if (res == OK) { // case 1: mixing audio tracks and offloaded tracks.
+ numFramesPlayed = ts.mPosition;
+ numFramesPlayedAt =
+ ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+ const int64_t timestampAge = nowUs - numFramesPlayedAt;
+ if (timestampAge > kStaleTimestamp100ms) {
+ // This is an audio FIXME.
+ // getTimestamp returns a timestamp which may come from audio mixing threads.
+ // After pausing, the MixerThread may go idle, thus the mTime estimate may
+ // become stale. Assuming that the MixerThread runs 20ms, with FastMixer at 5ms,
+ // the max latency should be about 25ms with an average around 12ms (to be verified).
+ // For safety we use 100ms.
+ ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) numFramesPlayedAt(%lld)",
+ (long long)nowUs, (long long)numFramesPlayedAt);
+ numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
+ }
+ //ALOGD("getTimestamp: OK %d %lld", numFramesPlayed, (long long)numFramesPlayedAt);
+ } else if (res == WOULD_BLOCK) { // case 2: transitory state on start of a new track
+ numFramesPlayed = 0;
+ numFramesPlayedAt = nowUs;
+ //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
+ // numFramesPlayed, (long long)numFramesPlayedAt);
+ } else { // case 3: transitory at new track or audio fast tracks.
+ res = mAudioSink->getPosition(&numFramesPlayed);
+ CHECK_EQ(res, (status_t)OK);
+ numFramesPlayedAt = nowUs;
+ numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+ }
+
+ // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
+ //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test
+ int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame())
+ + nowUs - numFramesPlayedAt;
+ if (durationUs < 0) {
+ // Occurs when numFramesPlayed position is very small and the following:
+ // (1) In case 1, the time nowUs is computed before getTimestamp() is called and
+ // numFramesPlayedAt is greater than nowUs by time more than numFramesPlayed.
+ // (2) In case 3, using getPosition and adding mAudioSink->latency() to
+ // numFramesPlayedAt, by a time amount greater than numFramesPlayed.
+ //
+ // Both of these are transitory conditions.
+ ALOGV("getPlayedOutAudioDurationUs: negative duration %lld set to zero", (long long)durationUs);
+ durationUs = 0;
+ }
+ ALOGV("getPlayedOutAudioDurationUs(%lld) nowUs(%lld) frames(%u) framesAt(%lld)",
+ (long long)durationUs, (long long)nowUs, numFramesPlayed, (long long)numFramesPlayedAt);
+ return durationUs;
+}
+
+void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) {
+ if (mAudioOffloadTornDown) {
+ return;
+ }
+ mAudioOffloadTornDown = true;
+
+ int64_t currentPositionUs;
+ if (getCurrentPositionOnLooper(&currentPositionUs) != OK) {
+ currentPositionUs = 0;
+ }
+
+ mAudioSink->stop();
+ mAudioSink->flush();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatAudioOffloadTearDown);
+ notify->setInt64("positionUs", currentPositionUs);
+ notify->setInt32("reason", reason);
+ notify->post();
+}
+
+void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
+ if (offloadingAudio()) {
+ mWakeLock->acquire();
+ sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
+ msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
+ msg->post(kOffloadPauseMaxUs);
+ }
+}
+
+void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
+ if (offloadingAudio()) {
+ mWakeLock->release(true);
+ ++mAudioOffloadPauseTimeoutGeneration;
+ }
+}
+
+status_t NuPlayer::Renderer::onOpenAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags) {
+ ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
+ offloadOnly, offloadingAudio());
+ bool audioSinkChanged = false;
+
+ int32_t numChannels;
+ CHECK(format->findInt32("channel-count", &numChannels));
+
+ int32_t channelMask;
+ if (!format->findInt32("channel-mask", &channelMask)) {
+ // signal to the AudioSink to derive the mask from count.
+ channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
+ }
+
+ int32_t sampleRate;
+ CHECK(format->findInt32("sample-rate", &sampleRate));
+
+ if (offloadingAudio()) {
+ audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+ status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+
+ if (err != OK) {
+ ALOGE("Couldn't map mime \"%s\" to a valid "
+ "audio_format", mime.c_str());
+ onDisableOffloadAudio();
+ } else {
+ ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
+ mime.c_str(), audioFormat);
+
+ int avgBitRate = -1;
+ format->findInt32("bit-rate", &avgBitRate);
+
+ int32_t aacProfile = -1;
+ if (audioFormat == AUDIO_FORMAT_AAC
+ && format->findInt32("aac-profile", &aacProfile)) {
+ // Redefine AAC format as per aac profile
+ mapAACProfileToAudioFormat(
+ audioFormat,
+ aacProfile);
+ }
+
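+            // Describe the stream to the audio HAL so it can decide whether this
+            // track can be offloaded.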
+ audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
+ offloadInfo.duration_us = -1;
+ format->findInt64(
+ "durationUs", &offloadInfo.duration_us);
+ offloadInfo.sample_rate = sampleRate;
+ offloadInfo.channel_mask = channelMask;
+ offloadInfo.format = audioFormat;
+ offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
+ offloadInfo.bit_rate = avgBitRate;
+ offloadInfo.has_video = hasVideo;
+ offloadInfo.is_streaming = true;
+
+ if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
+ ALOGV("openAudioSink: no change in offload mode");
+ // no change from previous configuration, everything ok.
+ return OK;
+ }
+ mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+
+ ALOGV("openAudioSink: try to open AudioSink in offload mode");
+ uint32_t offloadFlags = flags;
+ offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+ offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
+ audioSinkChanged = true;
+ mAudioSink->close();
+ err = mAudioSink->open(
+ sampleRate,
+ numChannels,
+ (audio_channel_mask_t)channelMask,
+ audioFormat,
+ 8 /* bufferCount */,
+ &NuPlayer::Renderer::AudioSinkCallback,
+ this,
+ (audio_output_flags_t)offloadFlags,
+ &offloadInfo);
+
+ if (err == OK) {
+ // If the playback is offloaded to h/w, we pass
+ // the HAL some metadata information.
+ // We don't want to do this for PCM because it
+ // will be going through the AudioFlinger mixer
+ // before reaching the hardware.
+ // TODO
+ mCurrentOffloadInfo = offloadInfo;
+ err = mAudioSink->start();
+ ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
+ }
+ if (err != OK) {
+ // Clean up, fall back to non offload mode.
+ mAudioSink->close();
+ onDisableOffloadAudio();
+ mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+ ALOGV("openAudioSink: offload failed");
+ }
+ }
+ }
+ if (!offloadOnly && !offloadingAudio()) {
+ ALOGV("openAudioSink: open AudioSink in NON-offload mode");
+ uint32_t pcmFlags = flags;
+ pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
+
+ const PcmInfo info = {
+ (audio_channel_mask_t)channelMask,
+ (audio_output_flags_t)pcmFlags,
+ AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
+ numChannels,
+ sampleRate
+ };
+ if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
+ ALOGV("openAudioSink: no change in pcm mode");
+ // no change from previous configuration, everything ok.
+ return OK;
+ }
+
+ audioSinkChanged = true;
+ mAudioSink->close();
+ mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+ status_t err = mAudioSink->open(
+ sampleRate,
+ numChannels,
+ (audio_channel_mask_t)channelMask,
+ AUDIO_FORMAT_PCM_16_BIT,
+ 8 /* bufferCount */,
+ NULL,
+ NULL,
+ (audio_output_flags_t)pcmFlags);
+ if (err != OK) {
+ ALOGW("openAudioSink: non offloaded open failed status: %d", err);
+ mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+ return err;
+ }
+ mCurrentPcmInfo = info;
+ mAudioSink->start();
+ }
+ if (audioSinkChanged) {
+ onAudioSinkChanged();
+ }
+ if (offloadingAudio()) {
+ mAudioOffloadTornDown = false;
+ }
+ return OK;
+}
+
+void NuPlayer::Renderer::onCloseAudioSink() {
+ mAudioSink->close();
+ mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
+ mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 9124e03..003d1d0 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -23,16 +23,23 @@
namespace android {
struct ABuffer;
-class SoftwareRenderer;
+class AWakeLock;
+struct VideoFrameScheduler;
struct NuPlayer::Renderer : public AHandler {
enum Flags {
FLAG_REAL_TIME = 1,
+ FLAG_OFFLOAD_AUDIO = 2,
};
Renderer(const sp<MediaPlayerBase::AudioSink> &sink,
const sp<AMessage> &notify,
uint32_t flags = 0);
+ static size_t AudioSinkCallback(
+ MediaPlayerBase::AudioSink *audioSink,
+ void *data, size_t size, void *me,
+ MediaPlayerBase::AudioSink::cb_event_t event);
+
void queueBuffer(
bool audio,
const sp<ABuffer> &buffer,
@@ -40,24 +47,53 @@ struct NuPlayer::Renderer : public AHandler {
void queueEOS(bool audio, status_t finalResult);
- void flush(bool audio);
+ void flush(bool audio, bool notifyComplete);
void signalTimeDiscontinuity();
void signalAudioSinkChanged();
+ void signalDisableOffloadAudio();
+ void signalEnableOffloadAudio();
+
void pause();
void resume();
+ void setVideoFrameRate(float fps);
+
+    // The following setters and getters are protected by mTimeLock.
+ status_t getCurrentPosition(int64_t *mediaUs);
+ void setHasMedia(bool audio);
+ void setAudioFirstAnchorTime(int64_t mediaUs);
+ void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs);
+ void setAnchorTime(
+ int64_t mediaUs, int64_t realUs, int64_t numFramesWritten = -1, bool resume = false);
+ void setVideoLateByUs(int64_t lateUs);
+ int64_t getVideoLateByUs();
+ void setPauseStartedTimeRealUs(int64_t realUs);
+
+ status_t openAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags,
+ bool *isOffloaded);
+ void closeAudioSink();
+
enum {
kWhatEOS = 'eos ',
kWhatFlushComplete = 'fluC',
kWhatPosition = 'posi',
kWhatVideoRenderingStart = 'vdrd',
kWhatMediaRenderingStart = 'mdrd',
+ kWhatAudioOffloadTearDown = 'aOTD',
+ kWhatAudioOffloadPauseTimeout = 'aOPT',
};
- void setSoftRenderer(SoftwareRenderer *softRenderer);
+ enum AudioOffloadTearDownReason {
+ kDueToError = 0,
+ kDueToTimeout,
+ };
protected:
virtual ~Renderer();
@@ -66,14 +102,21 @@ protected:
private:
enum {
- kWhatDrainAudioQueue = 'draA',
- kWhatDrainVideoQueue = 'draV',
- kWhatQueueBuffer = 'queB',
- kWhatQueueEOS = 'qEOS',
- kWhatFlush = 'flus',
- kWhatAudioSinkChanged = 'auSC',
- kWhatPause = 'paus',
- kWhatResume = 'resm',
+ kWhatDrainAudioQueue = 'draA',
+ kWhatDrainVideoQueue = 'draV',
+ kWhatPostDrainVideoQueue = 'pDVQ',
+ kWhatQueueBuffer = 'queB',
+ kWhatQueueEOS = 'qEOS',
+ kWhatFlush = 'flus',
+ kWhatAudioSinkChanged = 'auSC',
+ kWhatPause = 'paus',
+ kWhatResume = 'resm',
+ kWhatOpenAudioSink = 'opnA',
+ kWhatCloseAudioSink = 'clsA',
+ kWhatStopAudioSink = 'stpA',
+ kWhatDisableOffloadAudio = 'noOA',
+ kWhatEnableOffloadAudio = 'enOA',
+ kWhatSetVideoFrameRate = 'sVFR',
};
struct QueueEntry {
@@ -81,47 +124,98 @@ private:
sp<AMessage> mNotifyConsumed;
size_t mOffset;
status_t mFinalResult;
+ int32_t mBufferOrdinal;
};
static const int64_t kMinPositionUpdateDelayUs;
sp<MediaPlayerBase::AudioSink> mAudioSink;
- SoftwareRenderer *mSoftRenderer;
sp<AMessage> mNotify;
+ Mutex mLock;
uint32_t mFlags;
List<QueueEntry> mAudioQueue;
List<QueueEntry> mVideoQueue;
uint32_t mNumFramesWritten;
+ sp<VideoFrameScheduler> mVideoScheduler;
bool mDrainAudioQueuePending;
bool mDrainVideoQueuePending;
int32_t mAudioQueueGeneration;
int32_t mVideoQueueGeneration;
+ Mutex mTimeLock;
+    // |mTimeLock| protects the time-related member variables below.
+    // Note: these members are only written on the renderer's thread, so reads on that
+    // thread need no locking; any other access must be protected by |mTimeLock|.
+    // TODO: move these members to a separate media clock class.
+ int64_t mAudioFirstAnchorTimeMediaUs;
int64_t mAnchorTimeMediaUs;
int64_t mAnchorTimeRealUs;
+ int64_t mAnchorNumFramesWritten;
+ int64_t mAnchorMaxMediaUs;
+ int64_t mVideoLateByUs;
+ bool mHasAudio;
+ bool mHasVideo;
+ int64_t mPauseStartedTimeRealUs;
Mutex mFlushLock; // protects the following 2 member vars.
bool mFlushingAudio;
bool mFlushingVideo;
+ bool mNotifyCompleteAudio;
+ bool mNotifyCompleteVideo;
- bool mHasAudio;
- bool mHasVideo;
bool mSyncQueues;
+    // Modified only on the renderer's thread.
bool mPaused;
+ int64_t mPausePositionMediaTimeUs;
+
+ bool mVideoSampleReceived;
bool mVideoRenderingStarted;
int32_t mVideoRenderingStartGeneration;
int32_t mAudioRenderingStartGeneration;
int64_t mLastPositionUpdateUs;
- int64_t mVideoLateByUs;
+
+ int32_t mAudioOffloadPauseTimeoutGeneration;
+ bool mAudioOffloadTornDown;
+ audio_offload_info_t mCurrentOffloadInfo;
+
+ struct PcmInfo {
+ audio_channel_mask_t mChannelMask;
+ audio_output_flags_t mFlags;
+ audio_format_t mFormat;
+ int32_t mNumChannels;
+ int32_t mSampleRate;
+ };
+ PcmInfo mCurrentPcmInfo;
+ static const PcmInfo AUDIO_PCMINFO_INITIALIZER;
+
+ int32_t mTotalBuffersQueued;
+ int32_t mLastAudioBufferDrained;
+
+ sp<AWakeLock> mWakeLock;
+
+ status_t getCurrentPositionOnLooper(int64_t *mediaUs);
+ status_t getCurrentPositionOnLooper(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+ bool getCurrentPositionIfPaused_l(int64_t *mediaUs);
+ status_t getCurrentPositionFromAnchor(
+ int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
+
+ size_t fillAudioBuffer(void *buffer, size_t size);
bool onDrainAudioQueue();
- void postDrainAudioQueue(int64_t delayUs = 0);
+ int64_t getPendingAudioPlayoutDurationUs(int64_t nowUs);
+ int64_t getPlayedOutAudioDurationUs(int64_t nowUs);
+ void postDrainAudioQueue_l(int64_t delayUs = 0);
+
+ void onNewAudioMediaTime(int64_t mediaTimeUs);
+ int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs);
void onDrainVideoQueue();
- void postDrainVideoQueue();
+ void postDrainVideoQueue_l();
void prepareForMediaRenderingStart();
void notifyIfMediaRenderingStarted();
@@ -130,18 +224,34 @@ private:
void onQueueEOS(const sp<AMessage> &msg);
void onFlush(const sp<AMessage> &msg);
void onAudioSinkChanged();
+ void onDisableOffloadAudio();
+ void onEnableOffloadAudio();
void onPause();
void onResume();
-
- void notifyEOS(bool audio, status_t finalResult);
+ void onSetVideoFrameRate(float fps);
+ void onAudioOffloadTearDown(AudioOffloadTearDownReason reason);
+ status_t onOpenAudioSink(
+ const sp<AMessage> &format,
+ bool offloadOnly,
+ bool hasVideo,
+ uint32_t flags);
+ void onCloseAudioSink();
+
+ void notifyEOS(bool audio, status_t finalResult, int64_t delayUs = 0);
void notifyFlushComplete(bool audio);
void notifyPosition();
void notifyVideoLateBy(int64_t lateByUs);
void notifyVideoRenderingStart();
+ void notifyAudioOffloadTearDown();
void flushQueue(List<QueueEntry> *queue);
bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg);
- void syncQueuesDone();
+ void syncQueuesDone_l();
+
+ bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; }
+
+ void startAudioOffloadPauseTimeout();
+ void cancelAudioOffloadPauseTimeout();
DISALLOW_EVIL_CONSTRUCTORS(Renderer);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index e50533a..d9f14a2 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -21,11 +21,14 @@
#include "NuPlayer.h"
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <media/mediaplayer.h>
+#include <utils/Vector.h>
namespace android {
struct ABuffer;
-struct MetaData;
+struct MediaBuffer;
struct NuPlayer::Source : public AHandler {
enum Flags {
@@ -34,16 +37,25 @@ struct NuPlayer::Source : public AHandler {
FLAG_CAN_SEEK_FORWARD = 4, // the "10 sec forward button"
FLAG_CAN_SEEK = 8, // the "seek bar"
FLAG_DYNAMIC_DURATION = 16,
+ FLAG_SECURE = 32,
+ FLAG_PROTECTED = 64,
};
enum {
kWhatPrepared,
kWhatFlagsChanged,
kWhatVideoSizeChanged,
+ kWhatBufferingUpdate,
kWhatBufferingStart,
kWhatBufferingEnd,
+ kWhatPauseOnBufferingStart,
+ kWhatResumeOnBufferingEnd,
+ kWhatCacheStats,
kWhatSubtitleData,
+ kWhatTimedTextData,
kWhatQueueDecoderShutdown,
+ kWhatDrmNoLicense,
+ kWhatInstantiateSecureDecoders,
};
// The provides message is used to notify the player about various
@@ -59,28 +71,45 @@ struct NuPlayer::Source : public AHandler {
virtual void pause() {}
virtual void resume() {}
+    // Explicitly disconnect the underlying data source
+ virtual void disconnect() {}
+
// Returns OK iff more data was available,
// an error or ERROR_END_OF_STREAM if not.
virtual status_t feedMoreTSData() = 0;
virtual sp<AMessage> getFormat(bool audio);
+ virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
+ virtual sp<MetaData> getFileFormatMeta() const { return NULL; }
virtual status_t dequeueAccessUnit(
bool audio, sp<ABuffer> *accessUnit) = 0;
- virtual status_t getDuration(int64_t *durationUs) {
+ virtual status_t getDuration(int64_t * /* durationUs */) {
return INVALID_OPERATION;
}
- virtual status_t getTrackInfo(Parcel* reply) const {
+ virtual size_t getTrackCount() const {
+ return 0;
+ }
+
+ virtual sp<AMessage> getTrackInfo(size_t /* trackIndex */) const {
+ return NULL;
+ }
+
+ virtual ssize_t getSelectedTrack(media_track_type /* type */) const {
return INVALID_OPERATION;
}
- virtual status_t selectTrack(size_t trackIndex, bool select) {
+ virtual status_t selectTrack(size_t /* trackIndex */, bool /* select */, int64_t /* timeUs*/) {
return INVALID_OPERATION;
}
- virtual status_t seekTo(int64_t seekTimeUs) {
+ virtual status_t seekTo(int64_t /* seekTimeUs */) {
+ return INVALID_OPERATION;
+ }
+
+ virtual status_t setBuffers(bool /* audio */, Vector<MediaBuffer *> &/* buffers */) {
return INVALID_OPERATION;
}
@@ -93,12 +122,11 @@ protected:
virtual void onMessageReceived(const sp<AMessage> &msg);
- virtual sp<MetaData> getFormatMeta(bool audio) { return NULL; }
-
sp<AMessage> dupNotify() const { return mNotify->dup(); }
void notifyFlagsChanged(uint32_t flags);
- void notifyVideoSizeChanged(int32_t width, int32_t height);
+ void notifyVideoSizeChanged(const sp<AMessage> &format = NULL);
+ void notifyInstantiateSecureDecoders(const sp<AMessage> &reply);
void notifyPrepared(status_t err = OK);
private:
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 18cf6d1..0282a9f 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -24,6 +24,7 @@
#include "MyHandler.h"
#include "SDPLoader.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
@@ -33,12 +34,14 @@ const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
NuPlayer::RTSPSource::RTSPSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid,
uid_t uid,
bool isSDP)
: Source(notify),
+ mHTTPService(httpService),
mURL(url),
mUIDValid(uidValid),
mUID(uid),
@@ -47,7 +50,7 @@ NuPlayer::RTSPSource::RTSPSource(
mState(DISCONNECTED),
mFinalResult(OK),
mDisconnectReplyID(0),
- mBuffering(true),
+ mBuffering(false),
mSeekGeneration(0),
mEOSTimeoutAudio(0),
mEOSTimeoutVideo(0) {
@@ -67,6 +70,7 @@ NuPlayer::RTSPSource::RTSPSource(
NuPlayer::RTSPSource::~RTSPSource() {
if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
mLooper->stop();
}
}
@@ -77,14 +81,13 @@ void NuPlayer::RTSPSource::prepareAsync() {
mLooper->setName("rtsp");
mLooper->start();
- mReflector = new AHandlerReflector<RTSPSource>(this);
- mLooper->registerHandler(mReflector);
+ mLooper->registerHandler(this);
}
CHECK(mHandler == NULL);
CHECK(mSDPLoader == NULL);
- sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id());
+ sp<AMessage> notify = new AMessage(kWhatNotify, id());
CHECK_EQ(mState, (int)DISCONNECTED);
mState = CONNECTING;
@@ -92,7 +95,7 @@ void NuPlayer::RTSPSource::prepareAsync() {
if (mIsSDP) {
mSDPLoader = new SDPLoader(notify,
(mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0,
- mUIDValid, mUID);
+ mHTTPService);
mSDPLoader->load(
mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
@@ -103,9 +106,7 @@ void NuPlayer::RTSPSource::prepareAsync() {
mHandler->connect();
}
- sp<AMessage> notifyStart = dupNotify();
- notifyStart->setInt32("what", kWhatBufferingStart);
- notifyStart->post();
+ startBufferingIfNecessary();
}
void NuPlayer::RTSPSource::start() {
@@ -115,7 +116,7 @@ void NuPlayer::RTSPSource::stop() {
if (mLooper == NULL) {
return;
}
- sp<AMessage> msg = new AMessage(kWhatDisconnect, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatDisconnect, id());
sp<AMessage> dummy;
msg->postAndAwaitResponse(&dummy);
@@ -141,6 +142,7 @@ void NuPlayer::RTSPSource::resume() {
}
status_t NuPlayer::RTSPSource::feedMoreTSData() {
+ Mutex::Autolock _l(mBufferingLock);
return mFinalResult;
}
@@ -192,16 +194,8 @@ bool NuPlayer::RTSPSource::haveSufficientDataOnAllTracks() {
status_t NuPlayer::RTSPSource::dequeueAccessUnit(
bool audio, sp<ABuffer> *accessUnit) {
- if (mBuffering) {
- if (!haveSufficientDataOnAllTracks()) {
- return -EWOULDBLOCK;
- }
-
- mBuffering = false;
-
- sp<AMessage> notify = dupNotify();
- notify->setInt32("what", kWhatBufferingEnd);
- notify->post();
+ if (!stopBufferingIfNecessary()) {
+ return -EWOULDBLOCK;
}
sp<AnotherPacketSource> source = getSource(audio);
@@ -243,11 +237,7 @@ status_t NuPlayer::RTSPSource::dequeueAccessUnit(
if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
// We should not enter buffering mode
// if any of the sources already have detected EOS.
- mBuffering = true;
-
- sp<AMessage> notify = dupNotify();
- notify->setInt32("what", kWhatBufferingStart);
- notify->post();
+ startBufferingIfNecessary();
}
return -EWOULDBLOCK;
@@ -302,7 +292,7 @@ status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) {
}
status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) {
- sp<AMessage> msg = new AMessage(kWhatPerformSeek, mReflector->id());
+ sp<AMessage> msg = new AMessage(kWhatPerformSeek, id());
msg->setInt32("generation", ++mSeekGeneration);
msg->setInt64("timeUs", seekTimeUs);
msg->post(200000ll);
@@ -353,7 +343,7 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
{
onConnected();
- notifyVideoSizeChanged(0, 0);
+ notifyVideoSizeChanged();
uint32_t flags = 0;
@@ -502,7 +492,10 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {
TrackInfo *info = &mTracks.editItemAt(trackIndex);
sp<AnotherPacketSource> source = info->mSource;
if (source != NULL) {
- source->queueDiscontinuity(ATSParser::DISCONTINUITY_SEEK, NULL);
+ source->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_TIME,
+ NULL,
+ true /* discard */);
}
break;
@@ -607,7 +600,7 @@ void NuPlayer::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) {
ALOGE("Unable to find url in SDP");
err = UNKNOWN_ERROR;
} else {
- sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id());
+ sp<AMessage> notify = new AMessage(kWhatNotify, id());
mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID);
mLooper->registerHandler(mHandler);
@@ -624,7 +617,7 @@ void NuPlayer::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) {
}
mState = DISCONNECTED;
- mFinalResult = err;
+ setError(err);
if (mDisconnectReplyID != 0) {
finishDisconnectIfPossible();
@@ -651,7 +644,7 @@ void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) {
}
mState = DISCONNECTED;
- mFinalResult = err;
+ setError(err);
if (mDisconnectReplyID != 0) {
finishDisconnectIfPossible();
@@ -672,4 +665,40 @@ void NuPlayer::RTSPSource::finishDisconnectIfPossible() {
mDisconnectReplyID = 0;
}
+void NuPlayer::RTSPSource::setError(status_t err) {
+ Mutex::Autolock _l(mBufferingLock);
+ mFinalResult = err;
+}
+
+void NuPlayer::RTSPSource::startBufferingIfNecessary() {
+ Mutex::Autolock _l(mBufferingLock);
+
+ if (!mBuffering) {
+ mBuffering = true;
+
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatBufferingStart);
+ notify->post();
+ }
+}
+
+bool NuPlayer::RTSPSource::stopBufferingIfNecessary() {
+ Mutex::Autolock _l(mBufferingLock);
+
+ if (mBuffering) {
+ if (!haveSufficientDataOnAllTracks()) {
+ return false;
+ }
+
+ mBuffering = false;
+
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatBufferingEnd);
+ notify->post();
+ }
+
+ return true;
+}
+
+
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 8cf34a0..ac3299a 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -22,8 +22,6 @@
#include "ATSParser.h"
-#include <media/stagefright/foundation/AHandlerReflector.h>
-
namespace android {
struct ALooper;
@@ -34,6 +32,7 @@ struct SDPLoader;
struct NuPlayer::RTSPSource : public NuPlayer::Source {
RTSPSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid = false,
@@ -88,6 +87,7 @@ private:
bool mNPTMappingValid;
};
+ sp<IMediaHTTPService> mHTTPService;
AString mURL;
KeyedVector<String8, String8> mExtraHeaders;
bool mUIDValid;
@@ -97,10 +97,10 @@ private:
State mState;
status_t mFinalResult;
uint32_t mDisconnectReplyID;
+ Mutex mBufferingLock;
bool mBuffering;
sp<ALooper> mLooper;
- sp<AHandlerReflector<RTSPSource> > mReflector;
sp<MyHandler> mHandler;
sp<SDPLoader> mSDPLoader;
@@ -127,6 +127,9 @@ private:
bool haveSufficientDataOnAllTracks();
void setEOSTimeout(bool audio, int64_t timeout);
+ void setError(status_t err);
+ void startBufferingIfNecessary();
+ bool stopBufferingIfNecessary();
DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);
};
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index 28f0d50..b3f224d 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -37,14 +37,27 @@ NuPlayer::StreamingSource::StreamingSource(
const sp<IStreamSource> &source)
: Source(notify),
mSource(source),
- mFinalResult(OK) {
+ mFinalResult(OK),
+ mBuffering(false) {
}
NuPlayer::StreamingSource::~StreamingSource() {
+ if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+ }
}
void NuPlayer::StreamingSource::prepareAsync() {
- notifyVideoSizeChanged(0, 0);
+ if (mLooper == NULL) {
+ mLooper = new ALooper;
+ mLooper->setName("streaming");
+ mLooper->start();
+
+ mLooper->registerHandler(this);
+ }
+
+ notifyVideoSizeChanged();
notifyFlagsChanged(0);
notifyPrepared();
}
@@ -62,13 +75,15 @@ void NuPlayer::StreamingSource::start() {
mTSParser = new ATSParser(parserFlags);
mStreamListener->start();
+
+ postReadBuffer();
}
status_t NuPlayer::StreamingSource::feedMoreTSData() {
- if (mFinalResult != OK) {
- return mFinalResult;
- }
+ return postReadBuffer();
+}
+void NuPlayer::StreamingSource::onReadBuffer() {
for (int32_t i = 0; i < 50; ++i) {
char buffer[188];
sp<AMessage> extra;
@@ -77,10 +92,10 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
if (n == 0) {
ALOGI("input data EOS reached.");
mTSParser->signalEOS(ERROR_END_OF_STREAM);
- mFinalResult = ERROR_END_OF_STREAM;
+ setError(ERROR_END_OF_STREAM);
break;
} else if (n == INFO_DISCONTINUITY) {
- int32_t type = ATSParser::DISCONTINUITY_SEEK;
+ int32_t type = ATSParser::DISCONTINUITY_TIME;
int32_t mask;
if (extra != NULL
@@ -88,7 +103,8 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
IStreamListener::kKeyDiscontinuityMask, &mask)) {
if (mask == 0) {
ALOGE("Client specified an illegal discontinuity type.");
- return ERROR_UNSUPPORTED;
+ setError(ERROR_UNSUPPORTED);
+ break;
}
type = mask;
@@ -97,7 +113,6 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
mTSParser->signalDiscontinuity(
(ATSParser::DiscontinuityType)type, extra);
} else if (n < 0) {
- CHECK_EQ(n, -EWOULDBLOCK);
break;
} else {
if (buffer[0] == 0x00) {
@@ -118,7 +133,7 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
mTSParser->signalDiscontinuity(
((type & 1) == 0)
- ? ATSParser::DISCONTINUITY_SEEK
+ ? ATSParser::DISCONTINUITY_TIME
: ATSParser::DISCONTINUITY_FORMATCHANGE,
extra);
} else {
@@ -128,22 +143,80 @@ status_t NuPlayer::StreamingSource::feedMoreTSData() {
ALOGE("TS Parser returned error %d", err);
mTSParser->signalEOS(err);
- mFinalResult = err;
+ setError(err);
break;
}
}
}
}
+}
+
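+// Schedules an asynchronous read on the source's looper unless one is already
+// pending or a final error was recorded.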
+status_t NuPlayer::StreamingSource::postReadBuffer() {
+ {
+ Mutex::Autolock _l(mBufferingLock);
+ if (mFinalResult != OK) {
+ return mFinalResult;
+ }
+ if (mBuffering) {
+ return OK;
+ }
+ mBuffering = true;
+ }
+ (new AMessage(kWhatReadBuffer, id()))->post();
return OK;
}
-sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) {
- ATSParser::SourceType type =
- audio ? ATSParser::AUDIO : ATSParser::VIDEO;
+bool NuPlayer::StreamingSource::haveSufficientDataOnAllTracks() {
+    // We're going to buffer at least 2 secs worth of data on all tracks before
+    // starting playback (both at startup and after a seek).
- sp<AnotherPacketSource> source =
- static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+ static const int64_t kMinDurationUs = 2000000ll;
+
+ sp<AnotherPacketSource> audioTrack = getSource(true /*audio*/);
+ sp<AnotherPacketSource> videoTrack = getSource(false /*audio*/);
+
+ status_t err;
+ int64_t durationUs;
+ if (audioTrack != NULL
+ && (durationUs = audioTrack->getBufferedDurationUs(&err))
+ < kMinDurationUs
+ && err == OK) {
+ ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
+ durationUs / 1E6);
+ return false;
+ }
+
+ if (videoTrack != NULL
+ && (durationUs = videoTrack->getBufferedDurationUs(&err))
+ < kMinDurationUs
+ && err == OK) {
+ ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
+ durationUs / 1E6);
+ return false;
+ }
+
+ return true;
+}
+
+void NuPlayer::StreamingSource::setError(status_t err) {
+ Mutex::Autolock _l(mBufferingLock);
+ mFinalResult = err;
+}
+
+sp<AnotherPacketSource> NuPlayer::StreamingSource::getSource(bool audio) {
+ if (mTSParser == NULL) {
+ return NULL;
+ }
+
+ sp<MediaSource> source = mTSParser->getSource(
+ audio ? ATSParser::AUDIO : ATSParser::VIDEO);
+
+ return static_cast<AnotherPacketSource *>(source.get());
+}
+
+sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) {
+ sp<AnotherPacketSource> source = getSource(audio);
if (source == NULL) {
return NULL;
@@ -154,16 +227,16 @@ sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) {
status_t NuPlayer::StreamingSource::dequeueAccessUnit(
bool audio, sp<ABuffer> *accessUnit) {
- ATSParser::SourceType type =
- audio ? ATSParser::AUDIO : ATSParser::VIDEO;
-
- sp<AnotherPacketSource> source =
- static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get());
+ sp<AnotherPacketSource> source = getSource(audio);
if (source == NULL) {
return -EWOULDBLOCK;
}
+ if (!haveSufficientDataOnAllTracks()) {
+ postReadBuffer();
+ }
+
status_t finalResult;
if (!source->hasBufferAvailable(&finalResult)) {
return finalResult == OK ? -EWOULDBLOCK : finalResult;
@@ -186,5 +259,26 @@ bool NuPlayer::StreamingSource::isRealTime() const {
return mSource->flags() & IStreamSource::kFlagIsRealTimeData;
}
+void NuPlayer::StreamingSource::onMessageReceived(
+ const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatReadBuffer:
+ {
+ onReadBuffer();
+
+ {
+ Mutex::Autolock _l(mBufferingLock);
+ mBuffering = false;
+ }
+ break;
+ }
+ default:
+ {
+ TRESPASS();
+ }
+ }
+}
+
+
} // namespace android
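The StreamingSource hunks above move TS parsing onto a dedicated "streaming" ALooper: feedMoreTSData() now just calls postReadBuffer(), which posts a kWhatReadBuffer message at most once at a time, guarded by mBuffering under mBufferingLock, and onMessageReceived() clears the flag once onReadBuffer() finishes. A minimal standalone sketch of that post-once/clear-when-done pattern, using std::thread and std::mutex in place of ALooper/AMessage (Worker, postReadBuffer and onReadBuffer are illustrative names only):

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

class Worker {
public:
    Worker() : mBuffering(false), mStop(false), mThread(&Worker::loop, this) {}
    ~Worker() {
        { std::lock_guard<std::mutex> l(mLock); mStop = true; }
        mCond.notify_one();
        mThread.join();
    }
    // Analogue of postReadBuffer(): queue work only if none is pending.
    void postReadBuffer() {
        std::lock_guard<std::mutex> l(mLock);
        if (mBuffering) return;          // a read is already queued
        mBuffering = true;
        mCond.notify_one();
    }
private:
    void loop() {
        std::unique_lock<std::mutex> l(mLock);
        for (;;) {
            mCond.wait(l, [this] { return mBuffering || mStop; });
            if (mStop) return;
            l.unlock();
            onReadBuffer();              // the actual parsing, off the caller's thread
            l.lock();
            mBuffering = false;          // allow the next post
        }
    }
    // Analogue of onReadBuffer(): would pull up to 50 TS packets per pass.
    void onReadBuffer() { std::printf("reading a chunk of TS data\n"); }

    std::mutex mLock;
    std::condition_variable mCond;
    bool mBuffering;
    bool mStop;
    std::thread mThread;
};

In the patch itself the queued work is an AMessage handled by onMessageReceived(), and dequeueAccessUnit() re-posts a read whenever haveSufficientDataOnAllTracks() sees less than the 2-second minimum buffered.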
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h
index 412b6c4..1f95f3c 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.h
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h
@@ -25,6 +25,7 @@ namespace android {
struct ABuffer;
struct ATSParser;
+struct AnotherPacketSource;
struct NuPlayer::StreamingSource : public NuPlayer::Source {
StreamingSource(
@@ -43,14 +44,29 @@ struct NuPlayer::StreamingSource : public NuPlayer::Source {
protected:
virtual ~StreamingSource();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
virtual sp<MetaData> getFormatMeta(bool audio);
private:
+ enum {
+ kWhatReadBuffer,
+ };
sp<IStreamSource> mSource;
status_t mFinalResult;
sp<NuPlayerStreamListener> mStreamListener;
sp<ATSParser> mTSParser;
+ bool mBuffering;
+ Mutex mBufferingLock;
+ sp<ALooper> mLooper;
+
+ void setError(status_t err);
+ sp<AnotherPacketSource> getSource(bool audio);
+ bool haveSufficientDataOnAllTracks();
+ status_t postReadBuffer();
+ void onReadBuffer();
+
DISALLOW_EVIL_CONSTRUCTORS(StreamingSource);
};
diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp
deleted file mode 100644
index 2aae4dd..0000000
--- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "MP4Source.h"
-
-#include "FragmentedMP4Parser.h"
-#include "../NuPlayerStreamListener.h"
-
-#include <media/IStreamSource.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-struct StreamSource : public FragmentedMP4Parser::Source {
- StreamSource(const sp<IStreamSource> &source)
- : mListener(new NuPlayer::NuPlayerStreamListener(source, 0)),
- mPosition(0) {
- mListener->start();
- }
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- if (offset < mPosition) {
- return -EPIPE;
- }
-
- while (offset > mPosition) {
- char buffer[1024];
- off64_t skipBytes = offset - mPosition;
- if (skipBytes > sizeof(buffer)) {
- skipBytes = sizeof(buffer);
- }
-
- sp<AMessage> extra;
- ssize_t n;
- for (;;) {
- n = mListener->read(buffer, skipBytes, &extra);
-
- if (n == -EWOULDBLOCK) {
- usleep(10000);
- continue;
- }
-
- break;
- }
-
- ALOGV("skipped %ld bytes at offset %lld", n, mPosition);
-
- if (n < 0) {
- return n;
- }
-
- mPosition += n;
- }
-
- sp<AMessage> extra;
- size_t total = 0;
- while (total < size) {
- ssize_t n = mListener->read(
- (uint8_t *)data + total, size - total, &extra);
-
- if (n == -EWOULDBLOCK) {
- usleep(10000);
- continue;
- } else if (n == 0) {
- break;
- } else if (n < 0) {
- mPosition += total;
- return n;
- }
-
- total += n;
- }
-
- ALOGV("read %ld bytes at offset %lld", total, mPosition);
-
- mPosition += total;
-
- return total;
- }
-
- bool isSeekable() {
- return false;
- }
-
-private:
- sp<NuPlayer::NuPlayerStreamListener> mListener;
- off64_t mPosition;
-
- DISALLOW_EVIL_CONSTRUCTORS(StreamSource);
-};
-
-MP4Source::MP4Source(
- const sp<AMessage> &notify, const sp<IStreamSource> &source)
- : Source(notify),
- mSource(source),
- mLooper(new ALooper),
- mParser(new FragmentedMP4Parser),
- mEOS(false) {
- mLooper->registerHandler(mParser);
-}
-
-MP4Source::~MP4Source() {
-}
-
-void MP4Source::prepareAsync() {
- notifyVideoSizeChanged(0, 0);
- notifyFlagsChanged(0);
- notifyPrepared();
-}
-
-void MP4Source::start() {
- mLooper->start(false /* runOnCallingThread */);
- mParser->start(new StreamSource(mSource));
-}
-
-status_t MP4Source::feedMoreTSData() {
- return mEOS ? ERROR_END_OF_STREAM : (status_t)OK;
-}
-
-sp<AMessage> MP4Source::getFormat(bool audio) {
- return mParser->getFormat(audio);
-}
-
-status_t MP4Source::dequeueAccessUnit(
- bool audio, sp<ABuffer> *accessUnit) {
- return mParser->dequeueAccessUnit(audio, accessUnit);
-}
-
-} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h
deleted file mode 100644
index a6ef622..0000000
--- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MP4_SOURCE_H
-#define MP4_SOURCE_H
-
-#include "NuPlayerSource.h"
-
-namespace android {
-
-struct FragmentedMP4Parser;
-
-struct MP4Source : public NuPlayer::Source {
- MP4Source(const sp<AMessage> &notify, const sp<IStreamSource> &source);
-
- virtual void prepareAsync();
- virtual void start();
-
- virtual status_t feedMoreTSData();
-
- virtual sp<AMessage> getFormat(bool audio);
-
- virtual status_t dequeueAccessUnit(
- bool audio, sp<ABuffer> *accessUnit);
-
-protected:
- virtual ~MP4Source();
-
-private:
- sp<IStreamSource> mSource;
- sp<ALooper> mLooper;
- sp<FragmentedMP4Parser> mParser;
- bool mEOS;
-
- DISALLOW_EVIL_CONSTRUCTORS(MP4Source);
-};
-
-} // namespace android
-
-#endif // MP4_SOURCE_H
diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk
index 69c75b8..9707c4a 100644
--- a/media/libnbaio/Android.mk
+++ b/media/libnbaio/Android.mk
@@ -31,9 +31,8 @@ LOCAL_SHARED_LIBRARIES := \
libcommon_time_client \
libcutils \
libutils \
- liblog \
- libmedia
-# This dependency on libmedia is for SingleStateQueueInstantiations.
-# Consider a separate a library for SingleStateQueueInstantiations.
+ liblog
+
+LOCAL_STATIC_LIBRARIES += libinstantssq
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libnbaio/AudioBufferProviderSource.cpp b/media/libnbaio/AudioBufferProviderSource.cpp
index 74a6fdb..551f516 100644
--- a/media/libnbaio/AudioBufferProviderSource.cpp
+++ b/media/libnbaio/AudioBufferProviderSource.cpp
@@ -24,11 +24,11 @@
namespace android {
AudioBufferProviderSource::AudioBufferProviderSource(AudioBufferProvider *provider,
- NBAIO_Format format) :
+ const NBAIO_Format& format) :
NBAIO_Source(format), mProvider(provider), mConsumed(0)
{
ALOG_ASSERT(provider != NULL);
- ALOG_ASSERT(format != Format_Invalid);
+ ALOG_ASSERT(Format_isValid(format));
}
AudioBufferProviderSource::~AudioBufferProviderSource()
@@ -68,7 +68,7 @@ ssize_t AudioBufferProviderSource::read(void *buffer,
}
// count could be zero, either because count was zero on entry or
// available is zero, but both are unlikely so don't check for that
- memcpy(buffer, (char *) mBuffer.raw + (mConsumed << mBitShift), count << mBitShift);
+ memcpy(buffer, (char *) mBuffer.raw + (mConsumed * mFrameSize), count * mFrameSize);
if (CC_UNLIKELY((mConsumed += count) >= mBuffer.frameCount)) {
mProvider->releaseBuffer(&mBuffer);
mBuffer.raw = NULL;
@@ -120,7 +120,7 @@ ssize_t AudioBufferProviderSource::readVia(readVia_t via, size_t total, void *us
count = available;
}
if (CC_LIKELY(count > 0)) {
- char* readTgt = (char *) mBuffer.raw + (mConsumed << mBitShift);
+ char* readTgt = (char *) mBuffer.raw + (mConsumed * mFrameSize);
ssize_t ret = via(user, readTgt, count, readPTS);
if (CC_UNLIKELY(ret <= 0)) {
if (CC_LIKELY(accumulator > 0)) {
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 05273f6..6aab48a 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -40,16 +40,15 @@ AudioStreamInSource::~AudioStreamInSource()
ssize_t AudioStreamInSource::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
- if (mFormat == Format_Invalid) {
+ if (!Format_isValid(mFormat)) {
mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
- if (streamFormat == AUDIO_FORMAT_PCM_16_BIT) {
- uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
- audio_channel_mask_t channelMask =
- (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
- mFormat = Format_from_SR_C(sampleRate, popcount(channelMask));
- mBitShift = Format_frameBitShift(mFormat);
- }
+ uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
+ audio_channel_mask_t channelMask =
+ (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
+ mFormat = Format_from_SR_C(sampleRate,
+ audio_channel_count_from_in_mask(channelMask), streamFormat);
+ mFrameSize = Format_frameSize(mFormat);
}
return NBAIO_Source::negotiate(offers, numOffers, counterOffers, numCounterOffers);
}
@@ -65,14 +64,14 @@ size_t AudioStreamInSource::framesOverrun()
return mFramesOverrun;
}
-ssize_t AudioStreamInSource::read(void *buffer, size_t count)
+ssize_t AudioStreamInSource::read(void *buffer, size_t count, int64_t readPTS __unused)
{
- if (CC_UNLIKELY(mFormat == Format_Invalid)) {
+ if (CC_UNLIKELY(!Format_isValid(mFormat))) {
return NEGOTIATE;
}
- ssize_t bytesRead = mStream->read(mStream, buffer, count << mBitShift);
+ ssize_t bytesRead = mStream->read(mStream, buffer, count * mFrameSize);
if (bytesRead > 0) {
- size_t framesRead = bytesRead >> mBitShift;
+ size_t framesRead = bytesRead / mFrameSize;
mFramesRead += framesRead;
return framesRead;
} else {
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index e4341d7..0d5f935 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -37,16 +37,15 @@ AudioStreamOutSink::~AudioStreamOutSink()
ssize_t AudioStreamOutSink::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
- if (mFormat == Format_Invalid) {
+ if (!Format_isValid(mFormat)) {
mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
- if (streamFormat == AUDIO_FORMAT_PCM_16_BIT) {
- uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
- audio_channel_mask_t channelMask =
- (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
- mFormat = Format_from_SR_C(sampleRate, popcount(channelMask));
- mBitShift = Format_frameBitShift(mFormat);
- }
+ uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
+ audio_channel_mask_t channelMask =
+ (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
+ mFormat = Format_from_SR_C(sampleRate,
+ audio_channel_count_from_out_mask(channelMask), streamFormat);
+ mFrameSize = Format_frameSize(mFormat);
}
return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
}
@@ -56,10 +55,10 @@ ssize_t AudioStreamOutSink::write(const void *buffer, size_t count)
if (!mNegotiated) {
return NEGOTIATE;
}
- ALOG_ASSERT(mFormat != Format_Invalid);
- ssize_t ret = mStream->write(mStream, buffer, count << mBitShift);
+ ALOG_ASSERT(Format_isValid(mFormat));
+ ssize_t ret = mStream->write(mStream, buffer, count * mFrameSize);
if (ret > 0) {
- ret >>= mBitShift;
+ ret /= mFrameSize;
mFramesWritten += ret;
} else {
// FIXME verify HAL implementations are returning the correct error codes e.g. WOULD_BLOCK
diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp
index 3c61b60..0b65861 100644
--- a/media/libnbaio/MonoPipe.cpp
+++ b/media/libnbaio/MonoPipe.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
#define LOG_TAG "MonoPipe"
//#define LOG_NDEBUG 0
@@ -30,7 +32,24 @@
namespace android {
-MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) :
+static uint64_t cacheN; // output of CCHelper::getLocalFreq()
+static bool cacheValid; // whether cacheN is valid
+static pthread_once_t cacheOnceControl = PTHREAD_ONCE_INIT;
+
+static void cacheOnceInit()
+{
+ CCHelper tmpHelper;
+ status_t res;
+ if (OK != (res = tmpHelper.getLocalFreq(&cacheN))) {
+ ALOGE("Failed to fetch local time frequency when constructing a"
+ " MonoPipe (res = %d). getNextWriteTimestamp calls will be"
+ " non-functional", res);
+ return;
+ }
+ cacheValid = true;
+}
+
+MonoPipe::MonoPipe(size_t reqFrames, const NBAIO_Format& format, bool writeCanBlock) :
NBAIO_Sink(format),
mUpdateSeq(0),
mReqFrames(reqFrames),
@@ -47,8 +66,6 @@ MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) :
mTimestampMutator(&mTimestampShared),
mTimestampObserver(&mTimestampShared)
{
- CCHelper tmpHelper;
- status_t res;
uint64_t N, D;
mNextRdPTS = AudioBufferProvider::kInvalidPTS;
@@ -59,19 +76,20 @@ MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) :
mSamplesToLocalTime.a_to_b_denom = 0;
D = Format_sampleRate(format);
- if (OK != (res = tmpHelper.getLocalFreq(&N))) {
- ALOGE("Failed to fetch local time frequency when constructing a"
- " MonoPipe (res = %d). getNextWriteTimestamp calls will be"
- " non-functional", res);
+
+ (void) pthread_once(&cacheOnceControl, cacheOnceInit);
+ if (!cacheValid) {
+ // log has already been done
return;
}
+ N = cacheN;
LinearTransform::reduce(&N, &D);
static const uint64_t kSignedHiBitsMask = ~(0x7FFFFFFFull);
static const uint64_t kUnsignedHiBitsMask = ~(0xFFFFFFFFull);
if ((N & kSignedHiBitsMask) || (D & kUnsignedHiBitsMask)) {
ALOGE("Cannot reduce sample rate to local clock frequency ratio to fit"
- " in a 32/32 bit rational. (max reduction is 0x%016llx/0x%016llx"
+ " in a 32/32 bit rational. (max reduction is 0x%016" PRIx64 "/0x%016" PRIx64
"). getNextWriteTimestamp calls will be non-functional", N, D);
return;
}
@@ -115,11 +133,11 @@ ssize_t MonoPipe::write(const void *buffer, size_t count)
part1 = written;
}
if (CC_LIKELY(part1 > 0)) {
- memcpy((char *) mBuffer + (rear << mBitShift), buffer, part1 << mBitShift);
+ memcpy((char *) mBuffer + (rear * mFrameSize), buffer, part1 * mFrameSize);
if (CC_UNLIKELY(rear + part1 == mMaxFrames)) {
size_t part2 = written - part1;
if (CC_LIKELY(part2 > 0)) {
- memcpy(mBuffer, (char *) buffer + (part1 << mBitShift), part2 << mBitShift);
+ memcpy(mBuffer, (char *) buffer + (part1 * mFrameSize), part2 * mFrameSize);
}
}
android_atomic_release_store(written + mRear, &mRear);
@@ -129,7 +147,7 @@ ssize_t MonoPipe::write(const void *buffer, size_t count)
break;
}
count -= written;
- buffer = (char *) buffer + (written << mBitShift);
+ buffer = (char *) buffer + (written * mFrameSize);
// Simulate blocking I/O by sleeping at different rates, depending on a throttle.
// The throttle tries to keep the mean pipe depth near the setpoint, with a slight jitter.
uint32_t ns;
@@ -292,7 +310,7 @@ int64_t MonoPipe::offsetTimestampByAudioFrames(int64_t ts, size_t audFrames)
// error, but then zero out the ratio in the linear transform so
// that we don't try to do any conversions from now on. This
// MonoPipe's getNextWriteTimestamp is now broken for good.
- ALOGE("Overflow when attempting to convert %d audio frames to"
+ ALOGE("Overflow when attempting to convert %zu audio frames to"
" duration in local time. getNextWriteTimestamp will fail from"
" now on.", audFrames);
mSamplesToLocalTime.a_to_b_numer = 0;
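The MonoPipe hunk above replaces a per-constructor CCHelper::getLocalFreq() call with a process-wide cache (cacheN/cacheValid) initialized through pthread_once, so the expensive local-clock query runs once no matter how many pipes are constructed. A small sketch of the idiom, with queryLocalFreq() standing in for the real call:

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

static uint64_t gCachedFreq;               // result of the one-time query
static bool     gCacheValid = false;       // stays false if the query failed
static pthread_once_t gOnce = PTHREAD_ONCE_INIT;

// Placeholder for CCHelper::getLocalFreq(); returns 0 on success.
static int queryLocalFreq(uint64_t *out) {
    *out = 19200000;                       // e.g. a 19.2 MHz local clock
    return 0;
}

static void cacheInit(void) {
    if (queryLocalFreq(&gCachedFreq) != 0) {
        fprintf(stderr, "local freq query failed; timestamps will be non-functional\n");
        return;
    }
    gCacheValid = true;
}

uint64_t localFreqOrZero(void) {
    (void) pthread_once(&gOnce, cacheInit); // cacheInit runs at most once per process
    return gCacheValid ? gCachedFreq : 0;
}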
diff --git a/media/libnbaio/MonoPipeReader.cpp b/media/libnbaio/MonoPipeReader.cpp
index 851341a..de82229 100644
--- a/media/libnbaio/MonoPipeReader.cpp
+++ b/media/libnbaio/MonoPipeReader.cpp
@@ -73,11 +73,11 @@ ssize_t MonoPipeReader::read(void *buffer, size_t count, int64_t readPTS)
part1 = red;
}
if (CC_LIKELY(part1 > 0)) {
- memcpy(buffer, (char *) mPipe->mBuffer + (front << mBitShift), part1 << mBitShift);
+ memcpy(buffer, (char *) mPipe->mBuffer + (front * mFrameSize), part1 * mFrameSize);
if (CC_UNLIKELY(front + part1 == mPipe->mMaxFrames)) {
size_t part2 = red - part1;
if (CC_LIKELY(part2 > 0)) {
- memcpy((char *) buffer + (part1 << mBitShift), mPipe->mBuffer, part2 << mBitShift);
+ memcpy((char *) buffer + (part1 * mFrameSize), mPipe->mBuffer, part2 * mFrameSize);
}
}
mPipe->updateFrontAndNRPTS(red + mPipe->mFront, nextReadPTS);
diff --git a/media/libnbaio/NBAIO.cpp b/media/libnbaio/NBAIO.cpp
index e0d2c21..d641e74 100644
--- a/media/libnbaio/NBAIO.cpp
+++ b/media/libnbaio/NBAIO.cpp
@@ -22,119 +22,42 @@
namespace android {
-size_t Format_frameSize(NBAIO_Format format)
+size_t Format_frameSize(const NBAIO_Format& format)
{
- return Format_channelCount(format) * sizeof(short);
+ return format.mFrameSize;
}
-size_t Format_frameBitShift(NBAIO_Format format)
-{
- // sizeof(short) == 2, so frame size == 1 << channels
- return Format_channelCount(format);
-}
-
-enum {
- Format_SR_8000,
- Format_SR_11025,
- Format_SR_16000,
- Format_SR_22050,
- Format_SR_24000,
- Format_SR_32000,
- Format_SR_44100,
- Format_SR_48000,
- Format_SR_Mask = 7
-};
-
-enum {
- Format_C_1 = 0x08,
- Format_C_2 = 0x10,
- Format_C_Mask = 0x18
-};
+const NBAIO_Format Format_Invalid = { 0, 0, AUDIO_FORMAT_INVALID, 0 };
-unsigned Format_sampleRate(NBAIO_Format format)
+unsigned Format_sampleRate(const NBAIO_Format& format)
{
- if (format == Format_Invalid) {
- return 0;
- }
- switch (format & Format_SR_Mask) {
- case Format_SR_8000:
- return 8000;
- case Format_SR_11025:
- return 11025;
- case Format_SR_16000:
- return 16000;
- case Format_SR_22050:
- return 22050;
- case Format_SR_24000:
- return 24000;
- case Format_SR_32000:
- return 32000;
- case Format_SR_44100:
- return 44100;
- case Format_SR_48000:
- return 48000;
- default:
+ if (!Format_isValid(format)) {
return 0;
}
+ return format.mSampleRate;
}
-unsigned Format_channelCount(NBAIO_Format format)
+unsigned Format_channelCount(const NBAIO_Format& format)
{
- if (format == Format_Invalid) {
- return 0;
- }
- switch (format & Format_C_Mask) {
- case Format_C_1:
- return 1;
- case Format_C_2:
- return 2;
- default:
+ if (!Format_isValid(format)) {
return 0;
}
+ return format.mChannelCount;
}
-NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount)
+NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount,
+ audio_format_t format)
{
- NBAIO_Format format;
- switch (sampleRate) {
- case 8000:
- format = Format_SR_8000;
- break;
- case 11025:
- format = Format_SR_11025;
- break;
- case 16000:
- format = Format_SR_16000;
- break;
- case 22050:
- format = Format_SR_22050;
- break;
- case 24000:
- format = Format_SR_24000;
- break;
- case 32000:
- format = Format_SR_32000;
- break;
- case 44100:
- format = Format_SR_44100;
- break;
- case 48000:
- format = Format_SR_48000;
- break;
- default:
+ if (sampleRate == 0 || channelCount == 0 || !audio_is_valid_format(format)) {
return Format_Invalid;
}
- switch (channelCount) {
- case 1:
- format |= Format_C_1;
- break;
- case 2:
- format |= Format_C_2;
- break;
- default:
- return Format_Invalid;
- }
- return format;
+ NBAIO_Format ret;
+ ret.mSampleRate = sampleRate;
+ ret.mChannelCount = channelCount;
+ ret.mFormat = format;
+ ret.mFrameSize = audio_is_linear_pcm(format) ?
+ channelCount * audio_bytes_per_sample(format) : sizeof(uint8_t);
+ return ret;
}
// This is a default implementation; it is expected that subclasses will optimize this.
@@ -214,11 +137,11 @@ ssize_t NBAIO_Source::readVia(readVia_t via, size_t total, void *user,
ssize_t NBAIO_Port::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
- ALOGV("negotiate offers=%p numOffers=%u countersOffers=%p numCounterOffers=%u",
+ ALOGV("negotiate offers=%p numOffers=%zu countersOffers=%p numCounterOffers=%zu",
offers, numOffers, counterOffers, numCounterOffers);
- if (mFormat != Format_Invalid) {
+ if (Format_isValid(mFormat)) {
for (size_t i = 0; i < numOffers; ++i) {
- if (offers[i] == mFormat) {
+ if (Format_isEqual(offers[i], mFormat)) {
mNegotiated = true;
return i;
}
@@ -233,4 +156,17 @@ ssize_t NBAIO_Port::negotiate(const NBAIO_Format offers[], size_t numOffers,
return (ssize_t) NEGOTIATE;
}
+bool Format_isValid(const NBAIO_Format& format)
+{
+ return format.mSampleRate != 0 && format.mChannelCount != 0 &&
+ format.mFormat != AUDIO_FORMAT_INVALID && format.mFrameSize != 0;
+}
+
+bool Format_isEqual(const NBAIO_Format& format1, const NBAIO_Format& format2)
+{
+ return format1.mSampleRate == format2.mSampleRate &&
+ format1.mChannelCount == format2.mChannelCount && format1.mFormat == format2.mFormat &&
+ format1.mFrameSize == format2.mFrameSize;
+}
+
} // namespace android
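The NBAIO hunks above replace the packed Format_SR_*/Format_C_* bit encoding with an explicit NBAIO_Format struct carrying sample rate, channel count, audio_format_t and a precomputed frame size, which is why every "count << mBitShift" in the pipe/sink/source files becomes "count * mFrameSize". A short sketch of the arithmetic, assuming linear PCM (frameSize, framesToBytes and bytesToFrames are illustrative helpers, not part of the patch):

#include <cstddef>

// With 16-bit mono/stereo only, the frame size was always a power of two and a
// bit shift sufficed; with arbitrary sample sizes (e.g. 3-byte 24-bit packed
// PCM) the frame size can be 3 or 6 bytes, so a plain multiply/divide is needed.
static size_t frameSize(unsigned channelCount, size_t bytesPerSample) {
    return channelCount * bytesPerSample;   // e.g. 2 * 3 = 6 for 24-bit stereo
}

static size_t framesToBytes(size_t frames, size_t frameBytes) {
    return frames * frameBytes;             // replaces frames << bitShift
}

static size_t bytesToFrames(size_t bytes, size_t frameBytes) {
    return bytes / frameBytes;              // replaces bytes >> bitShift
}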
diff --git a/media/libnbaio/NBLog.cpp b/media/libnbaio/NBLog.cpp
index d74a7a6..4d14904 100644
--- a/media/libnbaio/NBLog.cpp
+++ b/media/libnbaio/NBLog.cpp
@@ -26,6 +26,7 @@
#include <cutils/atomic.h>
#include <media/nbaio/NBLog.h>
#include <utils/Log.h>
+#include <utils/String8.h>
namespace android {
@@ -337,25 +338,25 @@ void NBLog::Reader::dump(int fd, size_t indent)
}
i -= length + 3;
}
- if (i > 0) {
- lost += i;
- if (fd >= 0) {
- fdprintf(fd, "%*swarning: lost %zu bytes worth of events\n", indent, "", lost);
- } else {
- ALOGI("%*swarning: lost %u bytes worth of events\n", indent, "", lost);
- }
+ mFd = fd;
+ mIndent = indent;
+ String8 timestamp, body;
+ lost += i;
+ if (lost > 0) {
+ body.appendFormat("warning: lost %zu bytes worth of events", lost);
+    // TODO: timestamp is empty here; the only other choice is to wait for the first timestamp
+    // event in the log to push it out. Consider keeping the timestamp/body between calls to readAt().
+ dumpLine(timestamp, body);
}
size_t width = 1;
while (maxSec >= 10) {
++width;
maxSec /= 10;
}
- char prefix[32];
if (maxSec >= 0) {
- snprintf(prefix, sizeof(prefix), "[%*s] ", width + 4, "");
- } else {
- prefix[0] = '\0';
+ timestamp.appendFormat("[%*s]", (int) width + 4, "");
}
+ bool deferredTimestamp = false;
while (i < avail) {
event = (Event) copy[i];
length = copy[i + 1];
@@ -363,11 +364,8 @@ void NBLog::Reader::dump(int fd, size_t indent)
size_t advance = length + 3;
switch (event) {
case EVENT_STRING:
- if (fd >= 0) {
- fdprintf(fd, "%*s%s%.*s\n", indent, "", prefix, length, (const char *) data);
- } else {
- ALOGI("%*s%s%.*s", indent, "", prefix, length, (const char *) data);
- } break;
+ body.appendFormat("%.*s", (int) length, (const char *) data);
+ break;
case EVENT_TIMESTAMP: {
// already checked that length == sizeof(struct timespec);
memcpy(&ts, data, sizeof(struct timespec));
@@ -400,48 +398,56 @@ void NBLog::Reader::dump(int fd, size_t indent)
prevNsec = tsNext.tv_nsec;
}
size_t n = (j - i) / (sizeof(struct timespec) + 3);
+ if (deferredTimestamp) {
+ dumpLine(timestamp, body);
+ deferredTimestamp = false;
+ }
+ timestamp.clear();
if (n >= kSquashTimestamp) {
- if (fd >= 0) {
- fdprintf(fd, "%*s[%d.%03d to .%.03d by .%.03d to .%.03d]\n", indent, "",
- (int) ts.tv_sec, (int) (ts.tv_nsec / 1000000),
- (int) ((ts.tv_nsec + deltaTotal) / 1000000),
- (int) (deltaMin / 1000000), (int) (deltaMax / 1000000));
- } else {
- ALOGI("%*s[%d.%03d to .%.03d by .%.03d to .%.03d]\n", indent, "",
- (int) ts.tv_sec, (int) (ts.tv_nsec / 1000000),
- (int) ((ts.tv_nsec + deltaTotal) / 1000000),
- (int) (deltaMin / 1000000), (int) (deltaMax / 1000000));
- }
+ timestamp.appendFormat("[%d.%03d to .%.03d by .%.03d to .%.03d]",
+ (int) ts.tv_sec, (int) (ts.tv_nsec / 1000000),
+ (int) ((ts.tv_nsec + deltaTotal) / 1000000),
+ (int) (deltaMin / 1000000), (int) (deltaMax / 1000000));
i = j;
advance = 0;
break;
}
- if (fd >= 0) {
- fdprintf(fd, "%*s[%d.%03d]\n", indent, "", (int) ts.tv_sec,
- (int) (ts.tv_nsec / 1000000));
- } else {
- ALOGI("%*s[%d.%03d]", indent, "", (int) ts.tv_sec,
- (int) (ts.tv_nsec / 1000000));
- }
+ timestamp.appendFormat("[%d.%03d]", (int) ts.tv_sec,
+ (int) (ts.tv_nsec / 1000000));
+ deferredTimestamp = true;
} break;
case EVENT_RESERVED:
default:
- if (fd >= 0) {
- fdprintf(fd, "%*s%swarning: unknown event %d\n", indent, "", prefix, event);
- } else {
- ALOGI("%*s%swarning: unknown event %d", indent, "", prefix, event);
- }
+ body.appendFormat("warning: unknown event %d", event);
break;
}
i += advance;
+
+ if (!body.isEmpty()) {
+ dumpLine(timestamp, body);
+ deferredTimestamp = false;
+ }
+ }
+ if (deferredTimestamp) {
+ dumpLine(timestamp, body);
}
// FIXME it would be more efficient to put a char mCopy[256] as a member variable of the dumper
delete[] copy;
}
+void NBLog::Reader::dumpLine(const String8& timestamp, String8& body)
+{
+ if (mFd >= 0) {
+ dprintf(mFd, "%.*s%s %s\n", mIndent, "", timestamp.string(), body.string());
+ } else {
+ ALOGI("%.*s%s %s", mIndent, "", timestamp.string(), body.string());
+ }
+ body.clear();
+}
+
bool NBLog::Reader::isIMemory(const sp<IMemory>& iMemory) const
{
- return iMemory.get() == mIMemory.get();
+ return iMemory != 0 && mIMemory != 0 && iMemory->pointer() == mIMemory->pointer();
}
} // namespace android
diff --git a/media/libnbaio/Pipe.cpp b/media/libnbaio/Pipe.cpp
index 1c21f9c..6e0ec8c 100644
--- a/media/libnbaio/Pipe.cpp
+++ b/media/libnbaio/Pipe.cpp
@@ -25,19 +25,22 @@
namespace android {
-Pipe::Pipe(size_t maxFrames, NBAIO_Format format) :
+Pipe::Pipe(size_t maxFrames, const NBAIO_Format& format, void *buffer) :
NBAIO_Sink(format),
mMaxFrames(roundup(maxFrames)),
- mBuffer(malloc(mMaxFrames * Format_frameSize(format))),
+ mBuffer(buffer == NULL ? malloc(mMaxFrames * Format_frameSize(format)) : buffer),
mRear(0),
- mReaders(0)
+ mReaders(0),
+ mFreeBufferInDestructor(buffer == NULL)
{
}
Pipe::~Pipe()
{
ALOG_ASSERT(android_atomic_acquire_load(&mReaders) == 0);
- free(mBuffer);
+ if (mFreeBufferInDestructor) {
+ free(mBuffer);
+ }
}
ssize_t Pipe::write(const void *buffer, size_t count)
@@ -52,13 +55,13 @@ ssize_t Pipe::write(const void *buffer, size_t count)
if (CC_LIKELY(written > count)) {
written = count;
}
- memcpy((char *) mBuffer + (rear << mBitShift), buffer, written << mBitShift);
+ memcpy((char *) mBuffer + (rear * mFrameSize), buffer, written * mFrameSize);
if (CC_UNLIKELY(rear + written == mMaxFrames)) {
if (CC_UNLIKELY((count -= written) > rear)) {
count = rear;
}
if (CC_LIKELY(count > 0)) {
- memcpy(mBuffer, (char *) buffer + (written << mBitShift), count << mBitShift);
+ memcpy(mBuffer, (char *) buffer + (written * mFrameSize), count * mFrameSize);
written += count;
}
}
diff --git a/media/libnbaio/PipeReader.cpp b/media/libnbaio/PipeReader.cpp
index d786b84..c8e4953 100644
--- a/media/libnbaio/PipeReader.cpp
+++ b/media/libnbaio/PipeReader.cpp
@@ -59,7 +59,7 @@ ssize_t PipeReader::availableToRead()
return avail;
}
-ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS)
+ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS __unused)
{
ssize_t avail = availableToRead();
if (CC_UNLIKELY(avail <= 0)) {
@@ -76,14 +76,14 @@ ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS)
red = count;
}
// In particular, an overrun during the memcpy will result in reading corrupt data
- memcpy(buffer, (char *) mPipe.mBuffer + (front << mBitShift), red << mBitShift);
+ memcpy(buffer, (char *) mPipe.mBuffer + (front * mFrameSize), red * mFrameSize);
// We could re-read the rear pointer here to detect the corruption, but why bother?
if (CC_UNLIKELY(front + red == mPipe.mMaxFrames)) {
if (CC_UNLIKELY((count -= red) > front)) {
count = front;
}
if (CC_LIKELY(count > 0)) {
- memcpy((char *) buffer + (red << mBitShift), mPipe.mBuffer, count << mBitShift);
+ memcpy((char *) buffer + (red * mFrameSize), mPipe.mBuffer, count * mFrameSize);
red += count;
}
}
diff --git a/media/libnbaio/SourceAudioBufferProvider.cpp b/media/libnbaio/SourceAudioBufferProvider.cpp
index 062fa0f..e21ef48 100644
--- a/media/libnbaio/SourceAudioBufferProvider.cpp
+++ b/media/libnbaio/SourceAudioBufferProvider.cpp
@@ -24,7 +24,7 @@ namespace android {
SourceAudioBufferProvider::SourceAudioBufferProvider(const sp<NBAIO_Source>& source) :
mSource(source),
- // mFrameBitShiftFormat below
+ // mFrameSize below
mAllocated(NULL), mSize(0), mOffset(0), mRemaining(0), mGetCount(0), mFramesReleased(0)
{
ALOG_ASSERT(source != 0);
@@ -37,7 +37,7 @@ SourceAudioBufferProvider::SourceAudioBufferProvider(const sp<NBAIO_Source>& sou
numCounterOffers = 0;
index = source->negotiate(counterOffers, 1, NULL, numCounterOffers);
ALOG_ASSERT(index == 0);
- mFrameBitShift = Format_frameBitShift(source->format());
+ mFrameSize = Format_frameSize(source->format());
}
SourceAudioBufferProvider::~SourceAudioBufferProvider()
@@ -54,14 +54,14 @@ status_t SourceAudioBufferProvider::getNextBuffer(Buffer *buffer, int64_t pts)
if (mRemaining < buffer->frameCount) {
buffer->frameCount = mRemaining;
}
- buffer->raw = (char *) mAllocated + (mOffset << mFrameBitShift);
+ buffer->raw = (char *) mAllocated + (mOffset * mFrameSize);
mGetCount = buffer->frameCount;
return OK;
}
// do we need to reallocate?
if (buffer->frameCount > mSize) {
free(mAllocated);
- mAllocated = malloc(buffer->frameCount << mFrameBitShift);
+ mAllocated = malloc(buffer->frameCount * mFrameSize);
mSize = buffer->frameCount;
}
// read from source
@@ -84,7 +84,7 @@ status_t SourceAudioBufferProvider::getNextBuffer(Buffer *buffer, int64_t pts)
void SourceAudioBufferProvider::releaseBuffer(Buffer *buffer)
{
ALOG_ASSERT((buffer != NULL) &&
- (buffer->raw == (char *) mAllocated + (mOffset << mFrameBitShift)) &&
+ (buffer->raw == (char *) mAllocated + (mOffset * mFrameSize)) &&
(buffer->frameCount <= mGetCount) &&
(mGetCount <= mRemaining) &&
(mOffset + mRemaining <= mSize));
diff --git a/media/libstagefright/AACExtractor.cpp b/media/libstagefright/AACExtractor.cpp
index 4d1072f..196f6ee 100644
--- a/media/libstagefright/AACExtractor.cpp
+++ b/media/libstagefright/AACExtractor.cpp
@@ -219,7 +219,7 @@ sp<MediaSource> AACExtractor::getTrack(size_t index) {
return new AACSource(mDataSource, mMeta, mOffsetVector, mFrameDurationUs);
}
-sp<MetaData> AACExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+sp<MetaData> AACExtractor::getTrackMetaData(size_t index, uint32_t /* flags */) {
if (mInitCheck != OK || index != 0) {
return NULL;
}
@@ -252,7 +252,7 @@ AACSource::~AACSource() {
}
}
-status_t AACSource::start(MetaData *params) {
+status_t AACSource::start(MetaData * /* params */) {
CHECK(!mStarted);
if (mOffsetVector.empty()) {
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index c9bcaba..2e41d80 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -14,6 +14,12 @@
* limitations under the License.
*/
+#include <fcntl.h>
+#include <inttypes.h>
+#include <sys/prctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "AACWriter"
#include <utils/Log.h>
@@ -27,10 +33,6 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/mediarecorder.h>
-#include <sys/prctl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
namespace android {
@@ -59,7 +61,8 @@ AACWriter::AACWriter(int fd)
mPaused(false),
mResumed(false),
mChannelCount(-1),
- mSampleRate(-1) {
+ mSampleRate(-1),
+ mAACProfile(OMX_AUDIO_AACObjectLC) {
}
AACWriter::~AACWriter() {
@@ -77,10 +80,6 @@ status_t AACWriter::initCheck() const {
return mInitCheck;
}
-static int writeInt8(int fd, uint8_t x) {
- return ::write(fd, &x, 1);
-}
-
status_t AACWriter::addSource(const sp<MediaSource> &source) {
if (mInitCheck != OK) {
@@ -111,7 +110,7 @@ status_t AACWriter::addSource(const sp<MediaSource> &source) {
return OK;
}
-status_t AACWriter::start(MetaData *params) {
+status_t AACWriter::start(MetaData * /* params */) {
if (mInitCheck != OK) {
return mInitCheck;
}
@@ -348,7 +347,7 @@ status_t AACWriter::threadFunc() {
mResumed = false;
}
timestampUs -= previousPausedDurationUs;
- ALOGV("time stamp: %lld, previous paused duration: %lld",
+ ALOGV("time stamp: %" PRId64 ", previous paused duration: %" PRId64,
timestampUs, previousPausedDurationUs);
if (timestampUs > maxTimestampUs) {
maxTimestampUs = timestampUs;
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 76a3358..d298cb1 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -17,6 +17,13 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
+#include <inttypes.h>
+#include <utils/Trace.h>
+
#include <media/stagefright/ACodec.h>
#include <binder/MemoryDealer.h>
@@ -25,6 +32,7 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodecList.h>
@@ -35,12 +43,57 @@
#include <media/hardware/HardwareAPI.h>
+#include <OMX_AudioExt.h>
+#include <OMX_VideoExt.h>
#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
#include "include/avc_utils.h"
namespace android {
+// OMX errors are directly mapped into status_t range if
+// there is no corresponding MediaError status code.
+// Use the statusFromOMXError(int32_t omxError) function.
+//
+// Currently this is a direct map.
+// See frameworks/native/include/media/openmax/OMX_Core.h
+//
+// Vendor OMX errors from 0x90000000 - 0x9000FFFF
+// Extension OMX errors from 0x8F000000 - 0x90000000
+// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current)
+//
+
+// returns true if err is a recognized OMX error code.
+// as OMX errors are OMX_S32 values, the parameter is an int32_t
+static inline bool isOMXError(int32_t err) {
+ return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
+}
+
+// converts an OMX error to a status_t
+static inline status_t statusFromOMXError(int32_t omxError) {
+ switch (omxError) {
+ case OMX_ErrorInvalidComponentName:
+ case OMX_ErrorComponentNotFound:
+ return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
+ default:
+ return isOMXError(omxError) ? omxError : 0; // no translation required
+ }
+}
+
+// checks and converts status_t to a non-side-effect status_t
+static inline status_t makeNoSideEffectStatus(status_t err) {
+ switch (err) {
+ // the following errors have side effects and may come
+ // from other code modules. Remap for safety reasons.
+ case INVALID_OPERATION:
+ case DEAD_OBJECT:
+ return UNKNOWN_ERROR;
+ default:
+ return err;
+ }
+}
+
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -62,7 +115,7 @@ struct CodecObserver : public BnOMXObserver {
sp<AMessage> msg = mNotify->dup();
msg->setInt32("type", omx_msg.type);
- msg->setPointer("node", omx_msg.node);
+ msg->setInt32("node", omx_msg.node);
switch (omx_msg.type) {
case omx_message::EVENT:
@@ -75,13 +128,13 @@ struct CodecObserver : public BnOMXObserver {
case omx_message::EMPTY_BUFFER_DONE:
{
- msg->setPointer("buffer", omx_msg.u.buffer_data.buffer);
+ msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
break;
}
case omx_message::FILL_BUFFER_DONE:
{
- msg->setPointer(
+ msg->setInt32(
"buffer", omx_msg.u.extended_buffer_data.buffer);
msg->setInt32(
"range_offset",
@@ -95,12 +148,6 @@ struct CodecObserver : public BnOMXObserver {
msg->setInt64(
"timestamp",
omx_msg.u.extended_buffer_data.timestamp);
- msg->setPointer(
- "platform_private",
- omx_msg.u.extended_buffer_data.platform_private);
- msg->setPointer(
- "data_ptr",
- omx_msg.u.extended_buffer_data.data_ptr);
break;
}
@@ -155,9 +202,7 @@ private:
IOMX::buffer_id bufferID,
size_t rangeOffset, size_t rangeLength,
OMX_U32 flags,
- int64_t timeUs,
- void *platformPrivate,
- void *dataPtr);
+ int64_t timeUs);
void getMoreInputDataIfPossible();
@@ -358,19 +403,26 @@ private:
ACodec::ACodec()
: mQuirks(0),
- mNode(NULL),
+ mNode(0),
mSentFormat(false),
mIsEncoder(false),
mUseMetadataOnEncoderOutput(false),
mShutdownInProgress(false),
+ mExplicitShutdown(false),
mEncoderDelay(0),
mEncoderPadding(0),
+ mRotationDegrees(0),
mChannelMaskPresent(false),
mChannelMask(0),
mDequeueCounter(0),
mStoreMetaDataInOutputBuffers(false),
mMetaDataBuffersToSubmit(0),
- mRepeatFrameDelayUs(-1ll) {
+ mRepeatFrameDelayUs(-1ll),
+ mMaxPtsGapUs(-1ll),
+ mTimePerFrameUs(-1ll),
+ mTimePerCaptureUs(-1ll),
+ mCreateInputBuffersSuspended(false),
+ mTunneled(false) {
mUninitializedState = new UninitializedState(this);
mLoadedState = new LoadedState(this);
mLoadedToIdleState = new LoadedToIdleState(this);
@@ -446,12 +498,28 @@ void ACodec::initiateShutdown(bool keepComponentAllocated) {
sp<AMessage> msg = new AMessage(kWhatShutdown, id());
msg->setInt32("keepComponentAllocated", keepComponentAllocated);
msg->post();
+ if (!keepComponentAllocated) {
+ // ensure shutdown completes in 3 seconds
+ (new AMessage(kWhatReleaseCodecInstance, id()))->post(3000000);
+ }
}
void ACodec::signalRequestIDRFrame() {
(new AMessage(kWhatRequestIDRFrame, id()))->post();
}
+// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+// Some codecs may return input buffers before having them processed.
+// This causes a halt if we already signaled an EOS on the input
+// port. For now keep submitting an output buffer if there was an
+// EOS on the input port, but not yet on the output port.
+void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() {
+ if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
+ mMetaDataBuffersToSubmit > 0) {
+ (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, id()))->post();
+ }
+}
+
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
@@ -474,7 +542,7 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
if (err == OK) {
- ALOGV("[%s] Allocating %lu buffers of size %lu on %s port",
+ ALOGV("[%s] Allocating %u buffers of size %u on %s port",
mComponentName.c_str(),
def.nBufferCountActual, def.nBufferSize,
portIndex == kPortIndexInput ? "input" : "output");
@@ -528,7 +596,7 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
}
sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", ACodec::kWhatBuffersAllocated);
+ notify->setInt32("what", CodecBase::kWhatBuffersAllocated);
notify->setInt32("portIndex", portIndex);
@@ -572,6 +640,27 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(
return err;
}
+ if (mRotationDegrees != 0) {
+ uint32_t transform = 0;
+ switch (mRotationDegrees) {
+ case 0: transform = 0; break;
+ case 90: transform = HAL_TRANSFORM_ROT_90; break;
+ case 180: transform = HAL_TRANSFORM_ROT_180; break;
+ case 270: transform = HAL_TRANSFORM_ROT_270; break;
+ default: transform = 0; break;
+ }
+
+ if (transform > 0) {
+ err = native_window_set_buffers_transform(
+ mNativeWindow.get(), transform);
+ if (err != 0) {
+ ALOGE("native_window_set_buffers_transform failed: %s (%d)",
+ strerror(-err), -err);
+ return err;
+ }
+ }
+ }
+
// Set up the native window.
OMX_U32 usage = 0;
err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
@@ -580,8 +669,9 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(
// XXX: Currently this error is logged, but not fatal.
usage = 0;
}
+ int omxUsage = usage;
- if (mFlags & kFlagIsSecure) {
+ if (mFlags & kFlagIsGrallocUsageProtected) {
usage |= GRALLOC_USAGE_PROTECTED;
}
@@ -604,6 +694,18 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(
}
}
+ int consumerUsage = 0;
+ err = mNativeWindow->query(
+ mNativeWindow.get(), NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+ &consumerUsage);
+ if (err != 0) {
+ ALOGW("failed to get consumer usage bits. ignoring");
+ err = 0;
+ }
+
+ ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec) + %#x(Consumer) = %#x",
+ omxUsage, usage, consumerUsage, usage | consumerUsage);
+ usage |= consumerUsage;
err = native_window_set_usage(
mNativeWindow.get(),
usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
@@ -613,6 +715,21 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(
return err;
}
+ // Exits here for tunneled video playback codecs -- i.e. skips native window
+    // buffer allocation step as this is managed by the tunneled OMX component
+ // itself and explicitly sets def.nBufferCountActual to 0.
+ if (mTunneled) {
+ ALOGV("Tunneled Playback: skipping native window buffer allocation.");
+ def.nBufferCountActual = 0;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+
+ *minUndequeuedBuffers = 0;
+ *bufferCount = 0;
+ *bufferSize = 0;
+ return err;
+ }
+
*minUndequeuedBuffers = 0;
err = mNativeWindow->query(
mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
@@ -624,18 +741,34 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(
return err;
}
- // XXX: Is this the right logic to use? It's not clear to me what the OMX
- // buffer counts refer to - how do they account for the renderer holding on
- // to buffers?
- if (def.nBufferCountActual < def.nBufferCountMin + *minUndequeuedBuffers) {
- OMX_U32 newBufferCount = def.nBufferCountMin + *minUndequeuedBuffers;
+ // FIXME: assume that surface is controlled by app (native window
+ // returns the number for the case when surface is not controlled by app)
+    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
+ // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
+
+ // Use conservative allocation while also trying to reduce starvation
+ //
+ // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
+ // minimum needed for the consumer to be able to work
+ // 2. try to allocate two (2) additional buffers to reduce starvation from
+ // the consumer
+ // plus an extra buffer to account for incorrect minUndequeuedBufs
+ for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
+ OMX_U32 newBufferCount =
+ def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
def.nBufferCountActual = newBufferCount;
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- if (err != OK) {
- ALOGE("[%s] setting nBufferCountActual to %lu failed: %d",
- mComponentName.c_str(), newBufferCount, err);
+ if (err == OK) {
+ *minUndequeuedBuffers += extraBuffers;
+ break;
+ }
+
+ ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
+ mComponentName.c_str(), newBufferCount, err);
+ /* exit condition */
+ if (extraBuffers == 0) {
return err;
}
}
@@ -660,8 +793,9 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {
&bufferCount, &bufferSize, &minUndequeuedBuffers);
if (err != 0)
return err;
+ mNumUndequeuedBuffers = minUndequeuedBuffers;
- ALOGV("[%s] Allocating %lu buffers from a native window of size %lu on "
+ ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
"output port",
mComponentName.c_str(), bufferCount, bufferSize);
@@ -685,14 +819,14 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {
err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
&bufferId);
if (err != 0) {
- ALOGE("registering GraphicBuffer %lu with OMX IL component failed: "
+ ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
"%d", i, err);
break;
}
mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;
- ALOGV("[%s] Registered graphic buffer with ID %p (pointer = %p)",
+ ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
mComponentName.c_str(),
bufferId, graphicBuffer.get());
}
@@ -713,7 +847,10 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {
for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
- cancelBufferToNativeWindow(info);
+ status_t error = cancelBufferToNativeWindow(info);
+ if (err == 0) {
+ err = error;
+ }
}
return err;
@@ -725,8 +862,9 @@ status_t ACodec::allocateOutputMetaDataBuffers() {
&bufferCount, &bufferSize, &minUndequeuedBuffers);
if (err != 0)
return err;
+ mNumUndequeuedBuffers = minUndequeuedBuffers;
- ALOGV("[%s] Allocating %lu meta buffers on output port",
+ ALOGV("[%s] Allocating %u meta buffers on output port",
mComponentName.c_str(), bufferCount);
size_t totalSize = bufferCount * 8;
@@ -750,7 +888,7 @@ status_t ACodec::allocateOutputMetaDataBuffers() {
mBuffers[kPortIndexOutput].push(info);
- ALOGV("[%s] allocated meta buffer with ID %p (pointer = %p)",
+ ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
mComponentName.c_str(), info.mBufferID, mem->pointer());
}
@@ -767,7 +905,7 @@ status_t ACodec::submitOutputMetaDataBuffer() {
if (info == NULL)
return ERROR_IO;
- ALOGV("[%s] submitting output meta buffer ID %p for graphic buffer %p",
+ ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());
--mMetaDataBuffersToSubmit;
@@ -781,23 +919,30 @@ status_t ACodec::submitOutputMetaDataBuffer() {
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
- ALOGV("[%s] Calling cancelBuffer on buffer %p",
+ ALOGV("[%s] Calling cancelBuffer on buffer %u",
mComponentName.c_str(), info->mBufferID);
int err = mNativeWindow->cancelBuffer(
mNativeWindow.get(), info->mGraphicBuffer.get(), -1);
- CHECK_EQ(err, 0);
+ ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
+ mComponentName.c_str(), info->mBufferID);
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
- return OK;
+ return err;
}
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
ANativeWindowBuffer *buf;
- int fenceFd = -1;
CHECK(mNativeWindow.get() != NULL);
+
+ if (mTunneled) {
+ ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
+ " video playback mode mode!");
+ return NULL;
+ }
+
if (native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf) != 0) {
ALOGE("dequeueBuffer failed.");
return NULL;
@@ -891,7 +1036,7 @@ status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
if (portIndex == kPortIndexOutput && mNativeWindow != NULL
&& info->mStatus == BufferInfo::OWNED_BY_US) {
- CHECK_EQ((status_t)OK, cancelBufferToNativeWindow(info));
+ cancelBufferToNativeWindow(info);
}
CHECK_EQ(mOMX->freeBuffer(
@@ -945,12 +1090,16 @@ status_t ACodec::setComponentRole(
"audio_decoder.aac", "audio_encoder.aac" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS,
"audio_decoder.vorbis", "audio_encoder.vorbis" },
+ { MEDIA_MIMETYPE_AUDIO_OPUS,
+ "audio_decoder.opus", "audio_encoder.opus" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW,
"audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW,
"audio_decoder.g711alaw", "audio_encoder.g711alaw" },
{ MEDIA_MIMETYPE_VIDEO_AVC,
"video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_HEVC,
+ "video_decoder.hevc", "video_encoder.hevc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4,
"video_decoder.mpeg4", "video_encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_H263,
@@ -965,6 +1114,12 @@ status_t ACodec::setComponentRole(
"audio_decoder.flac", "audio_encoder.flac" },
{ MEDIA_MIMETYPE_AUDIO_MSGSM,
"audio_decoder.gsm", "audio_encoder.gsm" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG2,
+ "video_decoder.mpeg2", "video_encoder.mpeg2" },
+ { MEDIA_MIMETYPE_AUDIO_AC3,
+ "audio_decoder.ac3", "audio_encoder.ac3" },
+ { MEDIA_MIMETYPE_AUDIO_EAC3,
+ "audio_decoder.eac3", "audio_encoder.eac3" },
};
static const size_t kNumMimeToRole =
@@ -1016,6 +1171,9 @@ status_t ACodec::configureCodec(
encoder = false;
}
+ sp<AMessage> inputFormat = new AMessage();
+ sp<AMessage> outputFormat = mNotify->dup(); // will use this for kWhatOutputFormatChanged
+
mIsEncoder = encoder;
status_t err = setComponentRole(encoder /* isEncoder */, mime);
@@ -1096,84 +1254,258 @@ status_t ACodec::configureCodec(
&mRepeatFrameDelayUs)) {
mRepeatFrameDelayUs = -1ll;
}
+
+ if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
+ mMaxPtsGapUs = -1ll;
+ }
+
+ if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
+ mTimePerCaptureUs = -1ll;
+ }
+
+ if (!msg->findInt32(
+ "create-input-buffers-suspended",
+ (int32_t*)&mCreateInputBuffersSuspended)) {
+ mCreateInputBuffersSuspended = false;
+ }
}
- // Always try to enable dynamic output buffers on native surface
+ // NOTE: we only use native window for video decoders
sp<RefBase> obj;
- int32_t haveNativeWindow = msg->findObject("native-window", &obj) &&
- obj != NULL;
+ bool haveNativeWindow = msg->findObject("native-window", &obj)
+ && obj != NULL && video && !encoder;
mStoreMetaDataInOutputBuffers = false;
- if (!encoder && video && haveNativeWindow) {
- err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE);
- if (err != OK) {
- ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
- mComponentName.c_str(), err);
-
- // if adaptive playback has been requested, try JB fallback
- // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
- // LARGE MEMORY REQUIREMENT
-
- // we will not do adaptive playback on software accessed
- // surfaces as they never had to respond to changes in the
- // crop window, and we don't trust that they will be able to.
- int usageBits = 0;
- bool canDoAdaptivePlayback;
+ if (video && !encoder) {
+ inputFormat->setInt32("adaptive-playback", false);
+
+ int32_t usageProtected;
+ if (msg->findInt32("protected", &usageProtected) && usageProtected) {
+ if (!haveNativeWindow) {
+ ALOGE("protected output buffers must be sent to an ANativeWindow");
+ return PERMISSION_DENIED;
+ }
+ mFlags |= kFlagIsGrallocUsageProtected;
+ mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ }
+ }
+ if (haveNativeWindow) {
+ sp<NativeWindowWrapper> windowWrapper(
+ static_cast<NativeWindowWrapper *>(obj.get()));
+ sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
+
+ // START of temporary support for automatic FRC - THIS WILL BE REMOVED
+ int32_t autoFrc;
+ if (msg->findInt32("auto-frc", &autoFrc)) {
+ bool enabled = autoFrc;
+ OMX_CONFIG_BOOLEANTYPE config;
+ InitOMXParams(&config);
+ config.bEnabled = (OMX_BOOL)enabled;
+ status_t temp = mOMX->setConfig(
+ mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
+ &config, sizeof(config));
+ if (temp == OK) {
+ outputFormat->setInt32("auto-frc", enabled);
+ } else if (enabled) {
+ ALOGI("codec does not support requested auto-frc (err %d)", temp);
+ }
+ }
+ // END of temporary support for automatic FRC
- sp<NativeWindowWrapper> windowWrapper(
- static_cast<NativeWindowWrapper *>(obj.get()));
- sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow();
+ int32_t tunneled;
+ if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
+ tunneled != 0) {
+ ALOGI("Configuring TUNNELED video playback.");
+ mTunneled = true;
- if (nativeWindow->query(
- nativeWindow.get(),
- NATIVE_WINDOW_CONSUMER_USAGE_BITS,
- &usageBits) != OK) {
- canDoAdaptivePlayback = false;
- } else {
- canDoAdaptivePlayback =
- (usageBits &
- (GRALLOC_USAGE_SW_READ_MASK |
- GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
+ int32_t audioHwSync = 0;
+ if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
+ ALOGW("No Audio HW Sync provided for video tunnel");
+ }
+ err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
+ if (err != OK) {
+ ALOGE("configureTunneledVideoPlayback(%d,%p) failed!",
+ audioHwSync, nativeWindow.get());
+ return err;
}
int32_t maxWidth = 0, maxHeight = 0;
- if (canDoAdaptivePlayback &&
- msg->findInt32("max-width", &maxWidth) &&
- msg->findInt32("max-height", &maxHeight)) {
- ALOGV("[%s] prepareForAdaptivePlayback(%ldx%ld)",
- mComponentName.c_str(), maxWidth, maxHeight);
+ if (msg->findInt32("max-width", &maxWidth) &&
+ msg->findInt32("max-height", &maxHeight)) {
err = mOMX->prepareForAdaptivePlayback(
mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
- ALOGW_IF(err != OK,
- "[%s] prepareForAdaptivePlayback failed w/ err %d",
- mComponentName.c_str(), err);
+ if (err != OK) {
+ ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
+ mComponentName.c_str(), err);
+ // allow failure
+ err = OK;
+ } else {
+ inputFormat->setInt32("max-width", maxWidth);
+ inputFormat->setInt32("max-height", maxHeight);
+ inputFormat->setInt32("adaptive-playback", true);
+ }
}
- // allow failure
- err = OK;
} else {
- ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str());
- mStoreMetaDataInOutputBuffers = true;
+ ALOGV("Configuring CPU controlled video playback.");
+ mTunneled = false;
+
+            // Explicitly reset the sideband handle of the window for
+ // non-tunneled video in case the window was previously used
+ // for a tunneled video playback.
+ err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
+ if (err != OK) {
+ ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
+ return err;
+ }
+
+ // Always try to enable dynamic output buffers on native surface
+ err = mOMX->storeMetaDataInBuffers(
+ mNode, kPortIndexOutput, OMX_TRUE);
+ if (err != OK) {
+ ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
+ mComponentName.c_str(), err);
+
+ // if adaptive playback has been requested, try JB fallback
+ // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
+ // LARGE MEMORY REQUIREMENT
+
+ // we will not do adaptive playback on software accessed
+ // surfaces as they never had to respond to changes in the
+ // crop window, and we don't trust that they will be able to.
+ int usageBits = 0;
+ bool canDoAdaptivePlayback;
+
+ if (nativeWindow->query(
+ nativeWindow.get(),
+ NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+ &usageBits) != OK) {
+ canDoAdaptivePlayback = false;
+ } else {
+ canDoAdaptivePlayback =
+ (usageBits &
+ (GRALLOC_USAGE_SW_READ_MASK |
+ GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
+ }
+
+ int32_t maxWidth = 0, maxHeight = 0;
+ if (canDoAdaptivePlayback &&
+ msg->findInt32("max-width", &maxWidth) &&
+ msg->findInt32("max-height", &maxHeight)) {
+ ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
+ mComponentName.c_str(), maxWidth, maxHeight);
+
+ err = mOMX->prepareForAdaptivePlayback(
+ mNode, kPortIndexOutput, OMX_TRUE, maxWidth,
+ maxHeight);
+ ALOGW_IF(err != OK,
+ "[%s] prepareForAdaptivePlayback failed w/ err %d",
+ mComponentName.c_str(), err);
+
+ if (err == OK) {
+ inputFormat->setInt32("max-width", maxWidth);
+ inputFormat->setInt32("max-height", maxHeight);
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+ }
+ // allow failure
+ err = OK;
+ } else {
+ ALOGV("[%s] storeMetaDataInBuffers succeeded",
+ mComponentName.c_str());
+ mStoreMetaDataInOutputBuffers = true;
+ inputFormat->setInt32("adaptive-playback", true);
+ }
+
+ int32_t push;
+ if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
+ && push != 0) {
+ mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ }
}
- int32_t push;
- if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
- && push != 0) {
- mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
+ int32_t rotationDegrees;
+ if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
+ mRotationDegrees = rotationDegrees;
+ } else {
+ mRotationDegrees = 0;
}
}
if (video) {
+ // determine need for software renderer
+ bool usingSwRenderer = false;
+ if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) {
+ usingSwRenderer = true;
+ haveNativeWindow = false;
+ }
+
if (encoder) {
err = setupVideoEncoder(mime, msg);
} else {
- int32_t width, height;
- if (!msg->findInt32("width", &width)
- || !msg->findInt32("height", &height)) {
- err = INVALID_OPERATION;
- } else {
- err = setupVideoDecoder(mime, width, height);
+ err = setupVideoDecoder(mime, msg, haveNativeWindow);
+ }
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (haveNativeWindow) {
+ sp<NativeWindowWrapper> nativeWindow(
+ static_cast<NativeWindowWrapper *>(obj.get()));
+ CHECK(nativeWindow != NULL);
+ mNativeWindow = nativeWindow->getNativeWindow();
+
+ native_window_set_scaling_mode(
+ mNativeWindow.get(), NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+ }
+
+ // initialize native window now to get actual output format
+ // TODO: this is needed for some encoders even though they don't use native window
+ CHECK_EQ((status_t)OK, initNativeWindow());
+
+ // fallback for devices that do not handle flex-YUV for native buffers
+ if (haveNativeWindow) {
+ int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
+ if (msg->findInt32("color-format", &requestedColorFormat) &&
+ requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
+ CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
+ int32_t colorFormat = OMX_COLOR_FormatUnused;
+ OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
+ CHECK(outputFormat->findInt32("color-format", &colorFormat));
+ ALOGD("[%s] Requested output format %#x and got %#x.",
+ mComponentName.c_str(), requestedColorFormat, colorFormat);
+ if (!isFlexibleColorFormat(
+ mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
+ || flexibleEquivalent != (OMX_U32)requestedColorFormat) {
+ // device did not handle flex-YUV request for native window, fall back
+ // to SW renderer
+ ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
+ mNativeWindow.clear();
+ haveNativeWindow = false;
+ usingSwRenderer = true;
+ if (mStoreMetaDataInOutputBuffers) {
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+ mStoreMetaDataInOutputBuffers = false;
+ // TODO: implement adaptive-playback support for bytebuffer mode.
+ // This is done by SW codecs, but most HW codecs don't support it.
+ inputFormat->setInt32("adaptive-playback", false);
+ }
+ if (err == OK) {
+ err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE);
+ }
+ if (mFlags & kFlagIsGrallocUsageProtected) {
+ // fallback is not supported for protected playback
+ err = PERMISSION_DENIED;
+ } else if (err == OK) {
+ err = setupVideoDecoder(mime, msg, false);
+ }
+ }
}
}
+
+ if (usingSwRenderer) {
+ outputFormat->setInt32("using-sw-renderer", 1);
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
int32_t numChannels, sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
@@ -1194,16 +1526,52 @@ status_t ACodec::configureCodec(
err = INVALID_OPERATION;
} else {
int32_t isADTS, aacProfile;
+ int32_t sbrMode;
+ int32_t maxOutputChannelCount;
+ int32_t pcmLimiterEnable;
+ drcParams_t drc;
if (!msg->findInt32("is-adts", &isADTS)) {
isADTS = 0;
}
if (!msg->findInt32("aac-profile", &aacProfile)) {
aacProfile = OMX_AUDIO_AACObjectNull;
}
+ if (!msg->findInt32("aac-sbr-mode", &sbrMode)) {
+ sbrMode = -1;
+ }
+
+ if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
+ maxOutputChannelCount = -1;
+ }
+ if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
+ // value is unknown
+ pcmLimiterEnable = -1;
+ }
+ if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
+ // value is unknown
+ drc.encodedTargetLevel = -1;
+ }
+ if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
+ // value is unknown
+ drc.drcCut = -1;
+ }
+ if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
+ // value is unknown
+ drc.drcBoost = -1;
+ }
+ if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
+ // value is unknown
+ drc.heavyCompression = -1;
+ }
+ if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
+ // value is unknown
+ drc.targetRefLevel = -1;
+ }
err = setupAACCodec(
encoder, numChannels, sampleRate, bitRate, aacProfile,
- isADTS != 0);
+ isADTS != 0, sbrMode, maxOutputChannelCount, drc,
+ pcmLimiterEnable);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
@@ -1218,7 +1586,11 @@ status_t ACodec::configureCodec(
if (!msg->findInt32("channel-count", &numChannels)) {
err = INVALID_OPERATION;
} else {
- err = setupG711Codec(encoder, numChannels);
+ int32_t sampleRate;
+ if (!msg->findInt32("sample-rate", &sampleRate)) {
+ sampleRate = 8000;
+ }
+ err = setupG711Codec(encoder, sampleRate, numChannels);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
int32_t numChannels, sampleRate, compressionLevel = -1;
@@ -1230,8 +1602,10 @@ status_t ACodec::configureCodec(
} else {
if (encoder) {
if (!msg->findInt32(
+ "complexity", &compressionLevel) &&
+ !msg->findInt32(
"flac-compression-level", &compressionLevel)) {
- compressionLevel = 5;// default FLAC compression level
+ compressionLevel = 5; // default FLAC compression level
} else if (compressionLevel < 0) {
ALOGW("compression level %d outside [0..8] range, "
"using 0",
@@ -1256,6 +1630,24 @@ status_t ACodec::configureCodec(
} else {
err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
}
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
+ int32_t numChannels;
+ int32_t sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ err = INVALID_OPERATION;
+ } else {
+ err = setupAC3Codec(encoder, numChannels, sampleRate);
+ }
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
+ int32_t numChannels;
+ int32_t sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ err = INVALID_OPERATION;
+ } else {
+ err = setupEAC3Codec(encoder, numChannels, sampleRate);
+ }
}
if (err != OK) {
@@ -1283,6 +1675,13 @@ status_t ACodec::configureCodec(
err = setMinBufferSize(kPortIndexInput, 8192); // XXX
}
+ mBaseOutputFormat = outputFormat;
+
+ CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK);
+ CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
+ mInputFormat = inputFormat;
+ mOutputFormat = outputFormat;
+
return err;
}
@@ -1351,7 +1750,9 @@ status_t ACodec::selectAudioPortFormat(
status_t ACodec::setupAACCodec(
bool encoder, int32_t numChannels, int32_t sampleRate,
- int32_t bitRate, int32_t aacProfile, bool isADTS) {
+ int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
+ int32_t maxOutputChannelCount, const drcParams_t& drc,
+ int32_t pcmLimiterEnable) {
if (encoder && isADTS) {
return -EINVAL;
}
@@ -1418,6 +1819,32 @@ status_t ACodec::setupAACCodec(
profile.nAACERtools = OMX_AUDIO_AACERNone;
profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
+ switch (sbrMode) {
+ case 0:
+ // disable sbr
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 1:
+ // enable single-rate sbr
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 2:
+ // enable dual-rate sbr
+ profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case -1:
+ // enable both modes -> the codec will decide which mode should be used
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ default:
+ // unsupported sbr mode
+ return BAD_VALUE;
+ }
+
err = mOMX->setParameter(
mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
@@ -1448,8 +1875,100 @@ status_t ACodec::setupAACCodec(
? OMX_AUDIO_AACStreamFormatMP4ADTS
: OMX_AUDIO_AACStreamFormatMP4FF;
+ OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
+ presentation.nMaxOutputChannels = maxOutputChannelCount;
+ presentation.nDrcCut = drc.drcCut;
+ presentation.nDrcBoost = drc.drcBoost;
+ presentation.nHeavyCompression = drc.heavyCompression;
+ presentation.nTargetReferenceLevel = drc.targetRefLevel;
+ presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
+ presentation.nPCMLimiterEnable = pcmLimiterEnable;
+
+ status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ if (res == OK) {
+ // optional parameters, will not cause configuration failure
+ mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
+ &presentation, sizeof(presentation));
+ } else {
+ ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
+ }
+ return res;
+}
+
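For context, a minimal caller-side sketch of how the optional AAC keys parsed in configureCodec() above could be supplied; the key names mirror the findInt32() lookups, while the format object and the concrete values are illustrative assumptions only.

    sp<AMessage> format = new AMessage;                      // hypothetical caller-side format
    format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
    format->setInt32("channel-count", 2);
    format->setInt32("sample-rate", 44100);
    format->setInt32("aac-sbr-mode", 1);                     // single-rate SBR, per the switch in setupAACCodec() above
    format->setInt32("aac-drc-boost-level", 127);            // illustrative value
    format->setInt32("aac-drc-cut-level", 0);                // illustrative value
    // Keys left unset are read back as -1 above, and the component keeps its defaults.
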
+status_t ACodec::setupAC3Codec(
+ bool encoder, int32_t numChannels, int32_t sampleRate) {
+ status_t err = setupRawAudioFormat(
+ encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (encoder) {
+ ALOGW("AC3 encoding is not supported.");
+ return INVALID_OPERATION;
+ }
+
+ OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexInput;
+
+ err = mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &def,
+ sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.nChannels = numChannels;
+ def.nSampleRate = sampleRate;
+
return mOMX->setParameter(
- mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &def,
+ sizeof(def));
+}
+
+status_t ACodec::setupEAC3Codec(
+ bool encoder, int32_t numChannels, int32_t sampleRate) {
+ status_t err = setupRawAudioFormat(
+ encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (encoder) {
+ ALOGW("EAC3 encoding is not supported.");
+ return INVALID_OPERATION;
+ }
+
+ OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexInput;
+
+ err = mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ &def,
+ sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.nChannels = numChannels;
+ def.nSampleRate = sampleRate;
+
+ return mOMX->setParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ &def,
+ sizeof(def));
}
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
@@ -1525,11 +2044,11 @@ status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
1 /* numChannels */);
}
-status_t ACodec::setupG711Codec(bool encoder, int32_t numChannels) {
+status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
CHECK(!encoder); // XXX TODO
return setupRawAudioFormat(
- kPortIndexInput, 8000 /* sampleRate */, numChannels);
+ kPortIndexInput, sampleRate, numChannels);
}
status_t ACodec::setupFlacCodec(
@@ -1608,10 +2127,32 @@ status_t ACodec::setupRawAudioFormat(
mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
}
+status_t ACodec::configureTunneledVideoPlayback(
+ int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
+ native_handle_t* sidebandHandle;
+
+ status_t err = mOMX->configureVideoTunnelMode(
+ mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
+ if (err != OK) {
+ ALOGE("configureVideoTunnelMode failed! (err %d).", err);
+ return err;
+ }
+
+ err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
+ if (err != OK) {
+ ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
+ sidebandHandle, err);
+ return err;
+ }
+
+ return OK;
+}
+
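As a rough caller-side illustration of how the helper above gets used: only "audio-hw-sync" appears in the lookups in configureCodec(), so the tunneling feature key and the sync id source here are assumptions, not part of this patch.

    sp<AMessage> format = new AMessage;                       // hypothetical caller-side format
    format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
    format->setInt32("width", 1920);
    format->setInt32("height", 1080);
    format->setInt32("feature-tunneled-playback", 1);         // assumed feature key
    format->setInt32("audio-hw-sync", audioHwSyncId);          // sync id from the audio framework, assumed
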
status_t ACodec::setVideoPortFormatType(
OMX_U32 portIndex,
OMX_VIDEO_CODINGTYPE compressionFormat,
- OMX_COLOR_FORMATTYPE colorFormat) {
+ OMX_COLOR_FORMATTYPE colorFormat,
+ bool usingNativeBuffers) {
OMX_VIDEO_PARAM_PORTFORMATTYPE format;
InitOMXParams(&format);
format.nPortIndex = portIndex;
@@ -1629,6 +2170,17 @@ status_t ACodec::setVideoPortFormatType(
return err;
}
+ // substitute back flexible color format to codec supported format
+ OMX_U32 flexibleEquivalent;
+ if (compressionFormat == OMX_VIDEO_CodingUnused
+ && isFlexibleColorFormat(
+ mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
+ && colorFormat == flexibleEquivalent) {
+ ALOGI("[%s] using color format %#x in place of %#x",
+ mComponentName.c_str(), format.eColorFormat, colorFormat);
+ colorFormat = format.eColorFormat;
+ }
+
// The following assertion is violated by TI's video decoder.
// CHECK_EQ(format.nIndex, index);
@@ -1667,18 +2219,66 @@ status_t ACodec::setVideoPortFormatType(
return err;
}
-status_t ACodec::setSupportedOutputFormat() {
- OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+// Set optimal output format. OMX component lists output formats in the order
+// of preference, but this got more complicated since the introduction of flexible
+// YUV formats. We support a legacy behavior for applications that do not use
+// surface output, do not specify an output format, but expect a "usable" standard
+// OMX format. SW readable and standard formats must be flex-YUV.
+//
+// Suggested preference order:
+// - optimal format for texture rendering (mediaplayer behavior)
+// - optimal SW readable & texture renderable format (flex-YUV support)
+// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
+// - legacy "usable" standard formats
+//
+// For legacy support, we prefer a standard format, but will settle for a SW readable
+// flex-YUV format.
+status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
InitOMXParams(&format);
format.nPortIndex = kPortIndexOutput;
- format.nIndex = 0;
- status_t err = mOMX->getParameter(
- mNode, OMX_IndexParamVideoPortFormat,
- &format, sizeof(format));
- CHECK_EQ(err, (status_t)OK);
- CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
+ InitOMXParams(&legacyFormat);
+ // this field will change when we find a suitable legacy format
+ legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;
+ for (OMX_U32 index = 0; ; ++index) {
+ format.nIndex = index;
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamVideoPortFormat,
+ &format, sizeof(format));
+ if (err != OK) {
+ // no more formats, pick legacy format if found
+ if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
+ memcpy(&format, &legacyFormat, sizeof(format));
+ break;
+ }
+ return err;
+ }
+ if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
+ return OMX_ErrorBadParameter;
+ }
+ if (!getLegacyFlexibleFormat) {
+ break;
+ }
+ // standard formats that were exposed to users before
+ if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
+ || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
+ || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
+ || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
+ || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
+ break;
+ }
+ // find best legacy non-standard format
+ OMX_U32 flexibleEquivalent;
+ if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
+ && isFlexibleColorFormat(
+ mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
+ &flexibleEquivalent)
+ && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
+ memcpy(&legacyFormat, &format, sizeof(format));
+ }
+ }
return mOMX->setParameter(
mNode, OMX_IndexParamVideoPortFormat,
&format, sizeof(format));
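A short walk-through of the selection loop above for a hypothetical component that lists a vendor-private, flex-YUV-capable format first and OMX_COLOR_FormatYUV420Planar second:

    // getLegacyFlexibleFormat == false (surface output):
    //     index 0 wins immediately; the loop breaks after the first getParameter()
    //     call, preserving the old "first listed format" behavior.
    // getLegacyFlexibleFormat == true (bytebuffer output, no explicit format):
    //     index 0: vendor format, not a standard format -> remembered in legacyFormat
    //              if isFlexibleColorFormat() maps it to OMX_COLOR_FormatYUV420Flexible
    //     index 1: OMX_COLOR_FormatYUV420Planar -> standard format, loop breaks
    //     If the component never lists a standard format, the remembered flex-YUV
    //     format is used once getParameter() runs out of indices.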
@@ -1689,6 +2289,7 @@ static const struct VideoCodingMapEntry {
OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
{ MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
+ { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
{ MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
{ MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
@@ -1729,7 +2330,13 @@ static status_t GetMimeTypeForVideoCoding(
}
status_t ACodec::setupVideoDecoder(
- const char *mime, int32_t width, int32_t height) {
+ const char *mime, const sp<AMessage> &msg, bool haveNativeWindow) {
+ int32_t width, height;
+ if (!msg->findInt32("width", &width)
+ || !msg->findInt32("height", &height)) {
+ return INVALID_OPERATION;
+ }
+
OMX_VIDEO_CODINGTYPE compressionFormat;
status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);
@@ -1744,14 +2351,36 @@ status_t ACodec::setupVideoDecoder(
return err;
}
- err = setSupportedOutputFormat();
+ int32_t tmp;
+ if (msg->findInt32("color-format", &tmp)) {
+ OMX_COLOR_FORMATTYPE colorFormat =
+ static_cast<OMX_COLOR_FORMATTYPE>(tmp);
+ err = setVideoPortFormatType(
+ kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
+ if (err != OK) {
+ ALOGW("[%s] does not support color format %d",
+ mComponentName.c_str(), colorFormat);
+ err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
+ }
+ } else {
+ err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
+ }
if (err != OK) {
return err;
}
+ int32_t frameRateInt;
+ float frameRateFloat;
+ if (!msg->findFloat("frame-rate", &frameRateFloat)) {
+ if (!msg->findInt32("frame-rate", &frameRateInt)) {
+ frameRateInt = -1;
+ }
+ frameRateFloat = (float)frameRateInt;
+ }
+
err = setVideoFormatOnPort(
- kPortIndexInput, width, height, compressionFormat);
+ kPortIndexInput, width, height, compressionFormat, frameRateFloat);
if (err != OK) {
return err;
@@ -1835,11 +2464,16 @@ status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
return INVALID_OPERATION;
}
frameRate = (float)tmp;
+ mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
}
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
- video_def->eColorFormat = colorFormat;
+ // this is redundant as it was already set up in setVideoPortFormatType
+ // FIXME for now skip this only for flexible YUV formats
+ if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
+ video_def->eColorFormat = colorFormat;
+ }
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
@@ -1909,6 +2543,10 @@ status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
err = setupAVCEncoderParameters(msg);
break;
+ case OMX_VIDEO_CodingHEVC:
+ err = setupHEVCEncoderParameters(msg);
+ break;
+
case OMX_VIDEO_CodingVP8:
case OMX_VIDEO_CodingVP9:
err = setupVPXEncoderParameters(msg);
@@ -1967,7 +2605,6 @@ static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
return 0;
}
OMX_U32 ret = frameRate * iFramesInterval;
- CHECK(ret > 1);
return ret;
}
@@ -2137,6 +2774,58 @@ status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
return setupErrorCorrectionParameters();
}
+// static
+int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
+ int width, int height, int rate, int bitrate,
+ OMX_VIDEO_AVCPROFILETYPE profile) {
+ // convert bitrate to main/baseline profile kbps equivalent
+ switch (profile) {
+ case OMX_VIDEO_AVCProfileHigh10:
+ bitrate = divUp(bitrate, 3000); break;
+ case OMX_VIDEO_AVCProfileHigh:
+ bitrate = divUp(bitrate, 1250); break;
+ default:
+ bitrate = divUp(bitrate, 1000); break;
+ }
+
+ // convert size and rate to MBs
+ width = divUp(width, 16);
+ height = divUp(height, 16);
+ int mbs = width * height;
+ rate *= mbs;
+ int maxDimension = max(width, height);
+
+ static const int limits[][5] = {
+ /* MBps MB dim bitrate level */
+ { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 },
+ { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b },
+ { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 },
+ { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 },
+ { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 },
+ { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 },
+ { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 },
+ { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 },
+ { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 },
+ { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 },
+ { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 },
+ { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 },
+ { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 },
+ { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 },
+ { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 },
+ { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
+ { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
+ };
+
+ for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
+ const int (&limit)[5] = limits[i];
+ if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
+ && bitrate <= limit[3]) {
+ return limit[4];
+ }
+ }
+ return 0;
+}
+
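To make the table concrete, a rough worked example, assuming (as in the rest of this file) that bitrate is given in bits per second:

    // 1080p30 High profile at 10 Mbps:
    //     bitrate      = divUp(10000000, 1250) = 8000   (High-profile kbps equivalent)
    //     width        = divUp(1920, 16) = 120 MBs, height = divUp(1080, 16) = 68 MBs
    //     mbs          = 120 * 68 = 8160, rate = 30 * 8160 = 244800 MB/s
    //     maxDimension = 120
    // Levels 3.1 and 3.2 fail the MB/s limit; the first row that fits is Level 4.
    int level = ACodec::getAVCLevelFor(
            1920, 1080, 30 /* fps */, 10000000 /* bps */, OMX_VIDEO_AVCProfileHigh);
    // level == OMX_VIDEO_AVCLevel4 with the limits tabulated above.
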
status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
int32_t bitrate, iFrameInterval;
if (!msg->findInt32("bitrate", &bitrate)
@@ -2245,14 +2934,139 @@ status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
return configureBitrate(bitrate, bitrateMode);
}
+status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
+ int32_t bitrate, iFrameInterval;
+ if (!msg->findInt32("bitrate", &bitrate)
+ || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
+ return INVALID_OPERATION;
+ }
+
+ OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ OMX_VIDEO_PARAM_HEVCTYPE hevcType;
+ InitOMXParams(&hevcType);
+ hevcType.nPortIndex = kPortIndexOutput;
+
+ status_t err = OK;
+ err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
+ if (err != OK) {
+ return err;
+ }
+
+ int32_t profile;
+ if (msg->findInt32("profile", &profile)) {
+ int32_t level;
+ if (!msg->findInt32("level", &level)) {
+ return INVALID_OPERATION;
+ }
+
+ err = verifySupportForProfileAndLevel(profile, level);
+ if (err != OK) {
+ return err;
+ }
+
+ hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
+ hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
+ }
+
+ // TODO: Need OMX structure definition for setting iFrameInterval
+
+ err = mOMX->setParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
+ if (err != OK) {
+ return err;
+ }
+
+ return configureBitrate(bitrate, bitrateMode);
+}
+
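A small usage sketch for the profile/level pair checked above; the format object and the OMX enum names are assumptions (taken from the OMX video extension header), not part of this patch.

    format->setInt32("bitrate", 5000000);
    format->setInt32("i-frame-interval", 2);
    format->setInt32("profile", OMX_VIDEO_HEVCProfileMain);      // assumed enum name
    format->setInt32("level", OMX_VIDEO_HEVCMainTierLevel41);    // assumed enum name
    // "profile" without a matching "level" is rejected above with INVALID_OPERATION.
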
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
int32_t bitrate;
+ int32_t iFrameInterval = 0;
+ size_t tsLayers = 0;
+ OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
+ OMX_VIDEO_VPXTemporalLayerPatternNone;
+    static const uint32_t kVp8LayerRateAllocation
+ [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
+ [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
+ {100, 100, 100}, // 1 layer
+ { 60, 100, 100}, // 2 layers {60%, 40%}
+ { 40, 60, 100}, // 3 layers {40%, 20%, 40%}
+ };
if (!msg->findInt32("bitrate", &bitrate)) {
return INVALID_OPERATION;
}
+ msg->findInt32("i-frame-interval", &iFrameInterval);
OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ AString tsSchema;
+ if (msg->findString("ts-schema", &tsSchema)) {
+ if (tsSchema == "webrtc.vp8.1-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 1;
+ } else if (tsSchema == "webrtc.vp8.2-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 2;
+ } else if (tsSchema == "webrtc.vp8.3-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 3;
+ } else {
+ ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str());
+ }
+ }
+
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
+ InitOMXParams(&vp8type);
+ vp8type.nPortIndex = kPortIndexOutput;
+ status_t err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type, sizeof(vp8type));
+
+ if (err == OK) {
+ if (iFrameInterval > 0) {
+ vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);
+ }
+ vp8type.eTemporalPattern = pattern;
+ vp8type.nTemporalLayerCount = tsLayers;
+ if (tsLayers > 0) {
+ for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
+ vp8type.nTemporalLayerBitrateRatio[i] =
+                        kVp8LayerRateAllocation[tsLayers - 1][i];
+ }
+ }
+ if (bitrateMode == OMX_Video_ControlRateConstant) {
+ vp8type.nMinQuantizer = 2;
+ vp8type.nMaxQuantizer = 63;
+ }
+
+ err = mOMX->setParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type, sizeof(vp8type));
+ if (err != OK) {
+ ALOGW("Extended VP8 parameters set failed: %d", err);
+ }
+ }
+
return configureBitrate(bitrate, bitrateMode);
}
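As a brief usage sketch (the format object is assumed), the temporal-layer setup above is driven entirely by the "ts-schema" string:

    format->setString("ts-schema", "webrtc.vp8.2-layer");
    // With the rate-allocation table above, the base layer is capped at 60% of the
    // total bitrate and the enhancement layer receives the remaining 40%.
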
@@ -2330,7 +3144,8 @@ status_t ACodec::setupErrorCorrectionParameters() {
status_t ACodec::setVideoFormatOnPort(
OMX_U32 portIndex,
- int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat) {
+ int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
+ float frameRate) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
@@ -2358,6 +3173,9 @@ status_t ACodec::setVideoFormatOnPort(
if (portIndex == kPortIndexInput) {
video_def->eCompressionFormat = compressionFormat;
video_def->eColorFormat = OMX_COLOR_FormatUnused;
+ if (frameRate >= 0) {
+ video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
+ }
}
err = mOMX->setParameter(
@@ -2408,19 +3226,7 @@ void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
return;
}
- int minUndequeuedBufs = 0;
- status_t err = mNativeWindow->query(
- mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
- &minUndequeuedBufs);
-
- if (err != OK) {
- ALOGE("[%s] NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
- mComponentName.c_str(), strerror(-err), -err);
-
- minUndequeuedBufs = 0;
- }
-
- while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs
+ while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
&& dequeueBufferFromNativeWindow() != NULL) {
// these buffers will be submitted as regular buffers; account for this
if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) {
@@ -2436,7 +3242,7 @@ bool ACodec::allYourBuffersAreBelongToUs(
if (info->mStatus != BufferInfo::OWNED_BY_US
&& info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
- ALOGV("[%s] Buffer %p on port %ld still has status %d",
+ ALOGV("[%s] Buffer %u on port %u still has status %d",
mComponentName.c_str(),
info->mBufferID, portIndex, info->mStatus);
return false;
@@ -2452,7 +3258,6 @@ bool ACodec::allYourBuffersAreBelongToUs() {
}
void ACodec::deferMessage(const sp<AMessage> &msg) {
- bool wasEmptyBefore = mDeferredQueue.empty();
mDeferredQueue.push_back(msg);
}
@@ -2466,79 +3271,329 @@ void ACodec::processDeferredMessages() {
}
}
-void ACodec::sendFormatChange(const sp<AMessage> &reply) {
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
+// static
+bool ACodec::describeDefaultColorFormat(DescribeColorFormatParams &params) {
+ MediaImage &image = params.sMediaImage;
+ memset(&image, 0, sizeof(image));
+
+ image.mType = MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN;
+ image.mNumPlanes = 0;
+
+ const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
+ image.mWidth = params.nFrameWidth;
+ image.mHeight = params.nFrameHeight;
+
+ // only supporting YUV420
+ if (fmt != OMX_COLOR_FormatYUV420Planar &&
+ fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
+ fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
+ fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
+ fmt != HAL_PIXEL_FORMAT_YV12) {
+ ALOGW("do not know color format 0x%x = %d", fmt, fmt);
+ return false;
+ }
+
+ // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
+ if (params.nStride != 0 && params.nSliceHeight == 0) {
+ ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
+ params.nFrameHeight);
+ params.nSliceHeight = params.nFrameHeight;
+ }
+
+ // we need stride and slice-height to be non-zero
+ if (params.nStride == 0 || params.nSliceHeight == 0) {
+ ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
+ fmt, fmt, params.nStride, params.nSliceHeight);
+ return false;
+ }
+
+ // set-up YUV format
+ image.mType = MediaImage::MEDIA_IMAGE_TYPE_YUV;
+ image.mNumPlanes = 3;
+ image.mBitDepth = 8;
+ image.mPlane[image.Y].mOffset = 0;
+ image.mPlane[image.Y].mColInc = 1;
+ image.mPlane[image.Y].mRowInc = params.nStride;
+ image.mPlane[image.Y].mHorizSubsampling = 1;
+ image.mPlane[image.Y].mVertSubsampling = 1;
+
+ switch ((int)fmt) {
+ case HAL_PIXEL_FORMAT_YV12:
+ if (params.bUsingNativeBuffers) {
+ size_t ystride = align(params.nStride, 16);
+ size_t cstride = align(params.nStride / 2, 16);
+ image.mPlane[image.Y].mRowInc = ystride;
+
+ image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
+ image.mPlane[image.V].mColInc = 1;
+ image.mPlane[image.V].mRowInc = cstride;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+
+ image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
+ + (cstride * params.nSliceHeight / 2);
+ image.mPlane[image.U].mColInc = 1;
+ image.mPlane[image.U].mRowInc = cstride;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+ break;
+ } else {
+ // fall through as YV12 is used for YUV420Planar by some codecs
+ }
+
+ case OMX_COLOR_FormatYUV420Planar:
+ case OMX_COLOR_FormatYUV420PackedPlanar:
+ image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
+ image.mPlane[image.U].mColInc = 1;
+ image.mPlane[image.U].mRowInc = params.nStride / 2;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+
+ image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
+ + (params.nStride * params.nSliceHeight / 4);
+ image.mPlane[image.V].mColInc = 1;
+ image.mPlane[image.V].mRowInc = params.nStride / 2;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+ break;
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
+ case OMX_COLOR_FormatYUV420PackedSemiPlanar:
+ // NV12
+ image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
+ image.mPlane[image.U].mColInc = 2;
+ image.mPlane[image.U].mRowInc = params.nStride;
+ image.mPlane[image.U].mHorizSubsampling = 2;
+ image.mPlane[image.U].mVertSubsampling = 2;
+
+ image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
+ image.mPlane[image.V].mColInc = 2;
+ image.mPlane[image.V].mRowInc = params.nStride;
+ image.mPlane[image.V].mHorizSubsampling = 2;
+ image.mPlane[image.V].mVertSubsampling = 2;
+ break;
+
+ default:
+ TRESPASS();
+ }
+ return true;
+}
+
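To make the plane arithmetic above concrete, a rough worked layout for a 320x240 OMX_COLOR_FormatYUV420Planar buffer with nStride = 320 and nSliceHeight = 240 (values are illustrative):

    // Y plane: offset 0,     rowInc 320, colInc 1
    // U plane: offset 76800, rowInc 160, colInc 1   (nStride * nSliceHeight)
    // V plane: offset 96000, rowInc 160, colInc 1   (U offset + nStride * nSliceHeight / 4)
    // Both chroma planes are 2x2 subsampled, so the whole image spans
    // 320 * 240 * 3 / 2 = 115200 bytes.
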
+// static
+bool ACodec::describeColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ DescribeColorFormatParams &describeParams)
+{
+ OMX_INDEXTYPE describeColorFormatIndex;
+ if (omx->getExtensionIndex(
+ node, "OMX.google.android.index.describeColorFormat",
+ &describeColorFormatIndex) != OK ||
+ omx->getParameter(
+ node, describeColorFormatIndex,
+ &describeParams, sizeof(describeParams)) != OK) {
+ return describeDefaultColorFormat(describeParams);
+ }
+ return describeParams.sMediaImage.mType !=
+ MediaImage::MEDIA_IMAGE_TYPE_UNKNOWN;
+}
+
+// static
+bool ACodec::isFlexibleColorFormat(
+ const sp<IOMX> &omx, IOMX::node_id node,
+ uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
+ DescribeColorFormatParams describeParams;
+ InitOMXParams(&describeParams);
+ describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
+ // reasonable dummy values
+ describeParams.nFrameWidth = 128;
+ describeParams.nFrameHeight = 128;
+ describeParams.nStride = 128;
+ describeParams.nSliceHeight = 128;
+ describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
+
+ CHECK(flexibleEquivalent != NULL);
+
+ if (!describeColorFormat(omx, node, describeParams)) {
+ return false;
+ }
+
+ const MediaImage &img = describeParams.sMediaImage;
+ if (img.mType == MediaImage::MEDIA_IMAGE_TYPE_YUV) {
+ if (img.mNumPlanes != 3 ||
+ img.mPlane[img.Y].mHorizSubsampling != 1 ||
+ img.mPlane[img.Y].mVertSubsampling != 1) {
+ return false;
+ }
+
+ // YUV 420
+ if (img.mPlane[img.U].mHorizSubsampling == 2
+ && img.mPlane[img.U].mVertSubsampling == 2
+ && img.mPlane[img.V].mHorizSubsampling == 2
+ && img.mPlane[img.V].mVertSubsampling == 2) {
+ // possible flexible YUV420 format
+ if (img.mBitDepth <= 8) {
+ *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
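A minimal sketch of how the helper above is typically consulted; the videoDef pointer is assumed to come from a port definition, as in getPortFormat() below:

    OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
    if (ACodec::isFlexibleColorFormat(
            mOMX, mNode, videoDef->eColorFormat,
            false /* usingNativeBuffers */, &flexibleEquivalent)) {
        // Any 8-bit 4:2:0 layout the component can describe is reported to the
        // app as the single flexible format.
        ALOGV("%#x is usable as OMX_COLOR_FormatYUV420Flexible", videoDef->eColorFormat);
    }
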
+status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
+    // TODO: catch errors and return them instead of using CHECK
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
- def.nPortIndex = kPortIndexOutput;
+ def.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
(status_t)OK);
- CHECK_EQ((int)def.eDir, (int)OMX_DirOutput);
+ CHECK_EQ((int)def.eDir,
+ (int)(portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput));
switch (def.eDomain) {
case OMX_PortDomainVideo:
{
OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
+ switch ((int)videoDef->eCompressionFormat) {
+ case OMX_VIDEO_CodingUnused:
+ {
+ CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
+ notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+
+ notify->setInt32("stride", videoDef->nStride);
+ notify->setInt32("slice-height", videoDef->nSliceHeight);
+ notify->setInt32("color-format", videoDef->eColorFormat);
+
+ if (mNativeWindow == NULL) {
+ DescribeColorFormatParams describeParams;
+ InitOMXParams(&describeParams);
+ describeParams.eColorFormat = videoDef->eColorFormat;
+ describeParams.nFrameWidth = videoDef->nFrameWidth;
+ describeParams.nFrameHeight = videoDef->nFrameHeight;
+ describeParams.nStride = videoDef->nStride;
+ describeParams.nSliceHeight = videoDef->nSliceHeight;
+ describeParams.bUsingNativeBuffers = OMX_FALSE;
+
+ if (describeColorFormat(mOMX, mNode, describeParams)) {
+ notify->setBuffer(
+ "image-data",
+ ABuffer::CreateAsCopy(
+ &describeParams.sMediaImage,
+ sizeof(describeParams.sMediaImage)));
+
+ MediaImage *img = &describeParams.sMediaImage;
+ ALOGV("[%s] MediaImage { F(%zux%zu) @%zu+%zu+%zu @%zu+%zu+%zu @%zu+%zu+%zu }",
+ mComponentName.c_str(), img->mWidth, img->mHeight,
+ img->mPlane[0].mOffset, img->mPlane[0].mColInc, img->mPlane[0].mRowInc,
+ img->mPlane[1].mOffset, img->mPlane[1].mColInc, img->mPlane[1].mRowInc,
+ img->mPlane[2].mOffset, img->mPlane[2].mColInc, img->mPlane[2].mRowInc);
+ }
+ }
- AString mime;
- if (!mIsEncoder) {
- notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
- } else if (GetMimeTypeForVideoCoding(
- videoDef->eCompressionFormat, &mime) != OK) {
- notify->setString("mime", "application/octet-stream");
- } else {
- notify->setString("mime", mime.c_str());
- }
+ if (portIndex != kPortIndexOutput) {
+ // TODO: also get input crop
+ break;
+ }
- notify->setInt32("width", videoDef->nFrameWidth);
- notify->setInt32("height", videoDef->nFrameHeight);
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = portIndex;
+
+ if (mOMX->getConfig(
+ mNode,
+ (portIndex == kPortIndexOutput ?
+ OMX_IndexConfigCommonOutputCrop :
+ OMX_IndexConfigCommonInputCrop),
+ &rect, sizeof(rect)) != OK) {
+ rect.nLeft = 0;
+ rect.nTop = 0;
+ rect.nWidth = videoDef->nFrameWidth;
+ rect.nHeight = videoDef->nFrameHeight;
+ }
- if (!mIsEncoder) {
- notify->setInt32("stride", videoDef->nStride);
- notify->setInt32("slice-height", videoDef->nSliceHeight);
- notify->setInt32("color-format", videoDef->eColorFormat);
-
- OMX_CONFIG_RECTTYPE rect;
- InitOMXParams(&rect);
- rect.nPortIndex = kPortIndexOutput;
-
- if (mOMX->getConfig(
- mNode, OMX_IndexConfigCommonOutputCrop,
- &rect, sizeof(rect)) != OK) {
- rect.nLeft = 0;
- rect.nTop = 0;
- rect.nWidth = videoDef->nFrameWidth;
- rect.nHeight = videoDef->nFrameHeight;
- }
+ CHECK_GE(rect.nLeft, 0);
+ CHECK_GE(rect.nTop, 0);
+ CHECK_GE(rect.nWidth, 0u);
+ CHECK_GE(rect.nHeight, 0u);
+ CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
+ CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
- CHECK_GE(rect.nLeft, 0);
- CHECK_GE(rect.nTop, 0);
- CHECK_GE(rect.nWidth, 0u);
- CHECK_GE(rect.nHeight, 0u);
- CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
- CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
-
- notify->setRect(
- "crop",
- rect.nLeft,
- rect.nTop,
- rect.nLeft + rect.nWidth - 1,
- rect.nTop + rect.nHeight - 1);
-
- if (mNativeWindow != NULL) {
- reply->setRect(
+ notify->setRect(
"crop",
rect.nLeft,
rect.nTop,
- rect.nLeft + rect.nWidth,
- rect.nTop + rect.nHeight);
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+
+ break;
+ }
+
+ case OMX_VIDEO_CodingVP8:
+ case OMX_VIDEO_CodingVP9:
+ {
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
+ InitOMXParams(&vp8type);
+ vp8type.nPortIndex = kPortIndexOutput;
+ status_t err = mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type,
+ sizeof(vp8type));
+
+ if (err == OK) {
+ AString tsSchema = "none";
+ if (vp8type.eTemporalPattern
+ == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ switch (vp8type.nTemporalLayerCount) {
+ case 1:
+ {
+ tsSchema = "webrtc.vp8.1-layer";
+ break;
+ }
+ case 2:
+ {
+ tsSchema = "webrtc.vp8.2-layer";
+ break;
+ }
+ case 3:
+ {
+ tsSchema = "webrtc.vp8.3-layer";
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ }
+ }
+ notify->setString("ts-schema", tsSchema);
+ }
+ // Fall through to set up mime.
+ }
+
+ default:
+ {
+ CHECK(mIsEncoder ^ (portIndex == kPortIndexInput));
+ AString mime;
+ if (GetMimeTypeForVideoCoding(
+ videoDef->eCompressionFormat, &mime) != OK) {
+ notify->setString("mime", "application/octet-stream");
+ } else {
+ notify->setString("mime", mime.c_str());
+ }
+ break;
}
}
+ notify->setInt32("width", videoDef->nFrameWidth);
+ notify->setInt32("height", videoDef->nFrameHeight);
+ ALOGV("[%s] %s format is %s", mComponentName.c_str(),
+ portIndex == kPortIndexInput ? "input" : "output",
+ notify->debugString().c_str());
+
break;
}
@@ -2546,12 +3601,12 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
- switch (audioDef->eEncoding) {
+ switch ((int)audioDef->eEncoding) {
case OMX_AUDIO_CodingPCM:
{
OMX_AUDIO_PARAM_PCMMODETYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioPcm,
@@ -2571,20 +3626,6 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
notify->setInt32("channel-count", params.nChannels);
notify->setInt32("sample-rate", params.nSamplingRate);
- if (mEncoderDelay + mEncoderPadding) {
- size_t frameSize = params.nChannels * sizeof(int16_t);
- if (mSkipCutBuffer != NULL) {
- size_t prevbufsize = mSkipCutBuffer->size();
- if (prevbufsize != 0) {
- ALOGW("Replacing SkipCutBuffer holding %d "
- "bytes",
- prevbufsize);
- }
- }
- mSkipCutBuffer = new SkipCutBuffer(
- mEncoderDelay * frameSize,
- mEncoderPadding * frameSize);
- }
if (mChannelMaskPresent) {
notify->setInt32("channel-mask", mChannelMask);
@@ -2596,7 +3637,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PARAM_AACPROFILETYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioAac,
@@ -2613,7 +3654,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PARAM_AMRTYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioAmr,
@@ -2639,7 +3680,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PARAM_FLACTYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioFlac,
@@ -2652,7 +3693,139 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
break;
}
+ case OMX_AUDIO_CodingMP3:
+ {
+ OMX_AUDIO_PARAM_MP3TYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioMp3,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingVORBIS:
+ {
+ OMX_AUDIO_PARAM_VORBISTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioVorbis,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAndroidAC3:
+ {
+ OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ((status_t)OK, mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &params,
+ sizeof(params)));
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAndroidEAC3:
+ {
+ OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ((status_t)OK, mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
+ &params,
+ sizeof(params)));
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAndroidOPUS:
+ {
+ OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ((status_t)OK, mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
+ &params,
+ sizeof(params)));
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingG711:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ((status_t)OK, mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioPcm,
+ &params,
+ sizeof(params)));
+
+ const char *mime = NULL;
+ if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
+ mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
+ } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
+ mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
+ } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
+ mime = MEDIA_MIMETYPE_AUDIO_RAW;
+ }
+ notify->setString("mime", mime);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSamplingRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingGSMFR:
+ {
+                OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
+ notify->setInt32("channel-count", params.nChannels);
+                notify->setInt32("sample-rate", params.nSamplingRate);
+ break;
+ }
+
default:
+ ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding);
TRESPASS();
}
break;
@@ -2662,6 +3835,43 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
TRESPASS();
}
+ return OK;
+}
+
+void ACodec::sendFormatChange(const sp<AMessage> &reply) {
+ sp<AMessage> notify = mBaseOutputFormat->dup();
+ notify->setInt32("what", kWhatOutputFormatChanged);
+
+ CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK);
+
+ AString mime;
+ CHECK(notify->findString("mime", &mime));
+
+ int32_t left, top, right, bottom;
+ if (mime == MEDIA_MIMETYPE_VIDEO_RAW &&
+ mNativeWindow != NULL &&
+ notify->findRect("crop", &left, &top, &right, &bottom)) {
+ // notify renderer of the crop change
+ // NOTE: native window uses extended right-bottom coordinate
+ reply->setRect("crop", left, top, right + 1, bottom + 1);
+ } else if (mime == MEDIA_MIMETYPE_AUDIO_RAW &&
+ (mEncoderDelay || mEncoderPadding)) {
+ int32_t channelCount;
+ CHECK(notify->findInt32("channel-count", &channelCount));
+ size_t frameSize = channelCount * sizeof(int16_t);
+ if (mSkipCutBuffer != NULL) {
+ size_t prevbufsize = mSkipCutBuffer->size();
+ if (prevbufsize != 0) {
+                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
+ }
+ }
+ mSkipCutBuffer = new SkipCutBuffer(
+ mEncoderDelay * frameSize,
+ mEncoderPadding * frameSize);
+ }
+
notify->post();
mSentFormat = true;
@@ -2669,9 +3879,19 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", ACodec::kWhatError);
- notify->setInt32("omx-error", error);
+ notify->setInt32("what", CodecBase::kWhatError);
+ ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);
+
+ if (internalError == UNKNOWN_ERROR) { // find better error code
+ const status_t omxStatus = statusFromOMXError(error);
+ if (omxStatus != 0) {
+ internalError = omxStatus;
+ } else {
+ ALOGW("Invalid OMX error %#x", error);
+ }
+ }
notify->setInt32("err", internalError);
+ notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
notify->post();
}
@@ -2745,7 +3965,6 @@ status_t ACodec::pushBlankBuffersToNativeWindow() {
    // on the screen and then been replaced, so any previous video frames are
// guaranteed NOT to be currently displayed.
for (int i = 0; i < numBufs + 1; i++) {
- int fenceFd = -1;
err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb);
if (err != NO_ERROR) {
ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)",
@@ -2868,7 +4087,8 @@ ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
mCodec(codec) {
}
-ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(OMX_U32 portIndex) {
+ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
+ OMX_U32 /* portIndex */) {
return KEEP_BUFFERS;
}
@@ -2894,6 +4114,7 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
case ACodec::kWhatCreateInputSurface:
case ACodec::kWhatSignalEndOfInputStream:
{
+ // This may result in an app illegal state exception.
ALOGE("Message 0x%x was not handled", msg->what());
mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION);
return true;
@@ -2901,11 +4122,25 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
case ACodec::kWhatOMXDied:
{
+ // This will result in kFlagSawMediaServerDie handling in MediaCodec.
ALOGE("OMX/mediaserver died, signalling error!");
mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT);
break;
}
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ ALOGI("[%s] forcing the release of codec",
+ mCodec->mComponentName.c_str());
+ status_t err = mCodec->mOMX->freeNode(mCodec->mNode);
+            ALOGE_IF(err != OK, "[%s] failed to release codec instance: err=%d",
+                       mCodec->mComponentName.c_str(), err);
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+ break;
+ }
+
default:
return false;
}
@@ -2917,8 +4152,16 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
int32_t type;
CHECK(msg->findInt32("type", &type));
+ // there is a possibility that this is an outstanding message for a
+ // codec that we have already destroyed
+ if (mCodec->mNode == NULL) {
+ ALOGI("ignoring message as already freed component: %s",
+ msg->debugString().c_str());
+ return true;
+ }
+
IOMX::node_id nodeID;
- CHECK(msg->findPointer("node", &nodeID));
+ CHECK(msg->findInt32("node", (int32_t*)&nodeID));
CHECK_EQ(nodeID, mCodec->mNode);
switch (type) {
@@ -2949,7 +4192,7 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
case omx_message::EMPTY_BUFFER_DONE:
{
IOMX::buffer_id bufferID;
- CHECK(msg->findPointer("buffer", &bufferID));
+ CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
return onOMXEmptyBufferDone(bufferID);
}
@@ -2957,27 +4200,21 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
case omx_message::FILL_BUFFER_DONE:
{
IOMX::buffer_id bufferID;
- CHECK(msg->findPointer("buffer", &bufferID));
+ CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
int32_t rangeOffset, rangeLength, flags;
int64_t timeUs;
- void *platformPrivate;
- void *dataPtr;
CHECK(msg->findInt32("range_offset", &rangeOffset));
CHECK(msg->findInt32("range_length", &rangeLength));
CHECK(msg->findInt32("flags", &flags));
CHECK(msg->findInt64("timestamp", &timeUs));
- CHECK(msg->findPointer("platform_private", &platformPrivate));
- CHECK(msg->findPointer("data_ptr", &dataPtr));
return onOMXFillBufferDone(
bufferID,
(size_t)rangeOffset, (size_t)rangeLength,
(OMX_U32)flags,
- timeUs,
- platformPrivate,
- dataPtr);
+ timeUs);
}
default:
@@ -2997,7 +4234,13 @@ bool ACodec::BaseState::onOMXEvent(
ALOGE("[%s] ERROR(0x%08lx)", mCodec->mComponentName.c_str(), data1);
- mCodec->signalError((OMX_ERRORTYPE)data1);
+ // verify OMX component sends back an error we expect.
+ OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
+ if (!isOMXError(omxError)) {
+ ALOGW("Invalid OMX error %#x", omxError);
+ omxError = OMX_ErrorUndefined;
+ }
+ mCodec->signalError(omxError);
return true;
}
@@ -3012,23 +4255,12 @@ bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) {
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT);
info->mStatus = BufferInfo::OWNED_BY_US;
- const sp<AMessage> &bufferMeta = info->mData->meta();
- void *mediaBuffer;
- if (bufferMeta->findPointer("mediaBuffer", &mediaBuffer)
- && mediaBuffer != NULL) {
- // We're in "store-metadata-in-buffers" mode, the underlying
- // OMX component had access to data that's implicitly refcounted
- // by this "mediaBuffer" object. Now that the OMX component has
- // told us that it's done with the input buffer, we can decrement
- // the mediaBuffer's reference count.
-
- ALOGV("releasing mbuf %p", mediaBuffer);
-
- ((MediaBuffer *)mediaBuffer)->release();
- mediaBuffer = NULL;
-
- bufferMeta->setPointer("mediaBuffer", NULL);
- }
+ // We're in "store-metadata-in-buffers" mode, the underlying
+ // OMX component had access to data that's implicitly refcounted
+ // by this "MediaBuffer" object. Now that the OMX component has
+ // told us that it's done with the input buffer, we can decrement
+ // the mediaBuffer's reference count.
+ info->mData->setMediaBufferBase(NULL);
PortMode mode = getPortMode(kPortIndexInput);
@@ -3059,14 +4291,14 @@ void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFillThisBuffer);
- notify->setPointer("buffer-id", info->mBufferID);
+ notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
info->mData->meta()->clear();
notify->setBuffer("buffer", info->mData);
sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id());
- reply->setPointer("buffer-id", info->mBufferID);
+ reply->setInt32("buffer-id", info->mBufferID);
notify->setMessage("reply", reply);
@@ -3077,8 +4309,7 @@ void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
IOMX::buffer_id bufferID;
- CHECK(msg->findPointer("buffer-id", &bufferID));
-
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
sp<ABuffer> buffer;
int32_t err = OK;
bool eos = false;
@@ -3182,8 +4413,7 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
(outputMode == FREE_BUFFERS ? "FREE" :
outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
if (outputMode == RESUBMIT_BUFFERS) {
- CHECK_EQ(mCodec->submitOutputMetaDataBuffer(),
- (status_t)OK);
+ mCodec->submitOutputMetaDataBuffer();
}
}
@@ -3235,11 +4465,11 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
mCodec->mInputEOSResult = err;
}
break;
-
- default:
- CHECK_EQ((int)mode, (int)FREE_BUFFERS);
- break;
}
+
+ default:
+ CHECK_EQ((int)mode, (int)FREE_BUFFERS);
+ break;
}
}
@@ -3276,10 +4506,8 @@ bool ACodec::BaseState::onOMXFillBufferDone(
IOMX::buffer_id bufferID,
size_t rangeOffset, size_t rangeLength,
OMX_U32 flags,
- int64_t timeUs,
- void *platformPrivate,
- void *dataPtr) {
- ALOGV("[%s] onOMXFillBufferDone %p time %lld us, flags = 0x%08lx",
+ int64_t timeUs) {
+ ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
ssize_t index;
@@ -3315,8 +4543,9 @@ bool ACodec::BaseState::onOMXFillBufferDone(
case RESUBMIT_BUFFERS:
{
- if (rangeLength == 0 && !(flags & OMX_BUFFERFLAG_EOS)) {
- ALOGV("[%s] calling fillBuffer %p",
+ if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
+ || mCodec->mPortEOS[kPortIndexOutput])) {
+ ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
CHECK_EQ(mCodec->mOMX->fillBuffer(
@@ -3330,7 +4559,7 @@ bool ACodec::BaseState::onOMXFillBufferDone(
sp<AMessage> reply =
new AMessage(kWhatOutputBufferDrained, mCodec->id());
- if (!mCodec->mSentFormat) {
+ if (!mCodec->mSentFormat && rangeLength > 0) {
mCodec->sendFormatChange(reply);
}
@@ -3357,12 +4586,12 @@ bool ACodec::BaseState::onOMXFillBufferDone(
info->mData->meta()->setInt64("timeUs", timeUs);
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatDrainThisBuffer);
- notify->setPointer("buffer-id", info->mBufferID);
+ notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
notify->setBuffer("buffer", info->mData);
notify->setInt32("flags", flags);
- reply->setPointer("buffer-id", info->mBufferID);
+ reply->setInt32("buffer-id", info->mBufferID);
notify->setMessage("reply", reply);
@@ -3374,7 +4603,7 @@ bool ACodec::BaseState::onOMXFillBufferDone(
ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatEOS);
+ notify->setInt32("what", CodecBase::kWhatEOS);
notify->setInt32("err", mCodec->mInputEOSResult);
notify->post();
@@ -3398,8 +4627,7 @@ bool ACodec::BaseState::onOMXFillBufferDone(
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
IOMX::buffer_id bufferID;
- CHECK(msg->findPointer("buffer-id", &bufferID));
-
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
ssize_t index;
BufferInfo *info =
mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
@@ -3416,18 +4644,42 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
if (mCodec->mNativeWindow != NULL
&& msg->findInt32("render", &render) && render != 0
&& info->mData != NULL && info->mData->size() != 0) {
+ ATRACE_NAME("render");
// The client wants this buffer to be rendered.
+ int64_t timestampNs = 0;
+ if (!msg->findInt64("timestampNs", &timestampNs)) {
+ // TODO: it seems like we should use the timestamp
+ // in the (media)buffer as it potentially came from
+ // an input surface, but we did not propagate it prior to
+ // API 20. Perhaps check for target SDK version.
+#if 0
+ if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
+ ALOGV("using buffer PTS of %" PRId64, timestampNs);
+ timestampNs *= 1000;
+ }
+#endif
+ }
+
status_t err;
+ err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
+ if (err != OK) {
+ ALOGW("failed to set buffer timestamp: %d", err);
+ }
+
if ((err = mCodec->mNativeWindow->queueBuffer(
mCodec->mNativeWindow.get(),
info->mGraphicBuffer.get(), -1)) == OK) {
info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
} else {
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
info->mStatus = BufferInfo::OWNED_BY_US;
}
} else {
+ if (mCodec->mNativeWindow != NULL &&
+ (info->mData == NULL || info->mData->size() != 0)) {
+ ATRACE_NAME("frame-drop");
+ }
info->mStatus = BufferInfo::OWNED_BY_US;
}
@@ -3458,7 +4710,7 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
}
if (info != NULL) {
- ALOGV("[%s] calling fillBuffer %p",
+ ALOGV("[%s] calling fillBuffer %u",
mCodec->mComponentName.c_str(), info->mBufferID);
CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID),
@@ -3491,7 +4743,7 @@ void ACodec::UninitializedState::stateEntered() {
ALOGV("Now uninitialized");
if (mDeathNotifier != NULL) {
- mCodec->mOMX->asBinder()->unlinkToDeath(mDeathNotifier);
+ IInterface::asBinder(mCodec->mOMX)->unlinkToDeath(mDeathNotifier);
mDeathNotifier.clear();
}
@@ -3528,10 +4780,11 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
int32_t keepComponentAllocated;
CHECK(msg->findInt32(
"keepComponentAllocated", &keepComponentAllocated));
- CHECK(!keepComponentAllocated);
+ ALOGW_IF(keepComponentAllocated,
+ "cannot keep component allocated on shutdown in Uninitialized state");
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
notify->post();
handled = true;
@@ -3541,13 +4794,20 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
case ACodec::kWhatFlush:
{
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
notify->post();
handled = true;
break;
}
+ case ACodec::kWhatReleaseCodecInstance:
+ {
+ // nothing to do, as we have already signaled shutdown
+ handled = true;
+ break;
+ }
+
default:
return BaseState::onMessageReceived(msg);
}
@@ -3576,7 +4836,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id());
mDeathNotifier = new DeathNotifier(notify);
- if (omx->asBinder()->linkToDeath(mDeathNotifier) != OK) {
+ if (IInterface::asBinder(omx)->linkToDeath(mDeathNotifier) != OK) {
// This was a local binder, if it dies so do we, we won't care
// about any notifications in the afterlife.
mDeathNotifier.clear();
@@ -3588,6 +4848,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
AString componentName;
uint32_t quirks = 0;
+ int32_t encoder = false;
if (msg->findString("componentName", &componentName)) {
ssize_t index = matchingCodecs.add();
OMXCodec::CodecNameAndQuirks *entry = &matchingCodecs.editItemAt(index);
@@ -3600,7 +4861,6 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
} else {
CHECK(msg->findString("mime", &mime));
- int32_t encoder;
if (!msg->findInt32("encoder", &encoder)) {
encoder = false;
}
@@ -3621,7 +4881,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
componentName = matchingCodecs.itemAt(matchIndex).mName.string();
quirks = matchingCodecs.itemAt(matchIndex).mQuirks;
- pid_t tid = androidGetTid();
+ pid_t tid = gettid();
int prevPriority = androidGetThreadPriority(tid);
androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
status_t err = omx->allocateNode(componentName.c_str(), observer, &node);
@@ -3629,6 +4889,8 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
if (err == OK) {
break;
+ } else {
+ ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
}
node = NULL;
@@ -3636,10 +4898,10 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
if (node == NULL) {
if (!mime.empty()) {
- ALOGE("Unable to instantiate a decoder for type '%s'.",
- mime.c_str());
+ ALOGE("Unable to instantiate a %scoder for type '%s'.",
+ encoder ? "en" : "de", mime.c_str());
} else {
- ALOGE("Unable to instantiate decoder '%s'.", componentName.c_str());
+ ALOGE("Unable to instantiate codec '%s'.", componentName.c_str());
}
mCodec->signalError(OMX_ErrorComponentNotFound);
@@ -3654,6 +4916,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
if (componentName.endsWith(".secure")) {
mCodec->mFlags |= kFlagIsSecure;
+ mCodec->mFlags |= kFlagIsGrallocUsageProtected;
mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
}
@@ -3663,7 +4926,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
{
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatComponentAllocated);
+ notify->setInt32("what", CodecBase::kWhatComponentAllocated);
notify->setString("componentName", mCodec->mComponentName.c_str());
notify->post();
}
@@ -3690,6 +4953,9 @@ void ACodec::LoadedState::stateEntered() {
mCodec->mDequeueCounter = 0;
mCodec->mMetaDataBuffersToSubmit = 0;
mCodec->mRepeatFrameDelayUs = -1ll;
+ mCodec->mInputFormat.clear();
+ mCodec->mOutputFormat.clear();
+ mCodec->mBaseOutputFormat.clear();
if (mCodec->mShutdownInProgress) {
bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
@@ -3699,6 +4965,9 @@ void ACodec::LoadedState::stateEntered() {
onShutdown(keepComponentAllocated);
}
+ mCodec->mExplicitShutdown = false;
+
+ mCodec->processDeferredMessages();
}
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
@@ -3708,9 +4977,12 @@ void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
mCodec->changeState(mCodec->mUninitializedState);
}
- sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatShutdownCompleted);
- notify->post();
+ if (mCodec->mExplicitShutdown) {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+ mCodec->mExplicitShutdown = false;
+ }
}
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
@@ -3744,6 +5016,7 @@ bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->findInt32(
"keepComponentAllocated", &keepComponentAllocated));
+ mCodec->mExplicitShutdown = true;
onShutdown(keepComponentAllocated);
handled = true;
@@ -3753,7 +5026,7 @@ bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
case ACodec::kWhatFlush:
{
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
notify->post();
handled = true;
@@ -3782,27 +5055,15 @@ bool ACodec::LoadedState::onConfigureComponent(
ALOGE("[%s] configureCodec returning error %d",
mCodec->mComponentName.c_str(), err);
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
return false;
}
- sp<RefBase> obj;
- if (msg->findObject("native-window", &obj)
- && strncmp("OMX.google.", mCodec->mComponentName.c_str(), 11)) {
- sp<NativeWindowWrapper> nativeWindow(
- static_cast<NativeWindowWrapper *>(obj.get()));
- CHECK(nativeWindow != NULL);
- mCodec->mNativeWindow = nativeWindow->getNativeWindow();
-
- native_window_set_scaling_mode(
- mCodec->mNativeWindow.get(),
- NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- }
- CHECK_EQ((status_t)OK, mCodec->initNativeWindow());
-
{
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatComponentConfigured);
+ notify->setInt32("what", CodecBase::kWhatComponentConfigured);
+ notify->setMessage("input-format", mCodec->mInputFormat);
+ notify->setMessage("output-format", mCodec->mOutputFormat);
notify->post();
}
@@ -3810,11 +5071,11 @@ bool ACodec::LoadedState::onConfigureComponent(
}
void ACodec::LoadedState::onCreateInputSurface(
- const sp<AMessage> &msg) {
+ const sp<AMessage> & /* msg */) {
ALOGV("onCreateInputSurface");
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatInputSurfaceCreated);
+ notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
sp<IGraphicBufferProducer> bufferProducer;
status_t err;
@@ -3838,6 +5099,56 @@ void ACodec::LoadedState::onCreateInputSurface(
}
}
+ if (err == OK && mCodec->mMaxPtsGapUs > 0ll) {
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
+ &mCodec->mMaxPtsGapUs,
+ sizeof(mCodec->mMaxPtsGapUs));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ }
+ }
+
+ if (err == OK && mCodec->mTimePerCaptureUs > 0ll
+ && mCodec->mTimePerFrameUs > 0ll) {
+ int64_t timeLapse[2];
+ timeLapse[0] = mCodec->mTimePerFrameUs;
+ timeLapse[1] = mCodec->mTimePerCaptureUs;
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_TIME_LAPSE,
+ &timeLapse[0],
+ sizeof(timeLapse));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure time lapse (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ }
+ }
+
+ if (err == OK && mCodec->mCreateInputBuffersSuspended) {
+ bool suspend = true;
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_SUSPEND,
+ &suspend,
+ sizeof(suspend));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure option to suspend (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ }
+ }
+
if (err == OK) {
notify->setObject("input-surface",
new BufferProducerWrapper(bufferProducer));
@@ -3877,7 +5188,7 @@ void ACodec::LoadedToIdleState::stateEntered() {
"(error 0x%08x)",
err);
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
mCodec->changeState(mCodec->mLoadedState);
}
@@ -3895,6 +5206,7 @@ status_t ACodec::LoadedToIdleState::allocateBuffers() {
bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatSetParameters:
case kWhatShutdown:
{
mCodec->deferMessage(msg);
@@ -3917,7 +5229,7 @@ bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
{
// We haven't even started yet, so we're flushed alright...
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
notify->post();
return true;
}
@@ -3961,6 +5273,7 @@ void ACodec::IdleToExecutingState::stateEntered() {
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatSetParameters:
case kWhatShutdown:
{
mCodec->deferMessage(msg);
@@ -3977,7 +5290,7 @@ bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
{
// We haven't even started yet, so we're flushed alright...
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
notify->post();
return true;
@@ -4021,7 +5334,7 @@ ACodec::ExecutingState::ExecutingState(ACodec *codec)
}
ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
- OMX_U32 portIndex) {
+ OMX_U32 /* portIndex */) {
return RESUBMIT_BUFFERS;
}
@@ -4036,6 +5349,9 @@ void ACodec::ExecutingState::submitOutputMetaBuffers() {
break;
}
}
+
+ // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+ mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround();
}
void ACodec::ExecutingState::submitRegularOutputBuffers() {
@@ -4080,11 +5396,14 @@ void ACodec::ExecutingState::resume() {
submitOutputBuffers();
- // Post the first input buffer.
+ // Post all available input buffers
CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u);
- BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(0);
-
- postFillThisBuffer(info);
+ for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
+ BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
+ if (info->mStatus == BufferInfo::OWNED_BY_US) {
+ postFillThisBuffer(info);
+ }
+ }
mActive = true;
}
@@ -4106,6 +5425,7 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
"keepComponentAllocated", &keepComponentAllocated));
mCodec->mShutdownInProgress = true;
+ mCodec->mExplicitShutdown = true;
mCodec->mKeepComponentAllocated = keepComponentAllocated;
mActive = false;
@@ -4184,6 +5504,19 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
+ case kWhatSubmitOutputMetaDataBufferIfEOS:
+ {
+ if (mCodec->mPortEOS[kPortIndexInput] &&
+ !mCodec->mPortEOS[kPortIndexOutput]) {
+ status_t err = mCodec->submitOutputMetaDataBuffer();
+ if (err == OK) {
+ mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround();
+ }
+ }
+ return true;
+ }
+
default:
handled = BaseState::onMessageReceived(msg);
break;
@@ -4214,6 +5547,22 @@ status_t ACodec::setParameters(const sp<AMessage> &params) {
}
}
+ int64_t skipFramesBeforeUs;
+ if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
+ status_t err =
+ mOMX->setInternalOption(
+ mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_START_TIME,
+ &skipFramesBeforeUs,
+ sizeof(skipFramesBeforeUs));
+
+ if (err != OK) {
+ ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
+ return err;
+ }
+ }
+
int32_t dropInputFrames;
if (params->findInt32("drop-input-frames", &dropInputFrames)) {
bool suspend = dropInputFrames != 0;
@@ -4247,7 +5596,7 @@ status_t ACodec::setParameters(const sp<AMessage> &params) {
void ACodec::onSignalEndOfInputStream() {
sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", ACodec::kWhatSignaledInputEOS);
+ notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);
status_t err = mOMX->signalEndOfInputStream(mNode);
if (err != OK) {
@@ -4319,6 +5668,7 @@ bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
case kWhatFlush:
case kWhatShutdown:
case kWhatResume:
+ case kWhatSetParameters:
{
if (msg->what() == kWhatResume) {
ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
@@ -4367,7 +5717,7 @@ bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
"port reconfiguration (error 0x%08x)",
err);
- mCodec->signalError(OMX_ErrorUndefined, err);
+ mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
// This is technically not correct, but appears to be
// the only way to free the component instance.
@@ -4636,7 +5986,7 @@ bool ACodec::FlushingState::onOMXEvent(
{
sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec->id());
msg->setInt32("type", omx_message::EVENT);
- msg->setPointer("node", mCodec->mNode);
+ msg->setInt32("node", mCodec->mNode);
msg->setInt32("event", event);
msg->setInt32("data1", data1);
msg->setInt32("data2", data2);
@@ -4677,7 +6027,7 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
notify->post();
mCodec->mPortEOS[kPortIndexInput] =
diff --git a/media/libstagefright/AMRExtractor.cpp b/media/libstagefright/AMRExtractor.cpp
index 03dcbf9..a6fb3d8 100644
--- a/media/libstagefright/AMRExtractor.cpp
+++ b/media/libstagefright/AMRExtractor.cpp
@@ -189,7 +189,7 @@ sp<MediaSource> AMRExtractor::getTrack(size_t index) {
mOffsetTable, mOffsetTableLength);
}
-sp<MetaData> AMRExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+sp<MetaData> AMRExtractor::getTrackMetaData(size_t index, uint32_t /* flags */) {
if (mInitCheck != OK || index != 0) {
return NULL;
}
@@ -221,7 +221,7 @@ AMRSource::~AMRSource() {
}
}
-status_t AMRSource::start(MetaData *params) {
+status_t AMRSource::start(MetaData * /* params */) {
CHECK(!mStarted);
mOffset = mIsWide ? 9 : 6;
@@ -258,14 +258,14 @@ status_t AMRSource::read(
int64_t seekFrame = seekTimeUs / 20000ll; // 20ms per frame.
mCurrentTimeUs = seekFrame * 20000ll;
- int index = seekFrame / 50;
+ size_t index = seekFrame < 0 ? 0 : seekFrame / 50;
if (index >= mOffsetTableLength) {
index = mOffsetTableLength - 1;
}
mOffset = mOffsetTable[index] + (mIsWide ? 9 : 6);
- for (int i = 0; i< seekFrame - index * 50; i++) {
+ for (size_t i = 0; i< seekFrame - index * 50; i++) {
status_t err;
if ((err = getFrameSizeByOffset(mDataSource, mOffset,
mIsWide, &size)) != OK) {
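For orientation, the AMRExtractor seek hunk above rests on two fixed constants: every AMR frame spans 20 ms, and the offset table stores one entry per 50 frames. A minimal standalone sketch of that arithmetic (the helper name frameIndexForSeek is hypothetical, not part of the patch):

    // Sketch only (not part of the patch): mirrors the seek clamping above.
    #include <stddef.h>
    #include <stdint.h>

    // 20 ms per AMR frame; one offset-table entry per 50 frames.
    static size_t frameIndexForSeek(int64_t seekTimeUs, size_t tableLength) {
        int64_t seekFrame = seekTimeUs / 20000ll;
        size_t index = (seekFrame < 0) ? 0 : (size_t)(seekFrame / 50);
        return (index >= tableLength) ? tableLength - 1 : index;
    }
    // e.g. seekTimeUs = 3500000 -> seekFrame 175 -> table entry 3; the remaining
    // 25 frames are then stepped through via getFrameSizeByOffset().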
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index 3fe247a..9aa7d95 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -14,6 +14,12 @@
* limitations under the License.
*/
+#include <fcntl.h>
+#include <inttypes.h>
+#include <sys/prctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/MediaBuffer.h>
@@ -22,10 +28,6 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/mediarecorder.h>
-#include <sys/prctl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
namespace android {
@@ -105,7 +107,7 @@ status_t AMRWriter::addSource(const sp<MediaSource> &source) {
return OK;
}
-status_t AMRWriter::start(MetaData *params) {
+status_t AMRWriter::start(MetaData * /* params */) {
if (mInitCheck != OK) {
return mInitCheck;
}
@@ -235,7 +237,7 @@ status_t AMRWriter::threadFunc() {
mResumed = false;
}
timestampUs -= previousPausedDurationUs;
- ALOGV("time stamp: %lld, previous paused duration: %lld",
+ ALOGV("time stamp: %" PRId64 ", previous paused duration: %" PRId64,
timestampUs, previousPausedDurationUs);
if (timestampUs > maxTimestampUs) {
maxTimestampUs = timestampUs;
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 6a2a696..2629afc 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -14,7 +14,10 @@ LOCAL_SRC_FILES:= \
AwesomePlayer.cpp \
CameraSource.cpp \
CameraSourceTimeLapse.cpp \
+ ClockEstimator.cpp \
+ CodecBase.cpp \
DataSource.cpp \
+ DataURISource.cpp \
DRMExtractor.cpp \
ESDS.cpp \
FileSource.cpp \
@@ -30,8 +33,11 @@ LOCAL_SRC_FILES:= \
MediaBufferGroup.cpp \
MediaCodec.cpp \
MediaCodecList.cpp \
+ MediaCodecSource.cpp \
MediaDefs.cpp \
MediaExtractor.cpp \
+ MidiExtractor.cpp \
+ http/MediaHTTP.cpp \
MediaMuxer.cpp \
MediaSource.cpp \
MetaData.cpp \
@@ -55,22 +61,20 @@ LOCAL_SRC_FILES:= \
WVMExtractor.cpp \
XINGSeeker.cpp \
avc_utils.cpp \
- mp4/FragmentedMP4Parser.cpp \
- mp4/TrackFragment.cpp \
LOCAL_C_INCLUDES:= \
+ $(TOP)/frameworks/av/include/media/ \
$(TOP)/frameworks/av/include/media/stagefright/timedtext \
$(TOP)/frameworks/native/include/media/hardware \
$(TOP)/frameworks/native/include/media/openmax \
- $(TOP)/frameworks/native/services/connectivitymanager \
$(TOP)/external/flac/include \
$(TOP)/external/tremolo \
- $(TOP)/external/openssl/include \
+ $(TOP)/external/libvpx/libwebm \
+ $(TOP)/system/netd/include \
LOCAL_SHARED_LIBRARIES := \
libbinder \
libcamera_client \
- libconnectivitymanager \
libcutils \
libdl \
libdrmframework \
@@ -80,6 +84,8 @@ LOCAL_SHARED_LIBRARIES := \
libicuuc \
liblog \
libmedia \
+ libnetd_client \
+ libopus \
libsonivox \
libssl \
libstagefright_omx \
@@ -95,6 +101,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_color_conversion \
libstagefright_aacenc \
libstagefright_matroska \
+ libstagefright_webm \
libstagefright_timedtext \
libvpx \
libwebm \
@@ -103,13 +110,6 @@ LOCAL_STATIC_LIBRARIES := \
libFLAC \
libmedia_helper
-LOCAL_SRC_FILES += \
- chromium_http_stub.cpp
-LOCAL_CPPFLAGS += -DCHROMIUM_AVAILABLE=1
-
-LOCAL_SHARED_LIBRARIES += libstlport
-include external/stlport/libstlport.mk
-
LOCAL_SHARED_LIBRARIES += \
libstagefright_enc_common \
libstagefright_avc_common \
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index 05ee34e..e24824b 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioPlayer"
#include <utils/Log.h>
@@ -21,6 +23,7 @@
#include <binder/IPCThreadState.h>
#include <media/AudioTrack.h>
+#include <media/openmax/OMX_Audio.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/AudioPlayer.h>
@@ -139,6 +142,12 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) {
} else {
ALOGV("Mime type \"%s\" mapped to audio_format 0x%x", mime, audioFormat);
}
+
+ int32_t aacaot = -1;
+ if ((audioFormat == AUDIO_FORMAT_AAC) && format->findInt32(kKeyAACAOT, &aacaot)) {
+ // Redefine AAC format corresponding to aac profile
+ mapAACProfileToAudioFormat(audioFormat,(OMX_AUDIO_AACPROFILETYPE) aacaot);
+ }
}
int avgBitRate = -1;
@@ -221,7 +230,8 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) {
mAudioTrack = new AudioTrack(
AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT, audioMask,
- 0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0);
+ 0 /*frameCount*/, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this,
+ 0 /*notificationFrames*/);
if ((err = mAudioTrack->initCheck()) != OK) {
mAudioTrack.clear();
@@ -410,7 +420,7 @@ status_t AudioPlayer::setPlaybackRatePermille(int32_t ratePermille) {
// static
size_t AudioPlayer::AudioSinkCallback(
- MediaPlayerBase::AudioSink *audioSink,
+ MediaPlayerBase::AudioSink * /* audioSink */,
void *buffer, size_t size, void *cookie,
MediaPlayerBase::AudioSink::cb_event_t event) {
AudioPlayer *me = (AudioPlayer *)cookie;
@@ -565,12 +575,12 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) {
int64_t timeToCompletionUs =
(1000000ll * numFramesPendingPlayout) / mSampleRate;
- ALOGV("total number of frames played: %lld (%lld us)",
+ ALOGV("total number of frames played: %" PRId64 " (%lld us)",
(mNumFramesPlayed + numAdditionalFrames),
1000000ll * (mNumFramesPlayed + numAdditionalFrames)
/ mSampleRate);
- ALOGV("%d frames left to play, %lld us (%.2f secs)",
+ ALOGV("%d frames left to play, %" PRId64 " us (%.2f secs)",
numFramesPendingPlayout,
timeToCompletionUs, timeToCompletionUs / 1E6);
@@ -627,7 +637,7 @@ size_t AudioPlayer::fillBuffer(void *data, size_t size) {
mPositionTimeRealUs =
((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
/ mSampleRate;
- ALOGV("buffer->size() = %d, "
+ ALOGV("buffer->size() = %zu, "
"mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
mInputBuffer->range_length(),
mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
@@ -745,7 +755,7 @@ int64_t AudioPlayer::getOutputPlayPositionUs_l()
// HAL position is relative to the first buffer we sent at mStartPosUs
const int64_t renderedDuration = mStartPosUs + playedUs;
- ALOGV("getOutputPlayPositionUs_l %lld", renderedDuration);
+ ALOGV("getOutputPlayPositionUs_l %" PRId64, renderedDuration);
return renderedDuration;
}
@@ -756,8 +766,13 @@ int64_t AudioPlayer::getMediaTimeUs() {
if (mSeeking) {
return mSeekTimeUs;
}
+ if (mReachedEOS) {
+ int64_t durationUs;
+ mSource->getFormat()->findInt64(kKeyDuration, &durationUs);
+ return durationUs;
+ }
mPositionTimeRealUs = getOutputPlayPositionUs_l();
- ALOGV("getMediaTimeUs getOutputPlayPositionUs_l() mPositionTimeRealUs %lld",
+ ALOGV("getMediaTimeUs getOutputPlayPositionUs_l() mPositionTimeRealUs %" PRId64,
mPositionTimeRealUs);
return mPositionTimeRealUs;
}
@@ -795,7 +810,7 @@ bool AudioPlayer::getMediaTimeMapping(
status_t AudioPlayer::seekTo(int64_t time_us) {
Mutex::Autolock autoLock(mLock);
- ALOGV("seekTo( %lld )", time_us);
+ ALOGV("seekTo( %" PRId64 " )", time_us);
mSeeking = true;
mPositionTimeRealUs = mPositionTimeMediaUs = -1;
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index f0d1a14..804f131 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -14,6 +14,9 @@
* limitations under the License.
*/
+#include <inttypes.h>
+#include <stdlib.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioSource"
#include <utils/Log.h>
@@ -26,7 +29,6 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <cutils/properties.h>
-#include <stdlib.h>
namespace android {
@@ -65,10 +67,10 @@ AudioSource::AudioSource(
if (status == OK) {
// make sure that the AudioRecord callback never returns more than the maximum
// buffer size
- int frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
+ uint32_t frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
// make sure that the AudioRecord total buffer size is large enough
- int bufCount = 2;
+ size_t bufCount = 2;
while ((bufCount * frameCount) < minFrameCount) {
bufCount++;
}
@@ -76,10 +78,10 @@ AudioSource::AudioSource(
mRecord = new AudioRecord(
inputSource, sampleRate, AUDIO_FORMAT_PCM_16_BIT,
audio_channel_in_mask_from_count(channelCount),
- bufCount * frameCount,
+ (size_t) (bufCount * frameCount),
AudioRecordCallbackFunction,
this,
- frameCount);
+ frameCount /*notificationFrames*/);
mInitCheck = mRecord->initCheck();
} else {
mInitCheck = status;
@@ -136,7 +138,7 @@ void AudioSource::releaseQueuedFrames_l() {
}
void AudioSource::waitOutstandingEncodingFrames_l() {
- ALOGV("waitOutstandingEncodingFrames_l: %lld", mNumClientOwnedBuffers);
+ ALOGV("waitOutstandingEncodingFrames_l: %" PRId64, mNumClientOwnedBuffers);
while (mNumClientOwnedBuffers > 0) {
mFrameEncodingCompletionCondition.wait(mLock);
}
@@ -153,6 +155,8 @@ status_t AudioSource::reset() {
}
mStarted = false;
+ mFrameAvailableCondition.signal();
+
mRecord->stop();
waitOutstandingEncodingFrames_l();
releaseQueuedFrames_l();
@@ -208,7 +212,7 @@ void AudioSource::rampVolume(
}
status_t AudioSource::read(
- MediaBuffer **out, const ReadOptions *options) {
+ MediaBuffer **out, const ReadOptions * /* options */) {
Mutex::Autolock autoLock(mLock);
*out = NULL;
@@ -269,7 +273,7 @@ void AudioSource::signalBufferReturned(MediaBuffer *buffer) {
status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
int64_t timeUs = systemTime() / 1000ll;
- ALOGV("dataCallbackTimestamp: %lld us", timeUs);
+ ALOGV("dataCallbackTimestamp: %" PRId64 " us", timeUs);
Mutex::Autolock autoLock(mLock);
if (!mStarted) {
ALOGW("Spurious callback from AudioRecord. Drop the audio data.");
@@ -278,8 +282,8 @@ status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
// Drop retrieved and previously lost audio data.
if (mNumFramesReceived == 0 && timeUs < mStartTimeUs) {
- mRecord->getInputFramesLost();
- ALOGV("Drop audio data at %lld/%lld us", timeUs, mStartTimeUs);
+ (void) mRecord->getInputFramesLost();
+ ALOGV("Drop audio data at %" PRId64 "/%" PRId64 " us", timeUs, mStartTimeUs);
return OK;
}
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 29c007a..87eef1e 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -19,7 +19,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "AwesomePlayer"
#define ATRACE_TAG ATRACE_TAG_VIDEO
+
#include <inttypes.h>
+
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -35,16 +37,20 @@
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/timedtext/TimedTextDriver.h>
#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/ClockEstimator.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaHTTP.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
@@ -69,7 +75,7 @@ static const size_t kHighWaterMarkBytes = 200000;
// maximum time in paused state when offloading audio decompression. When elapsed, the AudioPlayer
// is destroyed to allow the audio DSP to power down.
-static int64_t kOffloadPauseMaxUs = 60000000ll;
+static int64_t kOffloadPauseMaxUs = 10000000ll;
struct AwesomeEvent : public TimedEventQueue::Event {
@@ -83,7 +89,7 @@ struct AwesomeEvent : public TimedEventQueue::Event {
protected:
virtual ~AwesomeEvent() {}
- virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
+ virtual void fire(TimedEventQueue * /* queue */, int64_t /* now_us */) {
(mPlayer->*mMethod)();
}
@@ -97,17 +103,21 @@ private:
struct AwesomeLocalRenderer : public AwesomeRenderer {
AwesomeLocalRenderer(
- const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
- : mTarget(new SoftwareRenderer(nativeWindow, meta)) {
+ const sp<ANativeWindow> &nativeWindow, const sp<AMessage> &format)
+ : mFormat(format),
+ mTarget(new SoftwareRenderer(nativeWindow)) {
}
virtual void render(MediaBuffer *buffer) {
+ int64_t timeUs;
+ CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
render((const uint8_t *)buffer->data() + buffer->range_offset(),
- buffer->range_length());
+ buffer->range_length(), timeUs * 1000);
}
- void render(const void *data, size_t size) {
- mTarget->render(data, size, NULL);
+ void render(const void *data, size_t size, int64_t timestampNs) {
+ mTarget->render(data, size, timestampNs, NULL, mFormat);
}
protected:
@@ -117,6 +127,7 @@ protected:
}
private:
+ sp<AMessage> mFormat;
SoftwareRenderer *mTarget;
AwesomeLocalRenderer(const AwesomeLocalRenderer &);
@@ -228,6 +239,8 @@ AwesomePlayer::AwesomePlayer()
&AwesomePlayer::onAudioTearDownEvent);
mAudioTearDownEventPending = false;
+ mClockEstimator = new WindowedLinearFitEstimator();
+
reset();
}
@@ -277,15 +290,20 @@ void AwesomePlayer::setUID(uid_t uid) {
}
status_t AwesomePlayer::setDataSource(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers) {
Mutex::Autolock autoLock(mLock);
- return setDataSource_l(uri, headers);
+ return setDataSource_l(httpService, uri, headers);
}
status_t AwesomePlayer::setDataSource_l(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers) {
reset_l();
+ mHTTPService = httpService;
mUri = uri;
if (headers) {
@@ -302,7 +320,7 @@ status_t AwesomePlayer::setDataSource_l(
}
}
- ALOGI("setDataSource_l(URL suppressed)");
+ ALOGI("setDataSource_l(%s)", uriDebugString(mUri, mFlags & INCOGNITO).c_str());
// The actual work will be done during preparation in the call to
// ::finishSetDataSource_l to avoid blocking the calling thread in
@@ -394,6 +412,13 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
totalBitRate += bitrate;
}
+ sp<MetaData> fileMeta = mExtractor->getMetaData();
+ if (fileMeta != NULL) {
+ int64_t duration;
+ if (fileMeta->findInt64(kKeyDuration, &duration)) {
+ mDurationUs = duration;
+ }
+ }
mBitrate = totalBitRate;
@@ -582,6 +607,7 @@ void AwesomePlayer::reset_l() {
mSeekNotificationSent = true;
mSeekTimeUs = 0;
+ mHTTPService.clear();
mUri.setTo("");
mUriHeaders.clear();
@@ -709,11 +735,9 @@ void AwesomePlayer::onBufferingUpdate() {
finishAsyncPrepare_l();
}
} else {
- int64_t bitrate;
- if (getBitrate(&bitrate)) {
- size_t cachedSize = mCachedSource->cachedSize();
- int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;
-
+ bool eos2;
+ int64_t cachedDurationUs;
+ if (getCachedDuration_l(&cachedDurationUs, &eos2) && mDurationUs > 0) {
int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
if (percentage > 100) {
percentage = 100;
@@ -721,7 +745,7 @@ void AwesomePlayer::onBufferingUpdate() {
notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
} else {
- // We don't know the bitrate of the stream, use absolute size
+ // We don't know the bitrate/duration of the stream, use absolute size
// limits to maintain the cache.
if ((mFlags & PLAYING) && !eos
@@ -854,11 +878,18 @@ void AwesomePlayer::onStreamDone() {
return;
}
+ if (mFlags & AUTO_LOOPING) {
+ audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
+ if (mAudioSink != NULL) {
+ streamType = mAudioSink->getAudioStreamType();
+ }
+ if (streamType == AUDIO_STREAM_NOTIFICATION) {
+ ALOGW("disabling auto-loop for notification");
+ modifyFlags(AUTO_LOOPING, CLEAR);
+ }
+ }
if ((mFlags & LOOPING)
- || ((mFlags & AUTO_LOOPING)
- && (mAudioSink == NULL || mAudioSink->realtime()))) {
- // Don't AUTO_LOOP if we're being recorded, since that cannot be
- // turned off and recording would go on indefinitely.
+ || (mFlags & AUTO_LOOPING)) {
seekTo_l(0);
@@ -1214,7 +1245,9 @@ void AwesomePlayer::initRenderer_l() {
// allocate their buffers in local address space. This renderer
// then performs a color conversion and copy to get the data
// into the ANativeBuffer.
- mVideoRenderer = new AwesomeLocalRenderer(mNativeWindow, meta);
+ sp<AMessage> format;
+ convertMetaDataToMessage(meta, &format);
+ mVideoRenderer = new AwesomeLocalRenderer(mNativeWindow, format);
}
}
@@ -1483,7 +1516,7 @@ void AwesomePlayer::addTextSource_l(size_t trackIndex, const sp<MediaSource>& so
CHECK(source != NULL);
if (mTextDriver == NULL) {
- mTextDriver = new TimedTextDriver(mListener);
+ mTextDriver = new TimedTextDriver(mListener, mHTTPService);
}
mTextDriver->addInBandTextSource(trackIndex, source);
@@ -1695,7 +1728,7 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
}
if (mAudioPlayer != NULL) {
- ALOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
+ ALOGV("seeking audio to %" PRId64 " us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
// If we don't have a video time, seek audio to the originally
// requested seek time instead.
@@ -1759,7 +1792,7 @@ void AwesomePlayer::onVideoEvent() {
if (!mVideoBuffer) {
MediaSource::ReadOptions options;
if (mSeeking != NO_SEEK) {
- ALOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
+ ALOGV("seeking to %" PRId64 " us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
options.setSeekTo(
mSeekTimeUs,
@@ -1829,7 +1862,7 @@ void AwesomePlayer::onVideoEvent() {
if (mSeeking == SEEK_VIDEO_ONLY) {
if (mSeekTimeUs > timeUs) {
- ALOGI("XXX mSeekTimeUs = %lld us, timeUs = %lld us",
+ ALOGI("XXX mSeekTimeUs = %" PRId64 " us, timeUs = %" PRId64 " us",
mSeekTimeUs, timeUs);
}
}
@@ -1859,21 +1892,28 @@ void AwesomePlayer::onVideoEvent() {
TimeSource *ts =
((mFlags & AUDIO_AT_EOS) || !(mFlags & AUDIOPLAYER_STARTED))
? &mSystemTimeSource : mTimeSource;
+ int64_t systemTimeUs = mSystemTimeSource.getRealTimeUs();
+ int64_t looperTimeUs = ALooper::GetNowUs();
if (mFlags & FIRST_FRAME) {
modifyFlags(FIRST_FRAME, CLEAR);
mSinceLastDropped = 0;
- mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
+ mClockEstimator->reset();
+ mTimeSourceDeltaUs = estimateRealTimeUs(ts, systemTimeUs) - timeUs;
}
int64_t realTimeUs, mediaTimeUs;
if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
&& mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
+ ALOGV("updating TSdelta (%" PRId64 " => %" PRId64 " change %" PRId64 ")",
+ mTimeSourceDeltaUs, realTimeUs - mediaTimeUs,
+ mTimeSourceDeltaUs - (realTimeUs - mediaTimeUs));
+ ATRACE_INT("TS delta change (ms)", (mTimeSourceDeltaUs - (realTimeUs - mediaTimeUs)) / 1E3);
mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
}
if (wasSeeking == SEEK_VIDEO_ONLY) {
- int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
+ int64_t nowUs = estimateRealTimeUs(ts, systemTimeUs) - mTimeSourceDeltaUs;
int64_t latenessUs = nowUs - timeUs;
@@ -1884,12 +1924,13 @@ void AwesomePlayer::onVideoEvent() {
}
}
+ int64_t latenessUs = 0;
if (wasSeeking == NO_SEEK) {
// Let's display the first frame after seeking right away.
- int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
+ int64_t nowUs = estimateRealTimeUs(ts, systemTimeUs) - mTimeSourceDeltaUs;
- int64_t latenessUs = nowUs - timeUs;
+ latenessUs = nowUs - timeUs;
ATRACE_INT("Video Lateness (ms)", latenessUs / 1E3);
@@ -1919,13 +1960,13 @@ void AwesomePlayer::onVideoEvent() {
if (latenessUs > 40000) {
// We're more than 40ms late.
- ALOGV("we're late by %lld us (%.2f secs)",
+ ALOGV("we're late by %" PRId64 " us (%.2f secs)",
latenessUs, latenessUs / 1E6);
if (!(mFlags & SLOW_DECODER_HACK)
|| mSinceLastDropped > FRAME_DROP_FREQ)
{
- ALOGV("we're late by %lld us (%.2f secs) dropping "
+ ALOGV("we're late by %" PRId64 " us (%.2f secs) dropping "
"one after %d frames",
latenessUs, latenessUs / 1E6, mSinceLastDropped);
@@ -1943,9 +1984,9 @@ void AwesomePlayer::onVideoEvent() {
}
}
- if (latenessUs < -10000) {
- // We're more than 10ms early.
- postVideoEvent_l(10000);
+ if (latenessUs < -30000) {
+ // We're more than 30ms early, schedule at most 20 ms before time due
+ postVideoEvent_l(latenessUs < -60000 ? 30000 : -latenessUs - 20000);
return;
}
}
@@ -1959,6 +2000,8 @@ void AwesomePlayer::onVideoEvent() {
if (mVideoRenderer != NULL) {
mSinceLastDropped++;
+ mVideoBuffer->meta_data()->setInt64(kKeyTime, looperTimeUs - latenessUs);
+
mVideoRenderer->render(mVideoBuffer);
if (!mVideoRenderingStarted) {
mVideoRenderingStarted = true;
@@ -2008,14 +2051,26 @@ void AwesomePlayer::onVideoEvent() {
int64_t nextTimeUs;
CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &nextTimeUs));
- int64_t delayUs = nextTimeUs - ts->getRealTimeUs() + mTimeSourceDeltaUs;
- postVideoEvent_l(delayUs > 10000 ? 10000 : delayUs < 0 ? 0 : delayUs);
+ systemTimeUs = mSystemTimeSource.getRealTimeUs();
+ int64_t delayUs = nextTimeUs - estimateRealTimeUs(ts, systemTimeUs) + mTimeSourceDeltaUs;
+ ATRACE_INT("Frame delta (ms)", (nextTimeUs - timeUs) / 1E3);
+ ALOGV("next frame in %" PRId64, delayUs);
+ // try to schedule 30ms before time due
+ postVideoEvent_l(delayUs > 60000 ? 30000 : (delayUs < 30000 ? 0 : delayUs - 30000));
return;
}
postVideoEvent_l();
}
+int64_t AwesomePlayer::estimateRealTimeUs(TimeSource *ts, int64_t systemTimeUs) {
+ if (ts == &mSystemTimeSource) {
+ return systemTimeUs;
+ } else {
+ return (int64_t)mClockEstimator->estimate(systemTimeUs, ts->getRealTimeUs());
+ }
+}
+
void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
ATRACE_CALL();
@@ -2193,15 +2248,14 @@ status_t AwesomePlayer::finishSetDataSource_l() {
if (!strncasecmp("http://", mUri.string(), 7)
|| !strncasecmp("https://", mUri.string(), 8)
|| isWidevineStreaming) {
- mConnectingDataSource = HTTPBase::Create(
- (mFlags & INCOGNITO)
- ? HTTPBase::kFlagIncognito
- : 0);
-
- if (mUIDValid) {
- mConnectingDataSource->setUID(mUID);
+ if (mHTTPService == NULL) {
+ ALOGE("Attempt to play media from http URI without HTTP service.");
+ return UNKNOWN_ERROR;
}
+ sp<IMediaHTTPConnection> conn = mHTTPService->makeHTTPConnection();
+ mConnectingDataSource = new MediaHTTP(conn);
+
String8 cacheConfig;
bool disconnectAtHighwatermark;
NuCachedSource2::RemoveCacheSpecificHeaders(
@@ -2209,6 +2263,10 @@ status_t AwesomePlayer::finishSetDataSource_l() {
mLock.unlock();
status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
+ // force connection at this point, to avoid a race condition between getMIMEType and the
+ // caching datasource constructed below, which could result in multiple requests to the
+ // server, and/or failed connections.
+ String8 contentType = mConnectingDataSource->getMIMEType();
mLock.lock();
if (err != OK) {
@@ -2239,8 +2297,6 @@ status_t AwesomePlayer::finishSetDataSource_l() {
mConnectingDataSource.clear();
- String8 contentType = dataSource->getMIMEType();
-
if (strncasecmp(contentType.string(), "audio/", 6)) {
// We're not doing this for streams that appear to be audio-only
// streams to ensure that even low bandwidth streams start
@@ -2272,12 +2328,12 @@ status_t AwesomePlayer::finishSetDataSource_l() {
if (finalStatus != OK
|| (metaDataSize >= 0
- && cachedDataRemaining >= metaDataSize)
+ && (off64_t)cachedDataRemaining >= metaDataSize)
|| (mFlags & PREPARE_CANCELLED)) {
break;
}
- ALOGV("now cached %d bytes of data", cachedDataRemaining);
+ ALOGV("now cached %zu bytes of data", cachedDataRemaining);
if (metaDataSize < 0
&& cachedDataRemaining >= kMinBytesForSniffing) {
@@ -2317,7 +2373,8 @@ status_t AwesomePlayer::finishSetDataSource_l() {
}
}
} else {
- dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
+ dataSource = DataSource::CreateFromURI(
+ mHTTPService, mUri.string(), &mUriHeaders);
}
if (dataSource == NULL) {
@@ -2655,7 +2712,7 @@ status_t AwesomePlayer::selectAudioTrack_l(
status_t AwesomePlayer::selectTrack(size_t trackIndex, bool select) {
ATRACE_CALL();
- ALOGV("selectTrack: trackIndex = %d and select=%d", trackIndex, select);
+ ALOGV("selectTrack: trackIndex = %zu and select=%d", trackIndex, select);
Mutex::Autolock autoLock(mLock);
size_t trackCount = mExtractor->countTracks();
if (mTextDriver != NULL) {
@@ -2759,7 +2816,7 @@ status_t AwesomePlayer::invoke(const Parcel &request, Parcel *reply) {
{
Mutex::Autolock autoLock(mLock);
if (mTextDriver == NULL) {
- mTextDriver = new TimedTextDriver(mListener);
+ mTextDriver = new TimedTextDriver(mListener, mHTTPService);
}
// String values written in Parcel are UTF-16 values.
String8 uri(request.readString16());
@@ -2771,7 +2828,7 @@ status_t AwesomePlayer::invoke(const Parcel &request, Parcel *reply) {
{
Mutex::Autolock autoLock(mLock);
if (mTextDriver == NULL) {
- mTextDriver = new TimedTextDriver(mListener);
+ mTextDriver = new TimedTextDriver(mListener, mHTTPService);
}
int fd = request.readFileDescriptor();
off64_t offset = request.readInt64();
@@ -2804,14 +2861,15 @@ bool AwesomePlayer::isStreamingHTTP() const {
return mCachedSource != NULL || mWVMExtractor != NULL;
}
-status_t AwesomePlayer::dump(int fd, const Vector<String16> &args) const {
+status_t AwesomePlayer::dump(
+ int fd, const Vector<String16> & /* args */) const {
Mutex::Autolock autoLock(mStatsLock);
FILE *out = fdopen(dup(fd), "w");
fprintf(out, " AwesomePlayer\n");
if (mStats.mFd < 0) {
- fprintf(out, " URI(suppressed)");
+ fprintf(out, " URI(%s)", uriDebugString(mUri, mFlags & INCOGNITO).c_str());
} else {
fprintf(out, " fd(%d)", mStats.mFd);
}
@@ -2900,6 +2958,9 @@ void AwesomePlayer::onAudioTearDownEvent() {
// get current position so we can start recreated stream from here
getPosition(&mAudioTearDownPosition);
+ sp<IMediaHTTPService> savedHTTPService = mHTTPService;
+
+ bool wasLooping = mFlags & LOOPING;
// Reset and recreate
reset_l();
@@ -2909,7 +2970,7 @@ void AwesomePlayer::onAudioTearDownEvent() {
mFileSource = fileSource;
err = setDataSource_l(fileSource);
} else {
- err = setDataSource_l(uri, &uriHeaders);
+ err = setDataSource_l(savedHTTPService, uri, &uriHeaders);
}
mFlags |= PREPARING;
@@ -2918,6 +2979,9 @@ void AwesomePlayer::onAudioTearDownEvent() {
// a MEDIA_ERROR to the client and abort the prepare
mFlags |= PREPARE_CANCELLED;
}
+ if (wasLooping) {
+ mFlags |= LOOPING;
+ }
mAudioTearDown = true;
mIsAsyncPrepare = true;
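As a reading aid for the revised frame scheduling in the AwesomePlayer hunks above: frames that arrive more than 30 ms early are re-posted so the event fires roughly 20-30 ms before they are due, and the follow-up video event is likewise scheduled to wake about 30 ms ahead of the next frame's time. A minimal sketch of the latter expression with worked values (the helper name nextVideoEventDelayUs is hypothetical, not part of the patch):

    // Sketch only: the wake-up policy for the next frame, as introduced above.
    #include <stdint.h>

    static int64_t nextVideoEventDelayUs(int64_t delayUs) {
        // aim to wake ~30 ms before the frame is due, and never sleep more than 30 ms
        return delayUs > 60000 ? 30000 : (delayUs < 30000 ? 0 : delayUs - 30000);
    }
    // nextVideoEventDelayUs(100000) == 30000
    // nextVideoEventDelayUs(45000)  == 15000
    // nextVideoEventDelayUs(20000)  == 0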
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 3017fe7..ad12bdd 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSource"
#include <utils/Log.h>
@@ -31,6 +33,12 @@
#include <utils/String8.h>
#include <cutils/properties.h>
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
namespace android {
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
@@ -63,12 +71,15 @@ CameraSourceListener::~CameraSourceListener() {
}
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
+ UNUSED_UNLESS_VERBOSE(msgType);
+ UNUSED_UNLESS_VERBOSE(ext1);
+ UNUSED_UNLESS_VERBOSE(ext2);
ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr,
- camera_frame_metadata_t *metadata) {
- ALOGV("postData(%d, ptr:%p, size:%d)",
+ camera_frame_metadata_t * /* metadata */) {
+ ALOGV("postData(%d, ptr:%p, size:%zu)",
msgType, dataPtr->pointer(), dataPtr->size());
sp<CameraSource> source = mSource.promote();
@@ -119,6 +130,7 @@ static int32_t getColorFormat(const char* colorFormat) {
"CameraSource::getColorFormat", colorFormat);
CHECK(!"Unknown color format");
+ return -1;
}
CameraSource *CameraSource::Create(const String16 &clientName) {
@@ -207,7 +219,7 @@ status_t CameraSource::isCameraAvailable(
mCameraFlags |= FLAGS_HOT_CAMERA;
mDeathNotifier = new DeathNotifier();
// isBinderAlive needs linkToDeath to work.
- mCameraRecordingProxy->asBinder()->linkToDeath(mDeathNotifier);
+ IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
}
mCamera->lock();
@@ -577,14 +589,15 @@ CameraSource::~CameraSource() {
}
}
-void CameraSource::startCameraRecording() {
+status_t CameraSource::startCameraRecording() {
ALOGV("startCameraRecording");
// Reset the identity to the current thread because media server owns the
// camera and recording is started by the applications. The applications
// will connect to the camera in ICameraRecordingProxy::startRecording.
int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ status_t err;
if (mNumInputBuffers > 0) {
- status_t err = mCamera->sendCommand(
+ err = mCamera->sendCommand(
CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
// This could happen for CameraHAL1 clients; thus the failure is
@@ -595,17 +608,25 @@ void CameraSource::startCameraRecording() {
}
}
+ err = OK;
if (mCameraFlags & FLAGS_HOT_CAMERA) {
mCamera->unlock();
mCamera.clear();
- CHECK_EQ((status_t)OK,
- mCameraRecordingProxy->startRecording(new ProxyListener(this)));
+ if ((err = mCameraRecordingProxy->startRecording(
+ new ProxyListener(this))) != OK) {
+ ALOGE("Failed to start recording, received error: %s (%d)",
+ strerror(-err), err);
+ }
} else {
mCamera->setListener(new CameraSourceListener(this));
mCamera->startRecording();
- CHECK(mCamera->recordingEnabled());
+ if (!mCamera->recordingEnabled()) {
+ err = -EINVAL;
+ ALOGE("Failed to start recording");
+ }
}
IPCThreadState::self()->restoreCallingIdentity(token);
+ return err;
}
status_t CameraSource::start(MetaData *meta) {
@@ -637,10 +658,12 @@ status_t CameraSource::start(MetaData *meta) {
}
}
- startCameraRecording();
+ status_t err;
+ if ((err = startCameraRecording()) == OK) {
+ mStarted = true;
+ }
- mStarted = true;
- return OK;
+ return err;
}
void CameraSource::stopCameraRecording() {
@@ -655,63 +678,80 @@ void CameraSource::stopCameraRecording() {
void CameraSource::releaseCamera() {
ALOGV("releaseCamera");
- if (mCamera != 0) {
+ sp<Camera> camera;
+ bool coldCamera = false;
+ {
+ Mutex::Autolock autoLock(mLock);
+ // get a local ref and clear ref to mCamera now
+ camera = mCamera;
+ mCamera.clear();
+ coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
+ }
+
+ if (camera != 0) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+ if (coldCamera) {
ALOGV("Camera was cold when we started, stopping preview");
- mCamera->stopPreview();
- mCamera->disconnect();
+ camera->stopPreview();
+ camera->disconnect();
}
- mCamera->unlock();
- mCamera.clear();
- mCamera = 0;
+ camera->unlock();
IPCThreadState::self()->restoreCallingIdentity(token);
}
- if (mCameraRecordingProxy != 0) {
- mCameraRecordingProxy->asBinder()->unlinkToDeath(mDeathNotifier);
- mCameraRecordingProxy.clear();
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mCameraRecordingProxy != 0) {
+ IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
+ mCameraRecordingProxy.clear();
+ }
+ mCameraFlags = 0;
}
- mCameraFlags = 0;
}
status_t CameraSource::reset() {
ALOGD("reset: E");
- Mutex::Autolock autoLock(mLock);
- mStarted = false;
- mFrameAvailableCondition.signal();
- int64_t token;
- bool isTokenValid = false;
- if (mCamera != 0) {
- token = IPCThreadState::self()->clearCallingIdentity();
- isTokenValid = true;
- }
- releaseQueuedFrames();
- while (!mFramesBeingEncoded.empty()) {
- if (NO_ERROR !=
- mFrameCompleteCondition.waitRelative(mLock,
- mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
- ALOGW("Timed out waiting for outstanding frames being encoded: %d",
- mFramesBeingEncoded.size());
+ {
+ Mutex::Autolock autoLock(mLock);
+ mStarted = false;
+ mFrameAvailableCondition.signal();
+
+ int64_t token;
+ bool isTokenValid = false;
+ if (mCamera != 0) {
+ token = IPCThreadState::self()->clearCallingIdentity();
+ isTokenValid = true;
+ }
+ releaseQueuedFrames();
+ while (!mFramesBeingEncoded.empty()) {
+ if (NO_ERROR !=
+ mFrameCompleteCondition.waitRelative(mLock,
+ mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
+ ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
+ mFramesBeingEncoded.size());
+ }
+ }
+ stopCameraRecording();
+ if (isTokenValid) {
+ IPCThreadState::self()->restoreCallingIdentity(token);
}
- }
- stopCameraRecording();
- releaseCamera();
- if (isTokenValid) {
- IPCThreadState::self()->restoreCallingIdentity(token);
- }
- if (mCollectStats) {
- ALOGI("Frames received/encoded/dropped: %d/%d/%d in %lld us",
- mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
- mLastFrameTimestampUs - mFirstFrameTimeUs);
- }
+ if (mCollectStats) {
+ ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
+ mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
+ mLastFrameTimestampUs - mFirstFrameTimeUs);
+ }
- if (mNumGlitches > 0) {
- ALOGW("%d long delays between neighboring video frames", mNumGlitches);
+ if (mNumGlitches > 0) {
+ ALOGW("%d long delays between neighboring video frames", mNumGlitches);
+ }
+
+ CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
}
- CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
+ releaseCamera();
+
ALOGD("reset: X");
return OK;
}
@@ -785,11 +825,11 @@ status_t CameraSource::read(
mFrameAvailableCondition.waitRelative(mLock,
mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
if (mCameraRecordingProxy != 0 &&
- !mCameraRecordingProxy->asBinder()->isBinderAlive()) {
+ !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
ALOGW("camera recording proxy is gone");
return ERROR_END_OF_STREAM;
}
- ALOGW("Timed out waiting for incoming camera video frames: %lld us",
+ ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
mLastFrameTimestampUs);
}
}
@@ -812,10 +852,10 @@ status_t CameraSource::read(
void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
int32_t msgType, const sp<IMemory> &data) {
- ALOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
+ ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs);
Mutex::Autolock autoLock(mLock);
if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
- ALOGV("Drop frame at %lld/%lld us", timestampUs, mStartTimeUs);
+ ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs);
releaseOneRecordingFrame(data);
return;
}
@@ -854,7 +894,7 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
mFramesReceived.push_back(data);
int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
mFrameTimes.push_back(timeUs);
- ALOGV("initial delay: %lld, current time stamp: %lld",
+ ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
mStartTimeUs, timeUs);
mFrameAvailableCondition.signal();
}
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 5772316..0acd9d0 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"
@@ -79,13 +81,14 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
mSkipCurrentFrame(false) {
mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
- ALOGD("starting time lapse mode: %lld us",
+ ALOGD("starting time lapse mode: %" PRId64 " us",
mTimeBetweenFrameCaptureUs);
mVideoWidth = videoSize.width;
mVideoHeight = videoSize.height;
- if (!trySettingVideoSize(videoSize.width, videoSize.height)) {
+ if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
+ releaseCamera();
mInitCheck = NO_INIT;
}
@@ -134,7 +137,7 @@ bool CameraSourceTimeLapse::trySettingVideoSize(
}
bool videoSizeSupported = false;
- for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ for (size_t i = 0; i < supportedSizes.size(); ++i) {
int32_t pictureWidth = supportedSizes[i].width;
int32_t pictureHeight = supportedSizes[i].height;
@@ -231,7 +234,7 @@ sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(
return newMemory;
}
-bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
ALOGV("skipCurrentFrame");
if (mSkipCurrentFrame) {
mSkipCurrentFrame = false;
@@ -265,7 +268,7 @@ bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
// Really make sure that this video recording frame will not be dropped.
if (*timestampUs < mStartTimeUs) {
- ALOGI("set timestampUs to start time stamp %lld us", mStartTimeUs);
+ ALOGI("set timestampUs to start time stamp %" PRId64 " us", mStartTimeUs);
*timestampUs = mStartTimeUs;
}
return false;
diff --git a/media/libstagefright/ClockEstimator.cpp b/media/libstagefright/ClockEstimator.cpp
new file mode 100644
index 0000000..34d1e42
--- /dev/null
+++ b/media/libstagefright/ClockEstimator.cpp
@@ -0,0 +1,177 @@
+/*
+**
+** Copyright 2014, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ClockEstimator"
+#include <utils/Log.h>
+
+#include <math.h>
+#include <media/stagefright/ClockEstimator.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+WindowedLinearFitEstimator::WindowedLinearFitEstimator(
+ size_t headLength, double headFactor, size_t mainLength, double tailFactor)
+ : mHeadFactorInv(1. / headFactor),
+ mTailFactor(tailFactor),
+ mHistoryLength(mainLength + headLength),
+ mHeadLength(headLength) {
+ reset();
+ mXHistory.resize(mHistoryLength);
+ mYHistory.resize(mHistoryLength);
+ mFirstWeight = pow(headFactor, mHeadLength);
+}
+
+WindowedLinearFitEstimator::LinearFit::LinearFit() {
+ reset();
+}
+
+void WindowedLinearFitEstimator::LinearFit::reset() {
+ mX = mXX = mY = mYY = mXY = mW = 0.;
+}
+
+double WindowedLinearFitEstimator::LinearFit::size() const {
+ double s = mW * mW + mX * mX + mY * mY + mXX * mXX + mXY * mXY + mYY * mYY;
+ if (s > 1e72) {
+ // 1e72 corresponds to clock monotonic time of about 8 years
+ ALOGW("estimator is overflowing: w=%g x=%g y=%g xx=%g xy=%g yy=%g",
+ mW, mX, mY, mXX, mXY, mYY);
+ }
+ return s;
+}
+
+void WindowedLinearFitEstimator::LinearFit::add(double x, double y, double w) {
+ mW += w;
+ mX += w * x;
+ mY += w * y;
+ mXX += w * x * x;
+ mXY += w * x * y;
+ mYY += w * y * y;
+}
+
+void WindowedLinearFitEstimator::LinearFit::combine(const LinearFit &lf) {
+ mW += lf.mW;
+ mX += lf.mX;
+ mY += lf.mY;
+ mXX += lf.mXX;
+ mXY += lf.mXY;
+ mYY += lf.mYY;
+}
+
+void WindowedLinearFitEstimator::LinearFit::scale(double w) {
+ mW *= w;
+ mX *= w;
+ mY *= w;
+ mXX *= w;
+ mXY *= w;
+ mYY *= w;
+}
+
+double WindowedLinearFitEstimator::LinearFit::interpolate(double x) {
+ double div = mW * mXX - mX * mX;
+ if (fabs(div) < 1e-5 * mW * mW) {
+ // this only should happen on the first value
+ return x;
+ // assuming a = 1, we could also return x + (mY - mX) / mW;
+ }
+ double a_div = (mW * mXY - mX * mY);
+ double b_div = (mXX * mY - mX * mXY);
+ ALOGV("a=%.4g b=%.4g in=%g out=%g",
+ a_div / div, b_div / div, x, (a_div * x + b_div) / div);
+ return (a_div * x + b_div) / div;
+}
+
+double WindowedLinearFitEstimator::estimate(double x, double y) {
+ /*
+ * TODO: We could update the head by adding the new sample to it
+ * and amplifying it, but this approach can lead to unbounded
+ * error. Instead, we recalculate the head at each step, which
+ * is computationally more expensive. We could balance the two
+ * methods by recalculating just before the error becomes
+ * significant.
+ */
+ const bool update_head = false;
+ if (update_head) {
+ // add new sample to the head
+ mHead.scale(mHeadFactorInv); // amplify head
+ mHead.add(x, y, mFirstWeight);
+ }
+
+ /*
+ * TRICKY: place elements into the circular buffer at decreasing
+ * indices, so that we can access past elements by addition
+ * (thereby avoiding potentially negative indices.)
+ */
+ if (mNumSamples >= mHeadLength) {
+ // move last head sample from head to the main window
+ size_t lastHeadIx = (mSampleIx + mHeadLength) % mHistoryLength;
+ if (update_head) {
+ mHead.add(mXHistory[lastHeadIx], mYHistory[lastHeadIx], -1.); // remove
+ }
+ mMain.add(mXHistory[lastHeadIx], mYHistory[lastHeadIx], 1.);
+ if (mNumSamples >= mHistoryLength) {
+ // move last main sample from main window to tail
+ mMain.add(mXHistory[mSampleIx], mYHistory[mSampleIx], -1.); // remove
+ mTail.add(mXHistory[mSampleIx], mYHistory[mSampleIx], 1.);
+ mTail.scale(mTailFactor); // attenuate tail
+ }
+ }
+
+ mXHistory.editItemAt(mSampleIx) = x;
+ mYHistory.editItemAt(mSampleIx) = y;
+ if (mNumSamples < mHistoryLength) {
+ ++mNumSamples;
+ }
+
+ // recalculate head unless we were using the update method
+ if (!update_head) {
+ mHead.reset();
+ double w = mFirstWeight;
+ for (size_t headIx = 0; headIx < mHeadLength && headIx < mNumSamples; ++headIx) {
+ size_t ix = (mSampleIx + headIx) % mHistoryLength;
+ mHead.add(mXHistory[ix], mYHistory[ix], w);
+ w *= mHeadFactorInv;
+ }
+ }
+
+ if (mSampleIx > 0) {
+ --mSampleIx;
+ } else {
+ mSampleIx = mHistoryLength - 1;
+ }
+
+ // return estimation result
+ LinearFit total;
+ total.combine(mHead);
+ total.combine(mMain);
+ total.combine(mTail);
+ return total.interpolate(x);
+}
+
+void WindowedLinearFitEstimator::reset() {
+ mHead.reset();
+ mMain.reset();
+ mTail.reset();
+ mNumSamples = 0;
+ mSampleIx = mHistoryLength - 1;
+}
+
+}; // namespace android
+
+
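
The estimator above keeps three windows over the (x, y) sample history: the newest samples form an exponentially weighted head (recomputed on every call, per the TODO), the middle of the history gets unit weight, and samples leaving the main window are folded into a tail that is attenuated by tailFactor. The estimate is then a weighted least-squares line through the combined moments, which is what LinearFit::interpolate() computes. A minimal standalone sketch of that moment bookkeeping, with illustrative names that are not part of the patch:

    #include <cstdio>

    // Weighted least-squares line fit from running moments, mirroring
    // WindowedLinearFitEstimator::LinearFit (illustrative sketch only).
    struct MomentFit {
        double W = 0, X = 0, Y = 0, XX = 0, XY = 0;
        void add(double x, double y, double w = 1.0) {
            W += w; X += w * x; Y += w * y;
            XX += w * x * x; XY += w * x * y;
        }
        // y(x) = a*x + b with a = (W*XY - X*Y)/D, b = (XX*Y - X*XY)/D, D = W*XX - X*X
        double interpolate(double x) const {
            double div = W * XX - X * X;
            if (div == 0.0) return x;        // degenerate: only one sample so far
            double a = (W * XY - X * Y) / div;
            double b = (XX * Y - X * XY) / div;
            return a * x + b;
        }
    };

    int main() {
        MomentFit fit;
        for (int i = 0; i < 5; ++i) {
            fit.add(i, 2.0 * i + 1.0);       // samples on y = 2x + 1
        }
        printf("estimate at x=10: %g (expect 21)\n", fit.interpolate(10.0));
        return 0;
    }

Keeping only the running sums W, X, Y, XX, XY makes add(), combine() and scale() O(1) per sample, which is why the estimator never needs to store per-sample weights.
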
diff --git a/media/libstagefright/include/chromium_http_stub.h b/media/libstagefright/CodecBase.cpp
index e0651a4..f729d4d 100644
--- a/media/libstagefright/include/chromium_http_stub.h
+++ b/media/libstagefright/CodecBase.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,21 +14,25 @@
* limitations under the License.
*/
-#ifndef CHROMIUM_HTTP_STUB_H_
-#define CHROMIUM_HTTP_STUB_H_
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecBase"
-#include <include/HTTPBase.h>
-#include <media/stagefright/DataSource.h>
+#include <inttypes.h>
+
+#include <media/stagefright/CodecBase.h>
namespace android {
-extern "C" {
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags);
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
+CodecBase::CodecBase() {
+}
-DataSource *createDataUriSource(const char *uri);
+CodecBase::~CodecBase() {
}
+
+CodecBase::PortDescription::PortDescription() {
+}
+
+CodecBase::PortDescription::~PortDescription() {
}
-#endif
+} // namespace android
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 97987e2..f7dcf35 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -13,17 +13,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DataSource"
#include "include/AMRExtractor.h"
-#if CHROMIUM_AVAILABLE
-#include "include/chromium_http_stub.h"
-#endif
-
#include "include/AACExtractor.h"
#include "include/DRMExtractor.h"
#include "include/FLACExtractor.h"
#include "include/HTTPBase.h"
+#include "include/MidiExtractor.h"
#include "include/MP3Extractor.h"
#include "include/MPEG2PSExtractor.h"
#include "include/MPEG2TSExtractor.h"
@@ -35,10 +34,15 @@
#include "matroska/MatroskaExtractor.h"
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataURISource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaHTTP.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -169,6 +173,7 @@ void DataSource::RegisterDefaultSniffers() {
RegisterSniffer_l(SniffAAC);
RegisterSniffer_l(SniffMPEG2PS);
RegisterSniffer_l(SniffWVM);
+ RegisterSniffer_l(SniffMidi);
char value[PROPERTY_VALUE_MAX];
if (property_get("drm.service.enabled", value, NULL)
@@ -180,7 +185,15 @@ void DataSource::RegisterDefaultSniffers() {
// static
sp<DataSource> DataSource::CreateFromURI(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers,
+ String8 *contentType,
+ HTTPBase *httpSource) {
+ if (contentType != NULL) {
+ *contentType = "";
+ }
+
bool isWidevine = !strncasecmp("widevine://", uri, 11);
sp<DataSource> source;
@@ -189,7 +202,19 @@ sp<DataSource> DataSource::CreateFromURI(
} else if (!strncasecmp("http://", uri, 7)
|| !strncasecmp("https://", uri, 8)
|| isWidevine) {
- sp<HTTPBase> httpSource = HTTPBase::Create();
+ if (httpService == NULL) {
+ ALOGE("Invalid http service!");
+ return NULL;
+ }
+
+ if (httpSource == NULL) {
+ sp<IMediaHTTPConnection> conn = httpService->makeHTTPConnection();
+ if (conn == NULL) {
+ ALOGE("Failed to make http connection from http service!");
+ return NULL;
+ }
+ httpSource = new MediaHTTP(conn);
+ }
String8 tmp;
if (isWidevine) {
@@ -199,32 +224,38 @@ sp<DataSource> DataSource::CreateFromURI(
uri = tmp.string();
}
- if (httpSource->connect(uri, headers) != OK) {
+ String8 cacheConfig;
+ bool disconnectAtHighwatermark;
+ KeyedVector<String8, String8> nonCacheSpecificHeaders;
+ if (headers != NULL) {
+ nonCacheSpecificHeaders = *headers;
+ NuCachedSource2::RemoveCacheSpecificHeaders(
+ &nonCacheSpecificHeaders,
+ &cacheConfig,
+ &disconnectAtHighwatermark);
+ }
+
+ if (httpSource->connect(uri, &nonCacheSpecificHeaders) != OK) {
+ ALOGE("Failed to connect http source!");
return NULL;
}
if (!isWidevine) {
- String8 cacheConfig;
- bool disconnectAtHighwatermark;
- if (headers != NULL) {
- KeyedVector<String8, String8> copy = *headers;
- NuCachedSource2::RemoveCacheSpecificHeaders(
- &copy, &cacheConfig, &disconnectAtHighwatermark);
+ if (contentType != NULL) {
+ *contentType = httpSource->getMIMEType();
}
source = new NuCachedSource2(
httpSource,
- cacheConfig.isEmpty() ? NULL : cacheConfig.string());
+ cacheConfig.isEmpty() ? NULL : cacheConfig.string(),
+ disconnectAtHighwatermark);
} else {
// We do not want that prefetching, caching, datasource wrapper
// in the widevine:// case.
source = httpSource;
}
-
-# if CHROMIUM_AVAILABLE
} else if (!strncasecmp("data:", uri, 5)) {
- source = createDataUriSource(uri);
-#endif
+ source = DataURISource::Create(uri);
} else {
// Assume it's a filename.
source = new FileSource(uri);
@@ -237,6 +268,19 @@ sp<DataSource> DataSource::CreateFromURI(
return source;
}
+sp<DataSource> DataSource::CreateMediaHTTP(const sp<IMediaHTTPService> &httpService) {
+ if (httpService == NULL) {
+ return NULL;
+ }
+
+ sp<IMediaHTTPConnection> conn = httpService->makeHTTPConnection();
+ if (conn == NULL) {
+ return NULL;
+ } else {
+ return new MediaHTTP(conn);
+ }
+}
+
String8 DataSource::getMIMEType() const {
return String8("application/octet-stream");
}
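
With this change CreateFromURI() no longer builds its own HTTP stack: the caller supplies an IMediaHTTPService, cache-control headers are stripped before connecting, and the server's MIME type can be reported back. A hedged caller-side sketch, buildable only inside the Android tree and assuming an sp<IMediaHTTPService> obtained from the client:

    #include <media/IMediaHTTPService.h>
    #include <media/stagefright/DataSource.h>
    #include <utils/String8.h>

    using namespace android;

    // Sketch of a caller adapting to the new CreateFromURI() signature.
    // httpService is assumed to come from the media client's binder.
    static sp<DataSource> openClip(
            const sp<IMediaHTTPService> &httpService, const char *uri) {
        String8 contentType;
        sp<DataSource> source = DataSource::CreateFromURI(
                httpService, uri, NULL /* headers */, &contentType,
                NULL /* httpSource */);
        // For http(s) URIs the source comes back wrapped in a NuCachedSource2;
        // widevine:// sources and plain files are returned unwrapped.
        return source;
    }
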
diff --git a/media/libstagefright/DataURISource.cpp b/media/libstagefright/DataURISource.cpp
new file mode 100644
index 0000000..2c39314
--- /dev/null
+++ b/media/libstagefright/DataURISource.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/DataURISource.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+
+namespace android {
+
+// static
+sp<DataURISource> DataURISource::Create(const char *uri) {
+ if (strncasecmp("data:", uri, 5)) {
+ return NULL;
+ }
+
+ char *commaPos = strrchr(uri, ',');
+
+ if (commaPos == NULL) {
+ return NULL;
+ }
+
+ sp<ABuffer> buffer;
+
+ AString tmp(&uri[5], commaPos - &uri[5]);
+
+ if (tmp.endsWith(";base64")) {
+ AString encoded(commaPos + 1);
+
+ // Strip CR and LF...
+ for (size_t i = encoded.size(); i-- > 0;) {
+ if (encoded.c_str()[i] == '\r' || encoded.c_str()[i] == '\n') {
+ encoded.erase(i, 1);
+ }
+ }
+
+ buffer = decodeBase64(encoded);
+
+ if (buffer == NULL) {
+ ALOGE("Malformed base64 encoded content found.");
+ return NULL;
+ }
+ } else {
+#if 0
+ size_t dataLen = strlen(uri) - tmp.size() - 6;
+ buffer = new ABuffer(dataLen);
+ memcpy(buffer->data(), commaPos + 1, dataLen);
+
+ // unescape
+#else
+ // MediaPlayer doesn't care for this right now as we don't
+ // play any text-based media.
+ return NULL;
+#endif
+ }
+
+ // We don't really care about charset or mime type.
+
+ return new DataURISource(buffer);
+}
+
+DataURISource::DataURISource(const sp<ABuffer> &buffer)
+ : mBuffer(buffer) {
+}
+
+DataURISource::~DataURISource() {
+}
+
+status_t DataURISource::initCheck() const {
+ return OK;
+}
+
+ssize_t DataURISource::readAt(off64_t offset, void *data, size_t size) {
+ if ((offset < 0) || (offset >= (off64_t)mBuffer->size())) {
+ return 0;
+ }
+
+ size_t copy = mBuffer->size() - offset;
+ if (copy > size) {
+ copy = size;
+ }
+
+ memcpy(data, mBuffer->data() + offset, copy);
+
+ return copy;
+}
+
+status_t DataURISource::getSize(off64_t *size) {
+ *size = mBuffer->size();
+
+ return OK;
+}
+
+} // namespace android
+
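
DataURISource accepts only base64 payloads (the non-base64 branch is compiled out), strips CR/LF from the encoded body, and serves the decoded bytes through readAt(). A short caller-side sketch, buildable only inside the Android tree; the URI below is illustrative:

    #include <media/stagefright/DataURISource.h>

    using namespace android;

    // "SGVsbG8=" is base64 for "Hello" (5 bytes).
    static void readDataUri() {
        const char *uri = "data:text/plain;base64,SGVsbG8=";
        sp<DataURISource> source = DataURISource::Create(uri);
        if (source == NULL) {
            return;                        // not a data: URI, or malformed base64
        }
        char buf[16] = {};
        ssize_t n = source->readAt(0 /* offset */, buf, sizeof(buf) - 1);
        // n == 5 and buf == "Hello" for the payload above
        (void)n;
    }
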
diff --git a/media/libstagefright/ESDS.cpp b/media/libstagefright/ESDS.cpp
index 4a0c35c..427bf7b 100644
--- a/media/libstagefright/ESDS.cpp
+++ b/media/libstagefright/ESDS.cpp
@@ -91,7 +91,7 @@ status_t ESDS::skipDescriptorHeader(
}
while (more);
- ALOGV("tag=0x%02x data_size=%d", *tag, *data_size);
+ ALOGV("tag=0x%02x data_size=%zu", *tag, *data_size);
if (*data_size > size) {
return ERROR_MALFORMED;
diff --git a/media/libstagefright/FLACExtractor.cpp b/media/libstagefright/FLACExtractor.cpp
index 098fcf9..fa7251c 100644
--- a/media/libstagefright/FLACExtractor.cpp
+++ b/media/libstagefright/FLACExtractor.cpp
@@ -208,55 +208,55 @@ private:
// with the same parameter list, but discard redundant information.
FLAC__StreamDecoderReadStatus FLACParser::read_callback(
- const FLAC__StreamDecoder *decoder, FLAC__byte buffer[],
+ const FLAC__StreamDecoder * /* decoder */, FLAC__byte buffer[],
size_t *bytes, void *client_data)
{
return ((FLACParser *) client_data)->readCallback(buffer, bytes);
}
FLAC__StreamDecoderSeekStatus FLACParser::seek_callback(
- const FLAC__StreamDecoder *decoder,
+ const FLAC__StreamDecoder * /* decoder */,
FLAC__uint64 absolute_byte_offset, void *client_data)
{
return ((FLACParser *) client_data)->seekCallback(absolute_byte_offset);
}
FLAC__StreamDecoderTellStatus FLACParser::tell_callback(
- const FLAC__StreamDecoder *decoder,
+ const FLAC__StreamDecoder * /* decoder */,
FLAC__uint64 *absolute_byte_offset, void *client_data)
{
return ((FLACParser *) client_data)->tellCallback(absolute_byte_offset);
}
FLAC__StreamDecoderLengthStatus FLACParser::length_callback(
- const FLAC__StreamDecoder *decoder,
+ const FLAC__StreamDecoder * /* decoder */,
FLAC__uint64 *stream_length, void *client_data)
{
return ((FLACParser *) client_data)->lengthCallback(stream_length);
}
FLAC__bool FLACParser::eof_callback(
- const FLAC__StreamDecoder *decoder, void *client_data)
+ const FLAC__StreamDecoder * /* decoder */, void *client_data)
{
return ((FLACParser *) client_data)->eofCallback();
}
FLAC__StreamDecoderWriteStatus FLACParser::write_callback(
- const FLAC__StreamDecoder *decoder, const FLAC__Frame *frame,
+ const FLAC__StreamDecoder * /* decoder */, const FLAC__Frame *frame,
const FLAC__int32 * const buffer[], void *client_data)
{
return ((FLACParser *) client_data)->writeCallback(frame, buffer);
}
void FLACParser::metadata_callback(
- const FLAC__StreamDecoder *decoder,
+ const FLAC__StreamDecoder * /* decoder */,
const FLAC__StreamMetadata *metadata, void *client_data)
{
((FLACParser *) client_data)->metadataCallback(metadata);
}
void FLACParser::error_callback(
- const FLAC__StreamDecoder *decoder,
+ const FLAC__StreamDecoder * /* decoder */,
FLAC__StreamDecoderErrorStatus status, void *client_data)
{
((FLACParser *) client_data)->errorCallback(status);
@@ -380,15 +380,21 @@ void FLACParser::errorCallback(FLAC__StreamDecoderErrorStatus status)
// Copy samples from FLAC native 32-bit non-interleaved to 16-bit interleaved.
// These are candidates for optimization if needed.
-static void copyMono8(short *dst, const int *const *src, unsigned nSamples, unsigned nChannels)
-{
+static void copyMono8(
+ short *dst,
+ const int *const *src,
+ unsigned nSamples,
+ unsigned /* nChannels */) {
for (unsigned i = 0; i < nSamples; ++i) {
*dst++ = src[0][i] << 8;
}
}
-static void copyStereo8(short *dst, const int *const *src, unsigned nSamples, unsigned nChannels)
-{
+static void copyStereo8(
+ short *dst,
+ const int *const *src,
+ unsigned nSamples,
+ unsigned /* nChannels */) {
for (unsigned i = 0; i < nSamples; ++i) {
*dst++ = src[0][i] << 8;
*dst++ = src[1][i] << 8;
@@ -404,15 +410,21 @@ static void copyMultiCh8(short *dst, const int *const *src, unsigned nSamples, u
}
}
-static void copyMono16(short *dst, const int *const *src, unsigned nSamples, unsigned nChannels)
-{
+static void copyMono16(
+ short *dst,
+ const int *const *src,
+ unsigned nSamples,
+ unsigned /* nChannels */) {
for (unsigned i = 0; i < nSamples; ++i) {
*dst++ = src[0][i];
}
}
-static void copyStereo16(short *dst, const int *const *src, unsigned nSamples, unsigned nChannels)
-{
+static void copyStereo16(
+ short *dst,
+ const int *const *src,
+ unsigned nSamples,
+ unsigned /* nChannels */) {
for (unsigned i = 0; i < nSamples; ++i) {
*dst++ = src[0][i];
*dst++ = src[1][i];
@@ -430,15 +442,21 @@ static void copyMultiCh16(short *dst, const int *const *src, unsigned nSamples,
// 24-bit versions should do dithering or noise-shaping, here or in AudioFlinger
-static void copyMono24(short *dst, const int *const *src, unsigned nSamples, unsigned nChannels)
-{
+static void copyMono24(
+ short *dst,
+ const int *const *src,
+ unsigned nSamples,
+ unsigned /* nChannels */) {
for (unsigned i = 0; i < nSamples; ++i) {
*dst++ = src[0][i] >> 8;
}
}
-static void copyStereo24(short *dst, const int *const *src, unsigned nSamples, unsigned nChannels)
-{
+static void copyStereo24(
+ short *dst,
+ const int *const *src,
+ unsigned nSamples,
+ unsigned /* nChannels */) {
for (unsigned i = 0; i < nSamples; ++i) {
*dst++ = src[0][i] >> 8;
*dst++ = src[1][i] >> 8;
@@ -454,8 +472,11 @@ static void copyMultiCh24(short *dst, const int *const *src, unsigned nSamples,
}
}
-static void copyTrespass(short *dst, const int *const *src, unsigned nSamples, unsigned nChannels)
-{
+static void copyTrespass(
+ short * /* dst */,
+ const int *const * /* src */,
+ unsigned /* nSamples */,
+ unsigned /* nChannels */) {
TRESPASS();
}
@@ -700,7 +721,7 @@ FLACSource::~FLACSource()
}
}
-status_t FLACSource::start(MetaData *params)
+status_t FLACSource::start(MetaData * /* params */)
{
ALOGV("FLACSource::start");
@@ -792,8 +813,7 @@ sp<MediaSource> FLACExtractor::getTrack(size_t index)
}
sp<MetaData> FLACExtractor::getTrackMetaData(
- size_t index, uint32_t flags)
-{
+ size_t index, uint32_t /* flags */) {
if (mInitCheck != OK || index > 0) {
return NULL;
}
diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp
index 5fa4b6f..0c2ff15 100644
--- a/media/libstagefright/HTTPBase.cpp
+++ b/media/libstagefright/HTTPBase.cpp
@@ -20,17 +20,13 @@
#include "include/HTTPBase.h"
-#if CHROMIUM_AVAILABLE
-#include "include/chromium_http_stub.h"
-#endif
-
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <cutils/properties.h>
#include <cutils/qtaguid.h>
-#include <ConnectivityManager.h>
+#include <NetdClient.h>
namespace android {
@@ -41,33 +37,7 @@ HTTPBase::HTTPBase()
mPrevBandwidthMeasureTimeUs(0),
mPrevEstimatedBandWidthKbps(0),
mBandWidthCollectFreqMs(5000),
- mUIDValid(false),
- mUID(0) {
-}
-
-// static
-sp<HTTPBase> HTTPBase::Create(uint32_t flags) {
-#if CHROMIUM_AVAILABLE
- HTTPBase *dataSource = createChromiumHTTPDataSource(flags);
- if (dataSource) {
- return dataSource;
- }
-#endif
- {
- TRESPASS();
-
- return NULL;
- }
-}
-
-// static
-status_t HTTPBase::UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
-#if CHROMIUM_AVAILABLE
- return UpdateChromiumHTTPDataSourceProxyConfig(host, port, exclusionList);
-#else
- return INVALID_OPERATION;
-#endif
+ mMaxBandwidthHistoryItems(100) {
}
void HTTPBase::addBandwidthMeasurement(
@@ -81,7 +51,7 @@ void HTTPBase::addBandwidthMeasurement(
mTotalTransferBytes += numBytes;
mBandwidthHistory.push_back(entry);
- if (++mNumBandwidthHistoryItems > 100) {
+ if (++mNumBandwidthHistoryItems > mMaxBandwidthHistoryItems) {
BandwidthEntry *entry = &*mBandwidthHistory.begin();
mTotalTransferTimeUs -= entry->mDelayUs;
mTotalTransferBytes -= entry->mNumBytes;
@@ -135,19 +105,8 @@ status_t HTTPBase::setBandwidthStatCollectFreq(int32_t freqMs) {
return OK;
}
-void HTTPBase::setUID(uid_t uid) {
- mUIDValid = true;
- mUID = uid;
-}
-
-bool HTTPBase::getUID(uid_t *uid) const {
- if (!mUIDValid) {
- return false;
- }
-
- *uid = mUID;
-
- return true;
+void HTTPBase::setBandwidthHistorySize(size_t numHistoryItems) {
+ mMaxBandwidthHistoryItems = numHistoryItems;
}
// static
@@ -168,7 +127,7 @@ void HTTPBase::UnRegisterSocketUserTag(int sockfd) {
// static
void HTTPBase::RegisterSocketUserMark(int sockfd, uid_t uid) {
- ConnectivityManager::markSocketAsUser(sockfd, uid);
+ setNetworkForUser(uid, sockfd);
}
// static
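
addBandwidthMeasurement() keeps a bounded history of (delay, bytes) samples together with running totals, and the bound is now configurable through setBandwidthHistorySize() instead of being hard-coded to 100. A minimal standalone sketch of that sliding-window bookkeeping; the class below is illustrative, not the HTTPBase implementation:

    #include <cstdint>
    #include <cstdio>
    #include <deque>

    // Running totals are updated as samples enter and leave the window,
    // so each bandwidth estimate is O(1) per sample.
    class BandwidthWindow {
    public:
        explicit BandwidthWindow(size_t maxItems) : mMaxItems(maxItems) {}

        void add(int64_t delayUs, size_t numBytes) {
            mTotalTimeUs += delayUs;
            mTotalBytes += (int64_t)numBytes;
            mHistory.push_back({delayUs, numBytes});
            if (mHistory.size() > mMaxItems) {
                mTotalTimeUs -= mHistory.front().delayUs;
                mTotalBytes -= (int64_t)mHistory.front().numBytes;
                mHistory.pop_front();
            }
        }

        // Estimated bandwidth over the window, in kilobits per second.
        bool estimateKbps(int32_t *kbps) const {
            if (mTotalTimeUs == 0) return false;
            *kbps = (int32_t)((mTotalBytes * 8000) / mTotalTimeUs);
            return true;
        }

    private:
        struct Sample { int64_t delayUs; size_t numBytes; };
        std::deque<Sample> mHistory;
        size_t mMaxItems;
        int64_t mTotalTimeUs = 0;
        int64_t mTotalBytes = 0;
    };

    int main() {
        BandwidthWindow win(100);
        win.add(20000 /* us */, 50000 /* bytes */);   // 50 KB in 20 ms
        int32_t kbps;
        if (win.estimateKbps(&kbps)) {
            printf("~%d kbps\n", kbps);               // 50000*8000/20000 = 20000
        }
        return 0;
    }
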
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index 380dab4..4a63152 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -398,7 +398,8 @@ sp<MediaSource> MP3Extractor::getTrack(size_t index) {
mSeeker);
}
-sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) {
+sp<MetaData> MP3Extractor::getTrackMetaData(
+ size_t index, uint32_t /* flags */) {
if (mInitCheck != OK || index != 0) {
return NULL;
}
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index c9ed5bb..9856f92 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -555,7 +555,7 @@ status_t MPEG2TSWriter::addSource(const sp<MediaSource> &source) {
return OK;
}
-status_t MPEG2TSWriter::start(MetaData *param) {
+status_t MPEG2TSWriter::start(MetaData * /* param */) {
CHECK(!mStarted);
mStarted = true;
@@ -596,7 +596,8 @@ bool MPEG2TSWriter::reachedEOS() {
return !mStarted || (mNumSourcesDone == mSources.size() ? true : false);
}
-status_t MPEG2TSWriter::dump(int fd, const Vector<String16> &args) {
+status_t MPEG2TSWriter::dump(
+ int /* fd */, const Vector<String16> & /* args */) {
return OK;
}
@@ -681,7 +682,7 @@ void MPEG2TSWriter::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- ALOGV("writing access unit at time %.2f secs (index %d)",
+ ALOGV("writing access unit at time %.2f secs (index %zu)",
minTimeUs / 1E6, minIndex);
source = mSources.editItemAt(minIndex);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 6a33ce6..3dd8b11 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -16,17 +16,19 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MPEG4Extractor"
-#include <utils/Log.h>
-
-#include "include/MPEG4Extractor.h"
-#include "include/SampleTable.h"
-#include "include/ESDS.h"
#include <ctype.h>
+#include <inttypes.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
+#include <utils/Log.h>
+
+#include "include/MPEG4Extractor.h"
+#include "include/SampleTable.h"
+#include "include/ESDS.h"
+
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -46,11 +48,13 @@ namespace android {
class MPEG4Source : public MediaSource {
public:
// Caller retains ownership of both "dataSource" and "sampleTable".
- MPEG4Source(const sp<MetaData> &format,
+ MPEG4Source(const sp<MPEG4Extractor> &owner,
+ const sp<MetaData> &format,
const sp<DataSource> &dataSource,
int32_t timeScale,
const sp<SampleTable> &sampleTable,
Vector<SidxEntry> &sidx,
+ const Trex *trex,
off64_t firstMoofOffset);
virtual status_t start(MetaData *params = NULL);
@@ -67,6 +71,8 @@ protected:
private:
Mutex mLock;
+ // keep the MPEG4Extractor around, since we're referencing its data
+ sp<MPEG4Extractor> mOwner;
sp<MetaData> mFormat;
sp<DataSource> mDataSource;
int32_t mTimescale;
@@ -74,6 +80,7 @@ private:
uint32_t mCurrentSampleIndex;
uint32_t mCurrentFragmentIndex;
Vector<SidxEntry> &mSegments;
+ const Trex *mTrex;
off64_t mFirstMoofOffset;
off64_t mCurrentMoofOffset;
off64_t mNextMoofOffset;
@@ -95,6 +102,7 @@ private:
uint64_t* mCurrentSampleInfoOffsets;
bool mIsAVC;
+ bool mIsHEVC;
size_t mNALLengthSize;
bool mStarted;
@@ -140,6 +148,7 @@ private:
off64_t offset;
size_t size;
uint32_t duration;
+ int32_t compositionOffset;
uint8_t iv[16];
Vector<size_t> clearsizes;
Vector<size_t> encryptedsizes;
@@ -256,6 +265,8 @@ status_t MPEG4DataSource::setCachedRange(off64_t offset, size_t size) {
////////////////////////////////////////////////////////////////////////////////
+static const bool kUseHexDump = false;
+
static void hexdump(const void *_data, size_t size) {
const uint8_t *data = (const uint8_t *)_data;
size_t offset = 0;
@@ -317,6 +328,9 @@ static const char *FourCC2MIME(uint32_t fourcc) {
case FOURCC('a', 'v', 'c', '1'):
return MEDIA_MIMETYPE_VIDEO_AVC;
+ case FOURCC('h', 'v', 'c', '1'):
+ case FOURCC('h', 'e', 'v', '1'):
+ return MEDIA_MIMETYPE_VIDEO_HEVC;
default:
CHECK(!"should not be here.");
return NULL;
@@ -339,8 +353,7 @@ static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t
}
MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source)
- : mSidxDuration(0),
- mMoofOffset(0),
+ : mMoofOffset(0),
mDataSource(source),
mInitCheck(NO_INIT),
mHasVideo(false),
@@ -365,7 +378,7 @@ MPEG4Extractor::~MPEG4Extractor() {
SINF *sinf = mFirstSINF;
while (sinf) {
SINF *next = sinf->next;
- delete sinf->IPMPData;
+ delete[] sinf->IPMPData;
delete sinf;
sinf = next;
}
@@ -405,7 +418,7 @@ size_t MPEG4Extractor::countTracks() {
track = track->next;
}
- ALOGV("MPEG4Extractor::countTracks: %d tracks", n);
+ ALOGV("MPEG4Extractor::countTracks: %zu tracks", n);
return n;
}
@@ -478,8 +491,18 @@ status_t MPEG4Extractor::readMetaData() {
off64_t offset = 0;
status_t err;
while (true) {
+ off64_t orig_offset = offset;
err = parseChunk(&offset, 0);
- if (err == OK) {
+
+ if (err != OK && err != UNKNOWN_ERROR) {
+ break;
+ } else if (offset <= orig_offset) {
+ // only continue parsing if the offset was advanced,
+ // otherwise we might end up in an infinite loop
+ ALOGE("did not advance: 0x%lld->0x%lld", orig_offset, offset);
+ err = ERROR_MALFORMED;
+ break;
+ } else if (err == OK) {
continue;
}
@@ -488,12 +511,12 @@ status_t MPEG4Extractor::readMetaData() {
break;
}
uint32_t chunk_type = ntohl(hdr[1]);
- if (chunk_type == FOURCC('s', 'i', 'd', 'x')) {
- // parse the sidx box too
- continue;
- } else if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
// store the offset of the first segment
mMoofOffset = offset;
+ } else if (chunk_type != FOURCC('m', 'd', 'a', 't')) {
+ // keep parsing until we get to the data
+ continue;
}
break;
}
@@ -505,8 +528,6 @@ status_t MPEG4Extractor::readMetaData() {
} else {
mFileMetaData->setCString(kKeyMIMEType, "audio/mp4");
}
-
- mInitCheck = OK;
} else {
mInitCheck = err;
}
@@ -571,7 +592,8 @@ static int32_t readSize(off64_t offset,
return size;
}
-status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
+status_t MPEG4Extractor::parseDrmSINF(
+ off64_t * /* offset */, off64_t data_offset) {
uint8_t updateIdTag;
if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) {
return ERROR_IO;
@@ -587,7 +609,6 @@ status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
if (size < 0) {
return ERROR_IO;
}
- int32_t classSize = size;
data_offset += numOfBytes;
while(size >= 11 ) {
@@ -648,7 +669,6 @@ status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
if (size < 0) {
return ERROR_IO;
}
- classSize = size;
data_offset += numOfBytes;
while (size > 0) {
@@ -682,7 +702,10 @@ status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
return ERROR_MALFORMED;
}
sinf->len = dataLen - 3;
- sinf->IPMPData = new char[sinf->len];
+ sinf->IPMPData = new (std::nothrow) char[sinf->len];
+ if (sinf->IPMPData == NULL) {
+ return ERROR_MALFORMED;
+ }
data_offset += 2;
if (mDataSource->readAt(data_offset, sinf->IPMPData, sinf->len) < sinf->len) {
@@ -743,7 +766,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
uint64_t chunk_size = ntohl(hdr[0]);
- uint32_t chunk_type = ntohl(hdr[1]);
+ int32_t chunk_type = ntohl(hdr[1]);
off64_t data_offset = *offset + 8;
if (chunk_size == 1) {
@@ -757,8 +780,25 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
// The smallest valid chunk is 16 bytes long in this case.
return ERROR_MALFORMED;
}
+ } else if (chunk_size == 0) {
+ if (depth == 0) {
+ // atom extends to end of file
+ off64_t sourceSize;
+ if (mDataSource->getSize(&sourceSize) == OK) {
+ chunk_size = (sourceSize - *offset);
+ } else {
+ // XXX could we just pick a "sufficiently large" value here?
+ ALOGE("atom size is 0, and data source has no size");
+ return ERROR_MALFORMED;
+ }
+ } else {
+ // not allowed for non-toplevel atoms, skip it
+ *offset += 4;
+ return OK;
+ }
} else if (chunk_size < 8) {
// The smallest valid chunk is 8 bytes long.
+ ALOGE("invalid chunk size: %" PRIu64, chunk_size);
return ERROR_MALFORMED;
}
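
The parser now handles a 64-bit size (size field == 1), a size of 0 at the top level (box extends to end of file), and undersized boxes explicitly, and readMetaData() above bails out whenever parseChunk() fails to advance the offset, so a zero-length box can no longer spin forever. A standalone sketch of that top-level walk, reading from a plain buffer instead of a DataSource; names are illustrative:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Illustrative top-level ISO-BMFF box walk with the same safeguards:
    // 64-bit sizes, size == 0 meaning "to end of file", a minimum-size
    // check, and a defensive forward-progress check.
    bool walkBoxes(const uint8_t *data, uint64_t fileSize) {
        uint64_t offset = 0;
        while (offset + 8 <= fileSize) {
            uint64_t prev = offset;
            uint32_t size32 = ((uint32_t)data[offset] << 24) |
                              ((uint32_t)data[offset + 1] << 16) |
                              ((uint32_t)data[offset + 2] << 8) |
                              (uint32_t)data[offset + 3];
            char type[5] = {};
            memcpy(type, &data[offset + 4], 4);

            uint64_t boxSize = size32;
            uint64_t headerSize = 8;
            if (size32 == 1) {                    // 64-bit "largesize" follows
                if (offset + 16 > fileSize) return false;
                boxSize = 0;
                for (int i = 0; i < 8; ++i) {
                    boxSize = (boxSize << 8) | data[offset + 8 + i];
                }
                headerSize = 16;
            } else if (size32 == 0) {             // box extends to end of file
                boxSize = fileSize - offset;
            }
            if (boxSize < headerSize) {
                return false;                     // smaller than its own header
            }

            printf("box '%s', %llu bytes\n", type, (unsigned long long)boxSize);
            offset += boxSize;
            if (offset <= prev) {
                return false;                     // defensive: never loop in place
            }
        }
        return true;
    }

    int main() {
        const uint8_t file[] = { 0, 0, 0, 8, 'f', 'r', 'e', 'e' };  // one 'free' box
        return walkBoxes(file, sizeof(file)) ? 0 : 1;
    }
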
@@ -766,23 +806,23 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
MakeFourCCString(chunk_type, chunk);
ALOGV("chunk: %s @ %lld, %d", chunk, *offset, depth);
-#if 0
- static const char kWhitespace[] = " ";
- const char *indent = &kWhitespace[sizeof(kWhitespace) - 1 - 2 * depth];
- printf("%sfound chunk '%s' of size %lld\n", indent, chunk, chunk_size);
+ if (kUseHexDump) {
+ static const char kWhitespace[] = " ";
+ const char *indent = &kWhitespace[sizeof(kWhitespace) - 1 - 2 * depth];
+ printf("%sfound chunk '%s' of size %" PRIu64 "\n", indent, chunk, chunk_size);
- char buffer[256];
- size_t n = chunk_size;
- if (n > sizeof(buffer)) {
- n = sizeof(buffer);
- }
- if (mDataSource->readAt(*offset, buffer, n)
- < (ssize_t)n) {
- return ERROR_IO;
- }
+ char buffer[256];
+ size_t n = chunk_size;
+ if (n > sizeof(buffer)) {
+ n = sizeof(buffer);
+ }
+ if (mDataSource->readAt(*offset, buffer, n)
+ < (ssize_t)n) {
+ return ERROR_IO;
+ }
- hexdump(buffer, n);
-#endif
+ hexdump(buffer, n);
+ }
PathAdder autoAdder(&mPath, chunk_type);
@@ -825,7 +865,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('e', 'd', 't', 's'):
{
if (chunk_type == FOURCC('s', 't', 'b', 'l')) {
- ALOGV("sampleTable chunk is %d bytes long.", (size_t)chunk_size);
+ ALOGV("sampleTable chunk is %" PRIu64 " bytes long.", chunk_size);
if (mDataSource->flags()
& (DataSource::kWantsPrefetching
@@ -912,6 +952,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('e', 'l', 's', 't'):
{
+ *offset += chunk_size;
+
// See 14496-12 8.6.6
uint8_t version;
if (mDataSource->readAt(data_offset, &version, 1) < 1) {
@@ -957,6 +999,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
int64_t duration;
int32_t samplerate;
+ if (!mLastTrack) {
+ return ERROR_MALFORMED;
+ }
if (mLastTrack->meta->findInt64(kKeyDuration, &duration) &&
mLastTrack->meta->findInt32(kKeySampleRate, &samplerate)) {
@@ -974,12 +1019,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyEncoderPadding, paddingsamples);
}
}
- *offset += chunk_size;
break;
}
case FOURCC('f', 'r', 'm', 'a'):
{
+ *offset += chunk_size;
+
uint32_t original_fourcc;
if (mDataSource->readAt(data_offset, &original_fourcc, 4) < 4) {
return ERROR_IO;
@@ -993,12 +1039,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
}
- *offset += chunk_size;
break;
}
case FOURCC('t', 'e', 'n', 'c'):
{
+ *offset += chunk_size;
+
if (chunk_size < 32) {
return ERROR_MALFORMED;
}
@@ -1043,23 +1090,25 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId);
mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize);
mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16);
- *offset += chunk_size;
break;
}
case FOURCC('t', 'k', 'h', 'd'):
{
+ *offset += chunk_size;
+
status_t err;
if ((err = parseTrackHeader(data_offset, chunk_data_size)) != OK) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('p', 's', 's', 'h'):
{
+ *offset += chunk_size;
+
PsshInfo pssh;
if (mDataSource->readAt(data_offset + 4, &pssh.uuid, 16) < 16) {
@@ -1077,7 +1126,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_MALFORMED;
}
- pssh.data = new uint8_t[pssh.datalen];
+ pssh.data = new (std::nothrow) uint8_t[pssh.datalen];
+ if (pssh.data == NULL) {
+ return ERROR_MALFORMED;
+ }
ALOGV("allocated pssh @ %p", pssh.data);
ssize_t requested = (ssize_t) pssh.datalen;
if (mDataSource->readAt(data_offset + 24, pssh.data, requested) < requested) {
@@ -1085,13 +1137,14 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
mPssh.push_back(pssh);
- *offset += chunk_size;
break;
}
case FOURCC('m', 'd', 'h', 'd'):
{
- if (chunk_data_size < 4) {
+ *offset += chunk_size;
+
+ if (chunk_data_size < 4 || mLastTrack == NULL) {
return ERROR_MALFORMED;
}
@@ -1119,8 +1172,15 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
+ if (!timescale) {
+ ALOGE("timescale should not be ZERO.");
+ return ERROR_MALFORMED;
+ }
+
mLastTrack->timescale = ntohl(timescale);
+ // 14496-12 says all ones means indeterminate, but some files seem to use
+ // 0 instead. We treat both the same.
int64_t duration = 0;
if (version == 1) {
if (mDataSource->readAt(
@@ -1128,7 +1188,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
< (ssize_t)sizeof(duration)) {
return ERROR_IO;
}
- duration = ntoh64(duration);
+ if (duration != -1) {
+ duration = ntoh64(duration);
+ }
} else {
uint32_t duration32;
if (mDataSource->readAt(
@@ -1136,13 +1198,14 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
< (ssize_t)sizeof(duration32)) {
return ERROR_IO;
}
- // ffmpeg sets duration to -1, which is incorrect.
if (duration32 != 0xffffffff) {
duration = ntohl(duration32);
}
}
- mLastTrack->meta->setInt64(
- kKeyDuration, (duration * 1000000) / mLastTrack->timescale);
+ if (duration != 0) {
+ mLastTrack->meta->setInt64(
+ kKeyDuration, (duration * 1000000) / mLastTrack->timescale);
+ }
uint8_t lang[2];
off64_t lang_offset;
@@ -1171,7 +1234,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setCString(
kKeyMediaLanguage, lang_code);
- *offset += chunk_size;
break;
}
@@ -1244,7 +1306,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
- uint16_t data_ref_index = U16_AT(&buffer[6]);
+ uint16_t data_ref_index __unused = U16_AT(&buffer[6]);
uint32_t num_channels = U16_AT(&buffer[16]);
uint16_t sample_size = U16_AT(&buffer[18]);
@@ -1281,6 +1343,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('H', '2', '6', '3'):
case FOURCC('h', '2', '6', '3'):
case FOURCC('a', 'v', 'c', '1'):
+ case FOURCC('h', 'v', 'c', '1'):
+ case FOURCC('h', 'e', 'v', '1'):
{
mHasVideo = true;
@@ -1295,7 +1359,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_IO;
}
- uint16_t data_ref_index = U16_AT(&buffer[6]);
+ uint16_t data_ref_index __unused = U16_AT(&buffer[6]);
uint16_t width = U16_AT(&buffer[6 + 18]);
uint16_t height = U16_AT(&buffer[6 + 20]);
@@ -1338,11 +1402,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->sampleTable->setChunkOffsetParams(
chunk_type, data_offset, chunk_data_size);
+ *offset += chunk_size;
+
if (err != OK) {
return err;
}
- *offset += chunk_size;
break;
}
@@ -1352,11 +1417,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->sampleTable->setSampleToChunkParams(
data_offset, chunk_data_size);
+ *offset += chunk_size;
+
if (err != OK) {
return err;
}
- *offset += chunk_size;
break;
}
@@ -1367,6 +1433,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->sampleTable->setSampleSizeParams(
chunk_type, data_offset, chunk_data_size);
+ *offset += chunk_size;
+
if (err != OK) {
return err;
}
@@ -1407,7 +1475,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size);
}
- *offset += chunk_size;
// NOTE: setting another piece of metadata invalidates any pointers (such as the
// mimetype) previously obtained, so don't cache them.
@@ -1431,6 +1498,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 't', 't', 's'):
{
+ *offset += chunk_size;
+
status_t err =
mLastTrack->sampleTable->setTimeToSampleParams(
data_offset, chunk_data_size);
@@ -1439,12 +1508,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('c', 't', 't', 's'):
{
+ *offset += chunk_size;
+
status_t err =
mLastTrack->sampleTable->setCompositionTimeToSampleParams(
data_offset, chunk_data_size);
@@ -1453,12 +1523,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('s', 't', 's', 's'):
{
+ *offset += chunk_size;
+
status_t err =
mLastTrack->sampleTable->setSyncSampleParams(
data_offset, chunk_data_size);
@@ -1467,15 +1538,16 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return err;
}
- *offset += chunk_size;
break;
}
- // @xyz
- case FOURCC('\xA9', 'x', 'y', 'z'):
+ // ©xyz
+ case FOURCC(0xA9, 'x', 'y', 'z'):
{
- // Best case the total data length inside "@xyz" box
- // would be 8, for instance "@xyz" + "\x00\x04\x15\xc7" + "0+0/",
+ *offset += chunk_size;
+
+ // Best case the total data length inside "©xyz" box
+ // would be 8, for instance "©xyz" + "\x00\x04\x15\xc7" + "0+0/",
// where "\x00\x04" is the text string length with value = 4,
// "\0x15\xc7" is the language code = en, and "0+0" is a
// location (string) value with longitude = 0 and latitude = 0.
@@ -1502,12 +1574,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
buffer[location_length] = '\0';
mFileMetaData->setCString(kKeyLocation, buffer);
- *offset += chunk_size;
break;
}
case FOURCC('e', 's', 'd', 's'):
{
+ *offset += chunk_size;
+
if (chunk_data_size < 4) {
return ERROR_MALFORMED;
}
@@ -1545,12 +1618,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
}
- *offset += chunk_size;
break;
}
case FOURCC('a', 'v', 'c', 'C'):
{
+ *offset += chunk_size;
+
sp<ABuffer> buffer = new ABuffer(chunk_data_size);
if (mDataSource->readAt(
@@ -1561,12 +1635,27 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setData(
kKeyAVCC, kTypeAVCC, buffer->data(), chunk_data_size);
+ break;
+ }
+ case FOURCC('h', 'v', 'c', 'C'):
+ {
+ sp<ABuffer> buffer = new ABuffer(chunk_data_size);
+
+ if (mDataSource->readAt(
+ data_offset, buffer->data(), chunk_data_size) < chunk_data_size) {
+ return ERROR_IO;
+ }
+
+ mLastTrack->meta->setData(
+ kKeyHVCC, kTypeHVCC, buffer->data(), chunk_data_size);
+
*offset += chunk_size;
break;
}
case FOURCC('d', '2', '6', '3'):
{
+ *offset += chunk_size;
/*
* d263 contains a fixed 7 bytes part:
* vendor - 4 bytes
@@ -1592,7 +1681,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setData(kKeyD263, kTypeD263, buffer, chunk_data_size);
- *offset += chunk_size;
break;
}
@@ -1600,11 +1688,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
{
uint8_t buffer[4];
if (chunk_data_size < (off64_t)sizeof(buffer)) {
+ *offset += chunk_size;
return ERROR_MALFORMED;
}
if (mDataSource->readAt(
data_offset, buffer, 4) < 4) {
+ *offset += chunk_size;
return ERROR_IO;
}
@@ -1638,6 +1728,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('n', 'a', 'm', 'e'):
case FOURCC('d', 'a', 't', 'a'):
{
+ *offset += chunk_size;
+
if (mPath.size() == 6 && underMetaDataPath(mPath)) {
status_t err = parseITunesMetaData(data_offset, chunk_data_size);
@@ -1646,17 +1738,18 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
}
- *offset += chunk_size;
break;
}
case FOURCC('m', 'v', 'h', 'd'):
{
- if (chunk_data_size < 24) {
+ *offset += chunk_size;
+
+ if (chunk_data_size < 32) {
return ERROR_MALFORMED;
}
- uint8_t header[24];
+ uint8_t header[32];
if (mDataSource->readAt(
data_offset, header, sizeof(header))
< (ssize_t)sizeof(header)) {
@@ -1664,14 +1757,27 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
uint64_t creationTime;
+ uint64_t duration = 0;
if (header[0] == 1) {
creationTime = U64_AT(&header[4]);
mHeaderTimescale = U32_AT(&header[20]);
+ duration = U64_AT(&header[24]);
+ if (duration == 0xffffffffffffffff) {
+ duration = 0;
+ }
} else if (header[0] != 0) {
return ERROR_MALFORMED;
} else {
creationTime = U32_AT(&header[4]);
mHeaderTimescale = U32_AT(&header[12]);
+ uint32_t d32 = U32_AT(&header[16]);
+ if (d32 == 0xffffffff) {
+ d32 = 0;
+ }
+ duration = d32;
+ }
+ if (duration != 0) {
+ mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);
}
String8 s;
@@ -1679,7 +1785,50 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mFileMetaData->setCString(kKeyDate, s.string());
+ break;
+ }
+
+ case FOURCC('m', 'e', 'h', 'd'):
+ {
*offset += chunk_size;
+
+ if (chunk_data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t flags[4];
+ if (mDataSource->readAt(
+ data_offset, flags, sizeof(flags))
+ < (ssize_t)sizeof(flags)) {
+ return ERROR_IO;
+ }
+
+ uint64_t duration = 0;
+ if (flags[0] == 1) {
+ // 64 bit
+ if (chunk_data_size < 12) {
+ return ERROR_MALFORMED;
+ }
+ mDataSource->getUInt64(data_offset + 4, &duration);
+ if (duration == 0xffffffffffffffff) {
+ duration = 0;
+ }
+ } else if (flags[0] == 0) {
+ // 32 bit
+ uint32_t d32;
+ mDataSource->getUInt32(data_offset + 4, &d32);
+ if (d32 == 0xffffffff) {
+ d32 = 0;
+ }
+ duration = d32;
+ } else {
+ return ERROR_MALFORMED;
+ }
+
+ if (duration != 0) {
+ mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);
+ }
+
break;
}
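
Both 'mvhd' and 'mehd' now feed kKeyDuration: version 1 boxes carry a 64-bit duration, version 0 a 32-bit one, and an all-ones value is treated as "unknown" and skipped. A small sketch of that version-dependent read, given a pointer to the duration field; names are illustrative:

    #include <cstdint>

    // All-ones means "unknown" and is returned as 0, mirroring the
    // mvhd/mehd handling above. The caller scales by the timescale:
    // durationUs = duration * 1000000 / timescale.
    static uint64_t readBoxDuration(const uint8_t *p, uint8_t version) {
        uint64_t duration = 0;
        if (version == 1) {                              // 64-bit field
            for (int i = 0; i < 8; ++i) duration = (duration << 8) | p[i];
            if (duration == 0xffffffffffffffffULL) duration = 0;
        } else {                                         // version 0: 32-bit field
            uint32_t d32 = ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
                           ((uint32_t)p[2] << 8) | p[3];
            if (d32 == 0xffffffff) d32 = 0;
            duration = d32;
        }
        return duration;
    }
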
@@ -1700,6 +1849,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('h', 'd', 'l', 'r'):
{
+ *offset += chunk_size;
+
uint32_t buffer;
if (mDataSource->readAt(
data_offset + 8, &buffer, 4) < 4) {
@@ -1714,7 +1865,25 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_TEXT_3GPP);
}
+ break;
+ }
+
+ case FOURCC('t', 'r', 'e', 'x'):
+ {
*offset += chunk_size;
+
+ if (chunk_data_size < 24) {
+ return ERROR_IO;
+ }
+ Trex trex;
+ if (!mDataSource->getUInt32(data_offset + 4, &trex.track_ID) ||
+ !mDataSource->getUInt32(data_offset + 8, &trex.default_sample_description_index) ||
+ !mDataSource->getUInt32(data_offset + 12, &trex.default_sample_duration) ||
+ !mDataSource->getUInt32(data_offset + 16, &trex.default_sample_size) ||
+ !mDataSource->getUInt32(data_offset + 20, &trex.default_sample_flags)) {
+ return ERROR_IO;
+ }
+ mTrex.add(trex);
break;
}
@@ -1728,7 +1897,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
size = 0;
}
- uint8_t *buffer = new uint8_t[size + chunk_size];
+ uint8_t *buffer = new (std::nothrow) uint8_t[size + chunk_size];
+ if (buffer == NULL) {
+ return ERROR_MALFORMED;
+ }
if (size > 0) {
memcpy(buffer, data, size);
@@ -1739,6 +1911,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
delete[] buffer;
buffer = NULL;
+ // advance read pointer so we don't end up reading this again
+ *offset += chunk_size;
return ERROR_IO;
}
@@ -1753,6 +1927,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('c', 'o', 'v', 'r'):
{
+ *offset += chunk_size;
+
if (mFileMetaData != NULL) {
ALOGV("chunk_data_size = %lld and data_offset = %lld",
chunk_data_size, data_offset);
@@ -1767,7 +1943,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
buffer->data() + kSkipBytesOfDataBox, chunk_data_size - kSkipBytesOfDataBox);
}
- *offset += chunk_size;
break;
}
@@ -1778,25 +1953,27 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('a', 'l', 'b', 'm'):
case FOURCC('y', 'r', 'r', 'c'):
{
+ *offset += chunk_size;
+
status_t err = parse3GPPMetaData(data_offset, chunk_data_size, depth);
if (err != OK) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('I', 'D', '3', '2'):
{
+ *offset += chunk_size;
+
if (chunk_data_size < 6) {
return ERROR_MALFORMED;
}
parseID3v2MetaData(data_offset + 6);
- *offset += chunk_size;
break;
}
@@ -1888,7 +2065,7 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
offset += 16;
size -= 16;
}
- ALOGV("sidx pres/off: %Ld/%Ld", earliestPresentationTime, firstOffset);
+ ALOGV("sidx pres/off: %" PRIu64 "/%" PRIu64, earliestPresentationTime, firstOffset);
if (size < 4) {
return -EINVAL;
@@ -1920,9 +2097,10 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
ALOGW("sub-sidx boxes not supported yet");
}
bool sap = d3 & 0x80000000;
- bool saptype = d3 >> 28;
- if (!sap || saptype > 2) {
- ALOGW("not a stream access point, or unsupported type");
+ uint32_t saptype = (d3 >> 28) & 7;
+ if (!sap || (saptype != 1 && saptype != 2)) {
+ // type 1 and 2 are sync samples
+ ALOGW("not a stream access point, or unsupported type: %08x", d3);
}
total_duration += d2;
offset += 12;
@@ -1933,12 +2111,11 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
mSidxEntries.add(se);
}
- mSidxDuration = total_duration * 1000000 / timeScale;
- ALOGV("duration: %lld", mSidxDuration);
+ uint64_t sidxDuration = total_duration * 1000000 / timeScale;
int64_t metaDuration;
if (!mLastTrack->meta->findInt64(kKeyDuration, &metaDuration) || metaDuration == 0) {
- mLastTrack->meta->setInt64(kKeyDuration, mSidxDuration);
+ mLastTrack->meta->setInt64(kKeyDuration, sidxDuration);
}
return OK;
}
@@ -1969,7 +2146,7 @@ status_t MPEG4Extractor::parseTrackHeader(
return ERROR_IO;
}
- uint64_t ctime, mtime, duration;
+ uint64_t ctime __unused, mtime __unused, duration __unused;
int32_t id;
if (version == 1) {
@@ -1991,12 +2168,13 @@ status_t MPEG4Extractor::parseTrackHeader(
size_t matrixOffset = dynSize + 16;
int32_t a00 = U32_AT(&buffer[matrixOffset]);
int32_t a01 = U32_AT(&buffer[matrixOffset + 4]);
- int32_t dx = U32_AT(&buffer[matrixOffset + 8]);
int32_t a10 = U32_AT(&buffer[matrixOffset + 12]);
int32_t a11 = U32_AT(&buffer[matrixOffset + 16]);
- int32_t dy = U32_AT(&buffer[matrixOffset + 20]);
#if 0
+ int32_t dx = U32_AT(&buffer[matrixOffset + 8]);
+ int32_t dy = U32_AT(&buffer[matrixOffset + 20]);
+
ALOGI("x' = %.2f * x + %.2f * y + %.2f",
a00 / 65536.0f, a01 / 65536.0f, dx / 65536.0f);
ALOGI("y' = %.2f * x + %.2f * y + %.2f",
@@ -2039,7 +2217,10 @@ status_t MPEG4Extractor::parseITunesMetaData(off64_t offset, size_t size) {
return ERROR_MALFORMED;
}
- uint8_t *buffer = new uint8_t[size + 1];
+ uint8_t *buffer = new (std::nothrow) uint8_t[size + 1];
+ if (buffer == NULL) {
+ return ERROR_MALFORMED;
+ }
if (mDataSource->readAt(
offset, buffer, size) != (ssize_t)size) {
delete[] buffer;
@@ -2054,7 +2235,7 @@ status_t MPEG4Extractor::parseITunesMetaData(off64_t offset, size_t size) {
char chunk[5];
MakeFourCCString(mPath[4], chunk);
ALOGV("meta: %s @ %lld", chunk, offset);
- switch (mPath[4]) {
+ switch ((int32_t)mPath[4]) {
case FOURCC(0xa9, 'a', 'l', 'b'):
{
metadataKey = kKeyAlbum;
@@ -2226,7 +2407,10 @@ status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int dept
return ERROR_MALFORMED;
}
- uint8_t *buffer = new uint8_t[size];
+ uint8_t *buffer = new (std::nothrow) uint8_t[size];
+ if (buffer == NULL) {
+ return ERROR_MALFORMED;
+ }
if (mDataSource->readAt(
offset, buffer, size) != (ssize_t)size) {
delete[] buffer;
@@ -2405,11 +2589,24 @@ sp<MediaSource> MPEG4Extractor::getTrack(size_t index) {
return NULL;
}
- ALOGV("getTrack called, pssh: %d", mPssh.size());
- return new MPEG4Source(
+ Trex *trex = NULL;
+ int32_t trackId;
+ if (track->meta->findInt32(kKeyTrackID, &trackId)) {
+ for (size_t i = 0; i < mTrex.size(); i++) {
+ Trex *t = &mTrex.editItemAt(i);

+ if (t->track_ID == (uint32_t) trackId) {
+ trex = t;
+ break;
+ }
+ }
+ }
+
+ ALOGV("getTrack called, pssh: %zu", mPssh.size());
+
+ return new MPEG4Source(this,
track->meta, mDataSource, track->timescale, track->sampleTable,
- mSidxEntries, mMoofOffset);
+ mSidxEntries, trex, mMoofOffset);
}
// static
@@ -2425,6 +2622,11 @@ status_t MPEG4Extractor::verifyTrack(Track *track) {
|| type != kTypeAVCC) {
return ERROR_MALFORMED;
}
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ if (!track->meta->findData(kKeyHVCC, &type, &data, &size)
+ || type != kTypeHVCC) {
+ return ERROR_MALFORMED;
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
if (!track->meta->findData(kKeyESDS, &type, &data, &size)
@@ -2433,14 +2635,72 @@ status_t MPEG4Extractor::verifyTrack(Track *track) {
}
}
- if (!track->sampleTable->isValid()) {
+ if (track->sampleTable == NULL || !track->sampleTable->isValid()) {
// Make sure we have all the metadata we need.
+ ALOGE("stbl atom missing/invalid.");
+ return ERROR_MALFORMED;
+ }
+
+ if (track->timescale == 0) {
+ ALOGE("timescale invalid.");
return ERROR_MALFORMED;
}
return OK;
}
+typedef enum {
+ //AOT_NONE = -1,
+ //AOT_NULL_OBJECT = 0,
+ //AOT_AAC_MAIN = 1, /**< Main profile */
+ AOT_AAC_LC = 2, /**< Low Complexity object */
+ //AOT_AAC_SSR = 3,
+ //AOT_AAC_LTP = 4,
+ AOT_SBR = 5,
+ //AOT_AAC_SCAL = 6,
+ //AOT_TWIN_VQ = 7,
+ //AOT_CELP = 8,
+ //AOT_HVXC = 9,
+ //AOT_RSVD_10 = 10, /**< (reserved) */
+ //AOT_RSVD_11 = 11, /**< (reserved) */
+ //AOT_TTSI = 12, /**< TTSI Object */
+ //AOT_MAIN_SYNTH = 13, /**< Main Synthetic object */
+ //AOT_WAV_TAB_SYNTH = 14, /**< Wavetable Synthesis object */
+ //AOT_GEN_MIDI = 15, /**< General MIDI object */
+ //AOT_ALG_SYNTH_AUD_FX = 16, /**< Algorithmic Synthesis and Audio FX object */
+ AOT_ER_AAC_LC = 17, /**< Error Resilient(ER) AAC Low Complexity */
+ //AOT_RSVD_18 = 18, /**< (reserved) */
+ //AOT_ER_AAC_LTP = 19, /**< Error Resilient(ER) AAC LTP object */
+ AOT_ER_AAC_SCAL = 20, /**< Error Resilient(ER) AAC Scalable object */
+ //AOT_ER_TWIN_VQ = 21, /**< Error Resilient(ER) TwinVQ object */
+ AOT_ER_BSAC = 22, /**< Error Resilient(ER) BSAC object */
+ AOT_ER_AAC_LD = 23, /**< Error Resilient(ER) AAC LowDelay object */
+ //AOT_ER_CELP = 24, /**< Error Resilient(ER) CELP object */
+ //AOT_ER_HVXC = 25, /**< Error Resilient(ER) HVXC object */
+ //AOT_ER_HILN = 26, /**< Error Resilient(ER) HILN object */
+ //AOT_ER_PARA = 27, /**< Error Resilient(ER) Parametric object */
+ //AOT_RSVD_28 = 28, /**< might become SSC */
+ AOT_PS = 29, /**< PS, Parametric Stereo (includes SBR) */
+ //AOT_MPEGS = 30, /**< MPEG Surround */
+
+ AOT_ESCAPE = 31, /**< Signal AOT uses more than 5 bits */
+
+ //AOT_MP3ONMP4_L1 = 32, /**< MPEG-Layer1 in mp4 */
+ //AOT_MP3ONMP4_L2 = 33, /**< MPEG-Layer2 in mp4 */
+ //AOT_MP3ONMP4_L3 = 34, /**< MPEG-Layer3 in mp4 */
+ //AOT_RSVD_35 = 35, /**< might become DST */
+ //AOT_RSVD_36 = 36, /**< might become ALS */
+ //AOT_AAC_SLS = 37, /**< AAC + SLS */
+ //AOT_SLS = 38, /**< SLS */
+ //AOT_ER_AAC_ELD = 39, /**< AAC Enhanced Low Delay */
+
+ //AOT_USAC = 42, /**< USAC */
+ //AOT_SAOC = 43, /**< SAOC */
+ //AOT_LD_MPEGS = 44, /**< Low Delay MPEG Surround */
+
+ //AOT_RSVD50 = 50, /**< Interim AOT for Rsvd50 */
+} AUDIO_OBJECT_TYPE;
+
status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
const void *esds_data, size_t esds_size) {
ESDS esds(esds_data, esds_size);
@@ -2471,10 +2731,10 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
return ERROR_MALFORMED;
}
-#if 0
- printf("ESD of size %d\n", csd_size);
- hexdump(csd, csd_size);
-#endif
+ if (kUseHexDump) {
+ printf("ESD of size %d\n", csd_size);
+ hexdump(csd, csd_size);
+ }
if (csd_size == 0) {
// There's no further information, i.e. no codec specific data
@@ -2523,9 +2783,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
sampleRate = kSamplingRate[freqIndex];
}
- if (objectType == 5 || objectType == 29) { // SBR specific config per 14496-3 table 1.13
+ if (objectType == AOT_SBR || objectType == AOT_PS) {//SBR specific config per 14496-3 table 1.13
uint32_t extFreqIndex = br.getBits(4);
- int32_t extSampleRate;
+ int32_t extSampleRate __unused;
if (extFreqIndex == 15) {
if (csd_size < 8) {
return ERROR_MALFORMED;
@@ -2541,6 +2801,131 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
// mLastTrack->meta->setInt32(kKeyExtSampleRate, extSampleRate);
}
+ switch (numChannels) {
+ // values defined in 14496-3_2009 amendment-4 Table 1.19 - Channel Configuration
+ case 0:
+ case 1:// FC
+ case 2:// FL FR
+ case 3:// FC, FL FR
+ case 4:// FC, FL FR, RC
+ case 5:// FC, FL FR, SL SR
+ case 6:// FC, FL FR, SL SR, LFE
+ //numChannels already contains the right value
+ break;
+ case 11:// FC, FL FR, SL SR, RC, LFE
+ numChannels = 7;
+ break;
+ case 7: // FC, FCL FCR, FL FR, SL SR, LFE
+ case 12:// FC, FL FR, SL SR, RL RR, LFE
+ case 14:// FC, FL FR, SL SR, LFE, FHL FHR
+ numChannels = 8;
+ break;
+ default:
+ return ERROR_UNSUPPORTED;
+ }
+
+ {
+ if (objectType == AOT_SBR || objectType == AOT_PS) {
+ objectType = br.getBits(5);
+
+ if (objectType == AOT_ESCAPE) {
+ objectType = 32 + br.getBits(6);
+ }
+ }
+ if (objectType == AOT_AAC_LC || objectType == AOT_ER_AAC_LC ||
+ objectType == AOT_ER_AAC_LD || objectType == AOT_ER_AAC_SCAL ||
+ objectType == AOT_ER_BSAC) {
+ const int32_t frameLengthFlag __unused = br.getBits(1);
+
+ const int32_t dependsOnCoreCoder = br.getBits(1);
+
+ if (dependsOnCoreCoder ) {
+ const int32_t coreCoderDelay __unused = br.getBits(14);
+ }
+
+ int32_t extensionFlag = -1;
+ if (br.numBitsLeft() > 0) {
+ extensionFlag = br.getBits(1);
+ } else {
+ switch (objectType) {
+ // 14496-3 4.5.1.1 extensionFlag
+ case AOT_AAC_LC:
+ extensionFlag = 0;
+ break;
+ case AOT_ER_AAC_LC:
+ case AOT_ER_AAC_SCAL:
+ case AOT_ER_BSAC:
+ case AOT_ER_AAC_LD:
+ extensionFlag = 1;
+ break;
+ default:
+ TRESPASS();
+ break;
+ }
+ ALOGW("csd missing extension flag; assuming %d for object type %u.",
+ extensionFlag, objectType);
+ }
+
+ if (numChannels == 0) {
+ int32_t channelsEffectiveNum = 0;
+ int32_t channelsNum = 0;
+ const int32_t ElementInstanceTag __unused = br.getBits(4);
+ const int32_t Profile __unused = br.getBits(2);
+ const int32_t SamplingFrequencyIndex __unused = br.getBits(4);
+ const int32_t NumFrontChannelElements = br.getBits(4);
+ const int32_t NumSideChannelElements = br.getBits(4);
+ const int32_t NumBackChannelElements = br.getBits(4);
+ const int32_t NumLfeChannelElements = br.getBits(2);
+ const int32_t NumAssocDataElements __unused = br.getBits(3);
+ const int32_t NumValidCcElements __unused = br.getBits(4);
+
+ const int32_t MonoMixdownPresent = br.getBits(1);
+ if (MonoMixdownPresent != 0) {
+ const int32_t MonoMixdownElementNumber __unused = br.getBits(4);
+ }
+
+ const int32_t StereoMixdownPresent = br.getBits(1);
+ if (StereoMixdownPresent != 0) {
+ const int32_t StereoMixdownElementNumber __unused = br.getBits(4);
+ }
+
+ const int32_t MatrixMixdownIndexPresent = br.getBits(1);
+ if (MatrixMixdownIndexPresent != 0) {
+ const int32_t MatrixMixdownIndex __unused = br.getBits(2);
+ const int32_t PseudoSurroundEnable __unused = br.getBits(1);
+ }
+
+ int i;
+ for (i=0; i < NumFrontChannelElements; i++) {
+ const int32_t FrontElementIsCpe = br.getBits(1);
+ const int32_t FrontElementTagSelect __unused = br.getBits(4);
+ channelsNum += FrontElementIsCpe ? 2 : 1;
+ }
+
+ for (i=0; i < NumSideChannelElements; i++) {
+ const int32_t SideElementIsCpe = br.getBits(1);
+ const int32_t SideElementTagSelect __unused = br.getBits(4);
+ channelsNum += SideElementIsCpe ? 2 : 1;
+ }
+
+ for (i=0; i < NumBackChannelElements; i++) {
+ const int32_t BackElementIsCpe = br.getBits(1);
+ const int32_t BackElementTagSelect __unused = br.getBits(4);
+ channelsNum += BackElementIsCpe ? 2 : 1;
+ }
+ channelsEffectiveNum = channelsNum;
+
+ for (i=0; i < NumLfeChannelElements; i++) {
+ const int32_t LfeElementTagSelect __unused = br.getBits(4);
+ channelsNum += 1;
+ }
+ ALOGV("mpeg4 audio channelsNum = %d", channelsNum);
+ ALOGV("mpeg4 audio channelsEffectiveNum = %d", channelsEffectiveNum);
+ numChannels = channelsNum;
+ }
+ }
+ }
+
if (numChannels == 0) {
return ERROR_UNSUPPORTED;
}
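
The added switch maps the AudioSpecificConfig channelConfiguration field to an actual channel count per 14496-3 Table 1.19 (for example, configuration 11 is 6.1, i.e. 7 channels; 7, 12 and 14 are 7.1 layouts, i.e. 8 channels), and configuration 0 defers to the program config element parsed afterwards. The same mapping as a standalone helper, illustrative and not part of the patch:

    #include <cstdint>

    // channelConfiguration -> channel count, per ISO/IEC 14496-3 Table 1.19.
    // Returns -1 for unsupported configurations; 0 means "read the PCE".
    static int32_t aacChannelCount(uint32_t channelConfiguration) {
        switch (channelConfiguration) {
            case 0:                          // channels described by a PCE instead
            case 1: case 2: case 3:
            case 4: case 5: case 6:
                return (int32_t)channelConfiguration;
            case 11:                         // 6.1: FC, FL FR, SL SR, RC, LFE
                return 7;
            case 7:                          // 7.1 variants
            case 12:
            case 14:
                return 8;
            default:
                return -1;                   // not supported by this extractor
        }
    }
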
@@ -2571,19 +2956,23 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
////////////////////////////////////////////////////////////////////////////////
MPEG4Source::MPEG4Source(
+ const sp<MPEG4Extractor> &owner,
const sp<MetaData> &format,
const sp<DataSource> &dataSource,
int32_t timeScale,
const sp<SampleTable> &sampleTable,
Vector<SidxEntry> &sidx,
+ const Trex *trex,
off64_t firstMoofOffset)
- : mFormat(format),
+ : mOwner(owner),
+ mFormat(format),
mDataSource(dataSource),
mTimescale(timeScale),
mSampleTable(sampleTable),
mCurrentSampleIndex(0),
mCurrentFragmentIndex(0),
mSegments(sidx),
+ mTrex(trex),
mFirstMoofOffset(firstMoofOffset),
mCurrentMoofOffset(firstMoofOffset),
mCurrentTime(0),
@@ -2592,6 +2981,7 @@ MPEG4Source::MPEG4Source(
mCurrentSampleInfoOffsetsAllocSize(0),
mCurrentSampleInfoOffsets(NULL),
mIsAVC(false),
+ mIsHEVC(false),
mNALLengthSize(0),
mStarted(false),
mGroup(NULL),
@@ -2599,6 +2989,8 @@ MPEG4Source::MPEG4Source(
mWantsNALFragments(false),
mSrcBuffer(NULL) {
+ memset(&mTrackFragmentHeaderInfo, 0, sizeof(mTrackFragmentHeaderInfo));
+
mFormat->findInt32(kKeyCryptoMode, &mCryptoMode);
mDefaultIVSize = 0;
mFormat->findInt32(kKeyCryptoDefaultIVSize, &mDefaultIVSize);
@@ -2616,6 +3008,7 @@ MPEG4Source::MPEG4Source(
CHECK(success);
mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+ mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
if (mIsAVC) {
uint32_t type;
@@ -2630,6 +3023,18 @@ MPEG4Source::MPEG4Source(
// The number of bytes used to encode the length of a NAL unit.
mNALLengthSize = 1 + (ptr[4] & 3);
+ } else if (mIsHEVC) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ CHECK(format->findData(kKeyHVCC, &type, &data, &size));
+
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ CHECK(size >= 7);
+ CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+
+ mNALLengthSize = 1 + (ptr[14 + 7] & 3);
}
CHECK(format->findInt32(kKeyTrackID, &mTrackId));
@@ -2668,7 +3073,11 @@ status_t MPEG4Source::start(MetaData *params) {
mGroup->add_buffer(new MediaBuffer(max_size));
- mSrcBuffer = new uint8_t[max_size];
+ mSrcBuffer = new (std::nothrow) uint8_t[max_size];
+ if (mSrcBuffer == NULL) {
+ // file probably specified a bad max size
+ return ERROR_MALFORMED;
+ }
mStarted = true;
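
Switching to new (std::nothrow) above lets a max sample size taken from untrusted file metadata fail as ERROR_MALFORMED instead of terminating the process on allocation failure. A standalone sketch of the same pattern, with a deliberately absurd size:

// Sketch of the std::nothrow pattern above: an allocation size claimed by a
// hostile file is allowed to fail gracefully rather than aborting the caller.
#include <cstdint>
#include <cstdio>
#include <new>

int main() {
    size_t maxSize = SIZE_MAX / 2;                    // "size" claimed by a hostile file
    uint8_t *buf = new (std::nothrow) uint8_t[maxSize];
    if (buf == nullptr) {
        printf("allocation failed, treating file as malformed\n");
        return 0;
    }
    delete[] buf;
    return 0;
}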
@@ -2741,9 +3150,20 @@ status_t MPEG4Source::parseChunk(off64_t *offset) {
}
}
if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
- // *offset points to the mdat box following this moof
- parseChunk(offset); // doesn't actually parse it, just updates offset
- mNextMoofOffset = *offset;
+ // *offset points to the box following this moof. Find the next moof from there.
+
+ while (true) {
+ if (mDataSource->readAt(*offset, hdr, 8) < 8) {
+ return ERROR_END_OF_STREAM;
+ }
+ chunk_size = ntohl(hdr[0]);
+ chunk_type = ntohl(hdr[1]);
+ if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ mNextMoofOffset = *offset;
+ break;
+ }
+ *offset += chunk_size;
+ }
}
break;
}
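
The replacement loop above walks top-level box headers (32-bit size plus fourcc) from the current offset until it reaches the next 'moof'. A standalone sketch of that scan, assuming plain 32-bit box sizes (no 64-bit largesize, no size==0 "to end of file" boxes); the in-memory file built in main is illustrative:

#include <arpa/inet.h>   // ntohl, htonl
#include <cstdint>
#include <cstdio>
#include <cstring>

static const uint32_t kMoof = 0x6d6f6f66;   // 'moof'

// Scan top-level boxes starting at `offset`; return true and set *moofOffset
// when a 'moof' header is found.
static bool findNextMoof(const uint8_t *data, size_t size,
                         size_t offset, size_t *moofOffset) {
    while (offset + 8 <= size) {
        uint32_t hdr[2];
        memcpy(hdr, data + offset, 8);
        uint32_t boxSize = ntohl(hdr[0]);
        uint32_t boxType = ntohl(hdr[1]);
        if (boxType == kMoof) {
            *moofOffset = offset;
            return true;
        }
        if (boxSize < 8) {
            return false;                    // malformed box; avoid looping forever
        }
        offset += boxSize;
    }
    return false;                            // ran off the end of the stream
}

int main() {
    // a 16-byte 'free' box followed by an 8-byte 'moof' box header
    uint8_t file[24] = {};
    uint32_t v;
    v = htonl(16); memcpy(file + 0, &v, 4);
    memcpy(file + 4, "free", 4);
    v = htonl(8);  memcpy(file + 16, &v, 4);
    memcpy(file + 20, "moof", 4);

    size_t moofAt = 0;
    if (findNextMoof(file, sizeof(file), 0, &moofAt)) {
        printf("next moof at offset %zu\n", moofAt);   // prints 16
    }
    return 0;
}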
@@ -2802,7 +3222,8 @@ status_t MPEG4Source::parseChunk(off64_t *offset) {
return OK;
}
-status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(off64_t offset, off64_t size) {
+status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(
+ off64_t offset, off64_t /* size */) {
ALOGV("parseSampleAuxiliaryInformationSizes");
// 14496-12 8.7.12
uint8_t version;
@@ -2864,7 +3285,8 @@ status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(off64_t offset, off64
return OK;
}
-status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(off64_t offset, off64_t size) {
+status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(
+ off64_t offset, off64_t /* size */) {
ALOGV("parseSampleAuxiliaryInformationOffsets");
// 14496-12 8.7.13
uint8_t version;
@@ -3140,8 +3562,8 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
} else if (mTrackFragmentHeaderInfo.mFlags
& TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
- } else {
- sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
+ } else if (mTrex) {
+ sampleDuration = mTrex->default_sample_duration;
}
if (flags & kSampleSizePresent) {
@@ -3168,7 +3590,7 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
sampleCtsOffset = 0;
}
- if (size < sampleCount * bytesPerSample) {
+ if (size < (off64_t)sampleCount * bytesPerSample) {
return -EINVAL;
}
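
The (off64_t) cast above widens the multiplication before the bounds check, so a hostile sampleCount cannot wrap a 32-bit product and slip past it. A standalone sketch with illustrative values:

// Why the cast matters: with 32-bit arithmetic the product of an
// attacker-controlled sampleCount and bytesPerSample can wrap and defeat
// the bounds check; widening one operand to 64 bits first avoids that.
#include <cstdint>
#include <cstdio>

int main() {
    uint32_t sampleCount = 0x20000000u;      // hostile value from a trun box
    uint32_t bytesPerSample = 16;
    int64_t size = 1024;                     // bytes actually available

    uint32_t wrapped = sampleCount * bytesPerSample;        // wraps to 0 in 32 bits
    int64_t widened = (int64_t)sampleCount * bytesPerSample;

    printf("32-bit product: %u (check passes: %d)\n", wrapped, size >= wrapped);
    printf("64-bit product: %lld (check passes: %d)\n",
           (long long)widened, size >= widened);
    return 0;
}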
@@ -3202,7 +3624,7 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
offset += 4;
}
- ALOGV("adding sample %d at offset 0x%08llx, size %u, duration %u, "
+ ALOGV("adding sample %d at offset 0x%08" PRIx64 ", size %u, duration %u, "
" flags 0x%08x", i + 1,
dataOffset, sampleSize, sampleDuration,
(flags & kFirstSampleFlagsPresent) && i == 0
@@ -3210,6 +3632,7 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
tmp.offset = dataOffset;
tmp.size = sampleSize;
tmp.duration = sampleDuration;
+ tmp.compositionOffset = sampleCtsOffset;
mCurrentSamples.add(tmp);
dataOffset += sampleSize;
@@ -3281,7 +3704,7 @@ status_t MPEG4Source::read(
uint32_t sampleIndex;
status_t err = mSampleTable->findSampleAtTime(
- seekTimeUs * mTimescale / 1000000,
+ seekTimeUs, 1000000, mTimescale,
&sampleIndex, findFlags);
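
Passing (seekTimeUs, 1000000, mTimescale) down instead of pre-multiplying at the call site lets the sample table rescale with 64-bit intermediates rather than overflowing or losing precision for long files and large timescales. A standalone sketch of such a rescale; this is a guess at the intent, not the SampleTable implementation:

// Unit rescaling done in one place with 64-bit intermediates, split into
// quotient and remainder so the intermediate product stays small for
// typical media timescales.
#include <cstdint>
#include <cstdio>

static uint64_t rescale(uint64_t value, uint64_t from, uint64_t to) {
    return (value / from) * to + (value % from) * to / from;
}

int main() {
    uint64_t seekTimeUs = 3600000000ull;   // one hour in microseconds
    uint32_t timescale = 90000;            // typical video track timescale
    printf("media time: %llu ticks\n",
           (unsigned long long)rescale(seekTimeUs, 1000000, timescale));
    return 0;
}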
if (mode == ReadOptions::SEEK_CLOSEST) {
@@ -3343,7 +3766,7 @@ status_t MPEG4Source::read(
off64_t offset;
size_t size;
- uint32_t cts;
+ uint32_t cts, stts;
bool isSyncSample;
bool newBuffer = false;
if (mBuffer == NULL) {
@@ -3351,7 +3774,7 @@ status_t MPEG4Source::read(
status_t err =
mSampleTable->getMetaDataForSample(
- mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample);
+ mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample, &stts);
if (err != OK) {
return err;
@@ -3365,7 +3788,7 @@ status_t MPEG4Source::read(
}
}
- if (!mIsAVC || mWantsNALFragments) {
+ if ((!mIsAVC && !mIsHEVC) || mWantsNALFragments) {
if (newBuffer) {
ssize_t num_bytes_read =
mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size);
@@ -3382,6 +3805,8 @@ status_t MPEG4Source::read(
mBuffer->meta_data()->clear();
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)stts * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3395,7 +3820,7 @@ status_t MPEG4Source::read(
++mCurrentSampleIndex;
}
- if (!mIsAVC) {
+ if (!mIsAVC && !mIsHEVC) {
*out = mBuffer;
mBuffer = NULL;
@@ -3504,6 +3929,8 @@ status_t MPEG4Source::read(
mBuffer->meta_data()->clear();
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)stts * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3546,7 +3973,7 @@ status_t MPEG4Source::fragmentedRead(
const SidxEntry *se = &mSegments[i];
if (totalTime + se->mDurationUs > seekTimeUs) {
// The requested time is somewhere in this segment
- if ((mode == ReadOptions::SEEK_NEXT_SYNC) ||
+ if ((mode == ReadOptions::SEEK_NEXT_SYNC && seekTimeUs > totalTime) ||
(mode == ReadOptions::SEEK_CLOSEST_SYNC &&
(seekTimeUs - totalTime) > (totalTime + se->mDurationUs - seekTimeUs))) {
// requested next sync, or closest sync and it was closer to the end of
@@ -3559,11 +3986,19 @@ status_t MPEG4Source::fragmentedRead(
totalTime += se->mDurationUs;
totalOffset += se->mSize;
}
- mCurrentMoofOffset = totalOffset;
- mCurrentSamples.clear();
- mCurrentSampleIndex = 0;
- parseChunk(&totalOffset);
- mCurrentTime = totalTime * mTimescale / 1000000ll;
+ mCurrentMoofOffset = totalOffset;
+ mCurrentSamples.clear();
+ mCurrentSampleIndex = 0;
+ parseChunk(&totalOffset);
+ mCurrentTime = totalTime * mTimescale / 1000000ll;
+ } else {
+ // without sidx boxes, we can only seek to 0
+ mCurrentMoofOffset = mFirstMoofOffset;
+ mCurrentSamples.clear();
+ mCurrentSampleIndex = 0;
+ off64_t tmp = mCurrentMoofOffset;
+ parseChunk(&tmp);
+ mCurrentTime = 0;
}
if (mBuffer != NULL) {
@@ -3575,7 +4010,7 @@ status_t MPEG4Source::fragmentedRead(
}
off64_t offset = 0;
- size_t size;
+ size_t size = 0;
uint32_t cts = 0;
bool isSyncSample = false;
bool newBuffer = false;
@@ -3583,22 +4018,24 @@ status_t MPEG4Source::fragmentedRead(
newBuffer = true;
if (mCurrentSampleIndex >= mCurrentSamples.size()) {
- // move to next fragment
- Sample lastSample = mCurrentSamples[mCurrentSamples.size() - 1];
- off64_t nextMoof = mNextMoofOffset; // lastSample.offset + lastSample.size;
+ // move to next fragment if there is one
+ if (mNextMoofOffset <= mCurrentMoofOffset) {
+ return ERROR_END_OF_STREAM;
+ }
+ off64_t nextMoof = mNextMoofOffset;
mCurrentMoofOffset = nextMoof;
mCurrentSamples.clear();
mCurrentSampleIndex = 0;
parseChunk(&nextMoof);
- if (mCurrentSampleIndex >= mCurrentSamples.size()) {
- return ERROR_END_OF_STREAM;
- }
+ if (mCurrentSampleIndex >= mCurrentSamples.size()) {
+ return ERROR_END_OF_STREAM;
+ }
}
const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex];
offset = smpl->offset;
size = smpl->size;
- cts = mCurrentTime;
+ cts = mCurrentTime + smpl->compositionOffset;
mCurrentTime += smpl->duration;
isSyncSample = (mCurrentSampleIndex == 0); // XXX
@@ -3626,7 +4063,7 @@ status_t MPEG4Source::fragmentedRead(
bufmeta->setData(kKeyCryptoKey, 0, mCryptoKey, 16);
}
- if (!mIsAVC || mWantsNALFragments) {
+ if ((!mIsAVC && !mIsHEVC) || mWantsNALFragments) {
if (newBuffer) {
ssize_t num_bytes_read =
mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size);
@@ -3643,6 +4080,8 @@ status_t MPEG4Source::fragmentedRead(
mBuffer->set_range(0, size);
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)smpl->duration * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3656,7 +4095,7 @@ status_t MPEG4Source::fragmentedRead(
++mCurrentSampleIndex;
}
- if (!mIsAVC) {
+ if (!mIsAVC && !mIsHEVC) {
*out = mBuffer;
mBuffer = NULL;
@@ -3766,6 +4205,8 @@ status_t MPEG4Source::fragmentedRead(
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)smpl->duration * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3828,6 +4269,8 @@ static bool isCompatibleBrand(uint32_t fourcc) {
FOURCC('i', 's', 'o', 'm'),
FOURCC('i', 's', 'o', '2'),
FOURCC('a', 'v', 'c', '1'),
+ FOURCC('h', 'v', 'c', '1'),
+ FOURCC('h', 'e', 'v', '1'),
FOURCC('3', 'g', 'p', '4'),
FOURCC('m', 'p', '4', '1'),
FOURCC('m', 'p', '4', '2'),
@@ -3899,7 +4342,7 @@ static bool BetterSniffMPEG4(
char chunkstring[5];
MakeFourCCString(chunkType, chunkstring);
- ALOGV("saw chunk type %s, size %lld @ %lld", chunkstring, chunkSize, offset);
+ ALOGV("saw chunk type %s, size %" PRIu64 " @ %lld", chunkstring, chunkSize, offset);
switch (chunkType) {
case FOURCC('f', 't', 'y', 'p'):
{
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index d53fdab..844a019 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -16,13 +16,17 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MPEG4Writer"
-#include <inttypes.h>
-#include <utils/Log.h>
#include <arpa/inet.h>
-
+#include <fcntl.h>
+#include <inttypes.h>
#include <pthread.h>
#include <sys/prctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <utils/Log.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MPEG4Writer.h>
@@ -34,17 +38,26 @@
#include <media/stagefright/Utils.h>
#include <media/mediarecorder.h>
#include <cutils/properties.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <unistd.h>
#include "include/ESDS.h"
+
+#ifndef __predict_false
+#define __predict_false(exp) __builtin_expect((exp) != 0, 0)
+#endif
+
+#define WARN_UNLESS(condition, message, ...) \
+( (__predict_false(condition)) ? false : ({ \
+ ALOGW("Condition %s failed " message, #condition, ##__VA_ARGS__); \
+ true; \
+}))
+
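
WARN_UNLESS above logs the failed condition and evaluates to true, so callers below can release their buffer and return ERROR_MALFORMED where CHECK() would have aborted the whole process. A standalone sketch of the same pattern, with fprintf standing in for ALOGW (it uses the same GCC statement-expression extension as the macro above):

#include <cstdio>

#define SKETCH_WARN_UNLESS(condition, ...)                              \
    ((condition) ? false : ({                                           \
        fprintf(stderr, "Condition %s failed: ", #condition);           \
        fprintf(stderr, __VA_ARGS__);                                   \
        fprintf(stderr, "\n");                                          \
        true;                                                           \
    }))

static int writeSample(long long timestampUs) {
    if (SKETCH_WARN_UNLESS(timestampUs >= 0, "for %s track", "Video")) {
        return -1;   // caller-visible error instead of a process abort
    }
    return 0;
}

int main() {
    return writeSample(-5) == -1 ? 0 : 1;
}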
namespace android {
static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
-static const int64_t kMax32BitFileSize = 0x007fffffffLL;
+static const int64_t kMax32BitFileSize = 0x00ffffffffLL; // 2^32-1 : max FAT32
+ // filesystem file size
+ // used by most SD cards
static const uint8_t kNalUnitTypeSeqParamSet = 0x07;
static const uint8_t kNalUnitTypePicParamSet = 0x08;
static const int64_t kInitialDelayTimeUs = 700000LL;
@@ -407,7 +420,7 @@ status_t MPEG4Writer::dump(
}
status_t MPEG4Writer::Track::dump(
- int fd, const Vector<String16>& args) const {
+ int fd, const Vector<String16>& /* args */) const {
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
@@ -433,7 +446,7 @@ status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {
// At most 2 tracks can be supported.
if (mTracks.size() >= 2) {
- ALOGE("Too many tracks (%d) to add", mTracks.size());
+ ALOGE("Too many tracks (%zu) to add", mTracks.size());
return ERROR_UNSUPPORTED;
}
@@ -547,8 +560,8 @@ int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {
size = MAX_MOOV_BOX_SIZE;
}
- ALOGI("limits: %lld/%lld bytes/us, bit rate: %d bps and the estimated"
- " moov size %lld bytes",
+ ALOGI("limits: %" PRId64 "/%" PRId64 " bytes/us, bit rate: %d bps and the"
+ " estimated moov size %" PRId64 " bytes",
mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
return factor * size;
}
@@ -584,8 +597,8 @@ status_t MPEG4Writer::start(MetaData *param) {
// If file size is set to be larger than the 32 bit file
// size limit, treat it as an error.
if (mMaxFileSizeLimitBytes > kMax32BitFileSize) {
- ALOGW("32-bit file size limit (%lld bytes) too big. "
- "It is changed to %lld bytes",
+ ALOGW("32-bit file size limit (%" PRId64 " bytes) too big. "
+ "It is changed to %" PRId64 " bytes",
mMaxFileSizeLimitBytes, kMax32BitFileSize);
mMaxFileSizeLimitBytes = kMax32BitFileSize;
}
@@ -848,7 +861,7 @@ status_t MPEG4Writer::reset() {
}
if (mTracks.size() > 1) {
- ALOGD("Duration from tracks range is [%lld, %lld] us",
+ ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us",
minDurationUs, maxDurationUs);
}
@@ -863,11 +876,11 @@ status_t MPEG4Writer::reset() {
// Fix up the size of the 'mdat' chunk.
if (mUse32BitOffset) {
lseek64(mFd, mMdatOffset, SEEK_SET);
- int32_t size = htonl(static_cast<int32_t>(mOffset - mMdatOffset));
+ uint32_t size = htonl(static_cast<uint32_t>(mOffset - mMdatOffset));
::write(mFd, &size, 4);
} else {
lseek64(mFd, mMdatOffset + 8, SEEK_SET);
- int64_t size = mOffset - mMdatOffset;
+ uint64_t size = mOffset - mMdatOffset;
size = hton64(size);
::write(mFd, &size, 8);
}
@@ -975,13 +988,16 @@ void MPEG4Writer::writeFtypBox(MetaData *param) {
if (param && param->findInt32(kKeyFileType, &fileType) &&
fileType != OUTPUT_FORMAT_MPEG_4) {
writeFourcc("3gp4");
+ writeInt32(0);
+ writeFourcc("isom");
+ writeFourcc("3gp4");
} else {
+ writeFourcc("mp42");
+ writeInt32(0);
writeFourcc("isom");
+ writeFourcc("mp42");
}
- writeInt32(0);
- writeFourcc("isom");
- writeFourcc("3gp4");
endBox();
}
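
The rewritten branches above emit a full ftyp payload per output format: major brand, 32-bit minor version, then compatible brands ('mp42' major with 'isom'+'mp42' compatible for MP4, '3gp4' major with 'isom'+'3gp4' for 3GPP). A standalone sketch of the resulting MP4 byte layout; the helpers here are stand-ins, not MPEG4Writer's:

#include <arpa/inet.h>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

static void writeFourcc(std::vector<uint8_t> &out, const char *cc) {
    out.insert(out.end(), cc, cc + 4);
}

static void writeInt32(std::vector<uint8_t> &out, uint32_t v) {
    uint32_t be = htonl(v);
    const uint8_t *p = reinterpret_cast<const uint8_t *>(&be);
    out.insert(out.end(), p, p + 4);
}

int main() {
    std::vector<uint8_t> box;
    writeInt32(box, 0);            // placeholder for the box size
    writeFourcc(box, "ftyp");
    writeFourcc(box, "mp42");      // major brand
    writeInt32(box, 0);            // minor version
    writeFourcc(box, "isom");      // compatible brands...
    writeFourcc(box, "mp42");
    uint32_t size = htonl((uint32_t)box.size());
    memcpy(box.data(), &size, 4);  // patch in the final size
    printf("ftyp box is %zu bytes\n", box.size());   // prints 24
    return 0;
}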
@@ -1312,12 +1328,12 @@ bool MPEG4Writer::reachedEOS() {
}
void MPEG4Writer::setStartTimestampUs(int64_t timeUs) {
- ALOGI("setStartTimestampUs: %lld", timeUs);
+ ALOGI("setStartTimestampUs: %" PRId64, timeUs);
CHECK_GE(timeUs, 0ll);
Mutex::Autolock autoLock(mLock);
if (mStartTimestampUs < 0 || mStartTimestampUs > timeUs) {
mStartTimestampUs = timeUs;
- ALOGI("Earliest track starting time: %lld", mStartTimestampUs);
+ ALOGI("Earliest track starting time: %" PRId64, mStartTimestampUs);
}
}
@@ -1518,7 +1534,7 @@ void MPEG4Writer::Track::initTrackingProgressStatus(MetaData *params) {
{
int64_t timeUs;
if (params && params->findInt64(kKeyTrackTimeStatus, &timeUs)) {
- ALOGV("Receive request to track progress status for every %lld us", timeUs);
+ ALOGV("Receive request to track progress status for every %" PRId64 " us", timeUs);
mTrackEveryTimeDurationUs = timeUs;
mTrackingProgressStatus = true;
}
@@ -1552,7 +1568,7 @@ void MPEG4Writer::bufferChunk(const Chunk& chunk) {
}
void MPEG4Writer::writeChunkToFile(Chunk* chunk) {
- ALOGV("writeChunkToFile: %lld from %s track",
+ ALOGV("writeChunkToFile: %" PRId64 " from %s track",
chunk->mTimeStampUs, chunk->mTrack->isAudio()? "audio": "video");
int32_t isFirstSample = true;
@@ -1728,7 +1744,7 @@ status_t MPEG4Writer::Track::start(MetaData *params) {
startTimeOffsetUs = kInitialDelayTimeUs;
}
startTimeUs += startTimeOffsetUs;
- ALOGI("Start time offset: %lld us", startTimeOffsetUs);
+ ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
}
meta->setInt64(kKeyTime, startTimeUs);
@@ -1763,7 +1779,7 @@ status_t MPEG4Writer::Track::pause() {
}
status_t MPEG4Writer::Track::stop() {
- ALOGD("Stopping %s track", mIsAudio? "Audio": "Video");
+ ALOGD("%s track stopping", mIsAudio? "Audio": "Video");
if (!mStarted) {
ALOGE("Stop() called but track is not started");
return ERROR_END_OF_STREAM;
@@ -1774,19 +1790,14 @@ status_t MPEG4Writer::Track::stop() {
}
mDone = true;
+ ALOGD("%s track source stopping", mIsAudio? "Audio": "Video");
+ mSource->stop();
+ ALOGD("%s track source stopped", mIsAudio? "Audio": "Video");
+
void *dummy;
pthread_join(mThread, &dummy);
-
status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
- ALOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
- {
- status_t status = mSource->stop();
- if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
- err = status;
- }
- }
-
ALOGD("%s track stopped", mIsAudio? "Audio": "Video");
return err;
}
@@ -1813,7 +1824,7 @@ static void getNalUnitType(uint8_t byte, uint8_t* type) {
static const uint8_t *findNextStartCode(
const uint8_t *data, size_t length) {
- ALOGV("findNextStartCode: %p %d", data, length);
+ ALOGV("findNextStartCode: %p %zu", data, length);
size_t bytesLeft = length;
while (bytesLeft > 4 &&
@@ -2100,6 +2111,7 @@ status_t MPEG4Writer::Track::threadEntry() {
status_t err = OK;
MediaBuffer *buffer;
+ const char *trackName = mIsAudio ? "Audio" : "Video";
while (!mDone && (err = mSource->read(&buffer)) == OK) {
if (buffer->range_length() == 0) {
buffer->release();
@@ -2195,15 +2207,27 @@ status_t MPEG4Writer::Track::threadEntry() {
if (mResumed) {
int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
- CHECK_GE(durExcludingEarlierPausesUs, 0ll);
+ if (WARN_UNLESS(durExcludingEarlierPausesUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
- CHECK_GE(pausedDurationUs, lastDurationUs);
+ if (WARN_UNLESS(pausedDurationUs >= lastDurationUs, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
previousPausedDurationUs += pausedDurationUs - lastDurationUs;
mResumed = false;
}
timestampUs -= previousPausedDurationUs;
- CHECK_GE(timestampUs, 0ll);
+ if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
if (!mIsAudio) {
/*
* Composition time: timestampUs
@@ -2215,15 +2239,23 @@ status_t MPEG4Writer::Track::threadEntry() {
decodingTimeUs -= previousPausedDurationUs;
cttsOffsetTimeUs =
timestampUs + kMaxCttsOffsetTimeUs - decodingTimeUs;
- CHECK_GE(cttsOffsetTimeUs, 0ll);
+ if (WARN_UNLESS(cttsOffsetTimeUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
timestampUs = decodingTimeUs;
- ALOGV("decoding time: %lld and ctts offset time: %lld",
+ ALOGV("decoding time: %" PRId64 " and ctts offset time: %" PRId64,
timestampUs, cttsOffsetTimeUs);
// Update ctts box table if necessary
currCttsOffsetTimeTicks =
(cttsOffsetTimeUs * mTimeScale + 500000LL) / 1000000LL;
- CHECK_LE(currCttsOffsetTimeTicks, 0x0FFFFFFFFLL);
+ if (WARN_UNLESS(currCttsOffsetTimeTicks <= 0x0FFFFFFFFLL, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
if (mStszTableEntries->count() == 0) {
// Force the first ctts table entry to have one single entry
// so that we can do adjustment for the initial track start
@@ -2261,9 +2293,13 @@ status_t MPEG4Writer::Track::threadEntry() {
}
}
- CHECK_GE(timestampUs, 0ll);
- ALOGV("%s media time stamp: %lld and previous paused duration %lld",
- mIsAudio? "Audio": "Video", timestampUs, previousPausedDurationUs);
+ if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
+ ALOGV("%s media time stamp: %" PRId64 " and previous paused duration %" PRId64,
+ trackName, timestampUs, previousPausedDurationUs);
if (timestampUs > mTrackDurationUs) {
mTrackDurationUs = timestampUs;
}
@@ -2277,11 +2313,28 @@ status_t MPEG4Writer::Track::threadEntry() {
((timestampUs * mTimeScale + 500000LL) / 1000000LL -
(lastTimestampUs * mTimeScale + 500000LL) / 1000000LL);
if (currDurationTicks < 0ll) {
- ALOGE("timestampUs %lld < lastTimestampUs %lld for %s track",
- timestampUs, lastTimestampUs, mIsAudio? "Audio": "Video");
+ ALOGE("timestampUs %" PRId64 " < lastTimestampUs %" PRId64 " for %s track",
+ timestampUs, lastTimestampUs, trackName);
+ copy->release();
return UNKNOWN_ERROR;
}
+ // if the duration is different for this sample, see if it is close enough to the previous
+ // duration that we can fudge it and use the same value, to avoid filling the stts table
+ // with lots of near-identical entries.
+ // "close enough" here means that the current duration needs to be adjusted by less
+ // than 0.1 milliseconds
+ if (lastDurationTicks && (currDurationTicks != lastDurationTicks)) {
+ int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL
+ + (mTimeScale / 2)) / mTimeScale;
+ if (deltaUs > -100 && deltaUs < 100) {
+ // use previous ticks, and adjust timestamp as if it was actually that number
+ // of ticks
+ currDurationTicks = lastDurationTicks;
+ timestampUs += deltaUs;
+ }
+ }
+
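
The block above snaps a sample duration onto the previous one whenever the difference is under 0.1 ms and carries the rounding into the timestamp, so long runs of nearly identical durations collapse into a single stts entry. A standalone worked sketch with illustrative 30 fps numbers:

// A 90 kHz track alternating between 3003- and 3002-tick durations: the
// 11 us delta is below the 100 us threshold, so the previous duration is
// reused and the timestamp nudged to stay consistent.
#include <cstdint>
#include <cstdio>

int main() {
    const int64_t timeScale = 90000;
    int64_t lastDurationTicks = 3003;
    int64_t currDurationTicks = 3002;
    int64_t timestampUs = 1001000;   // hypothetical current sample timestamp

    if (lastDurationTicks && currDurationTicks != lastDurationTicks) {
        int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL
                + (timeScale / 2)) / timeScale;
        if (deltaUs > -100 && deltaUs < 100) {
            currDurationTicks = lastDurationTicks;   // reuse the previous run
            timestampUs += deltaUs;                  // keep later durations consistent
        }
    }
    printf("duration=%lld ticks, adjusted timestamp=%lld us\n",
           (long long)currDurationTicks, (long long)timestampUs);
    return 0;
}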
mStszTableEntries->add(htonl(sampleSize));
if (mStszTableEntries->count() > 2) {
@@ -2301,8 +2354,8 @@ status_t MPEG4Writer::Track::threadEntry() {
}
previousSampleSize = sampleSize;
}
- ALOGV("%s timestampUs/lastTimestampUs: %lld/%lld",
- mIsAudio? "Audio": "Video", timestampUs, lastTimestampUs);
+ ALOGV("%s timestampUs/lastTimestampUs: %" PRId64 "/%" PRId64,
+ trackName, timestampUs, lastTimestampUs);
lastDurationUs = timestampUs - lastTimestampUs;
lastDurationTicks = currDurationTicks;
lastTimestampUs = timestampUs;
@@ -2407,9 +2460,9 @@ status_t MPEG4Writer::Track::threadEntry() {
sendTrackSummary(hasMultipleTracks);
ALOGI("Received total/0-length (%d/%d) buffers and encoded %d frames. - %s",
- count, nZeroLengthFrames, mStszTableEntries->count(), mIsAudio? "audio": "video");
+ count, nZeroLengthFrames, mStszTableEntries->count(), trackName);
if (mIsAudio) {
- ALOGI("Audio track drift time: %lld us", mOwner->getDriftTimeUs());
+ ALOGI("Audio track drift time: %" PRId64 " us", mOwner->getDriftTimeUs());
}
if (err == ERROR_END_OF_STREAM) {
@@ -2492,11 +2545,11 @@ void MPEG4Writer::Track::sendTrackSummary(bool hasMultipleTracks) {
}
void MPEG4Writer::Track::trackProgressStatus(int64_t timeUs, status_t err) {
- ALOGV("trackProgressStatus: %lld us", timeUs);
+ ALOGV("trackProgressStatus: %" PRId64 " us", timeUs);
if (mTrackEveryTimeDurationUs > 0 &&
timeUs - mPreviousTrackTimeUs >= mTrackEveryTimeDurationUs) {
- ALOGV("Fire time tracking progress status at %lld us", timeUs);
+ ALOGV("Fire time tracking progress status at %" PRId64 " us", timeUs);
mOwner->trackProgressStatus(mTrackId, timeUs - mPreviousTrackTimeUs, err);
mPreviousTrackTimeUs = timeUs;
}
@@ -2530,13 +2583,13 @@ void MPEG4Writer::trackProgressStatus(
}
void MPEG4Writer::setDriftTimeUs(int64_t driftTimeUs) {
- ALOGV("setDriftTimeUs: %lld us", driftTimeUs);
+ ALOGV("setDriftTimeUs: %" PRId64 " us", driftTimeUs);
Mutex::Autolock autolock(mLock);
mDriftTimeUs = driftTimeUs;
}
int64_t MPEG4Writer::getDriftTimeUs() {
- ALOGV("getDriftTimeUs: %lld us", mDriftTimeUs);
+ ALOGV("getDriftTimeUs: %" PRId64 " us", mDriftTimeUs);
Mutex::Autolock autolock(mLock);
return mDriftTimeUs;
}
@@ -2749,8 +2802,10 @@ void MPEG4Writer::Track::writeMp4aEsdsBox() {
mOwner->writeInt16(0x03); // XXX
mOwner->writeInt8(0x00); // buffer size 24-bit
- mOwner->writeInt32(96000); // max bit rate
- mOwner->writeInt32(96000); // avg bit rate
+ int32_t bitRate;
+ bool success = mMeta->findInt32(kKeyBitRate, &bitRate);
+ mOwner->writeInt32(success ? bitRate : 96000); // max bit rate
+ mOwner->writeInt32(success ? bitRate : 96000); // avg bit rate
mOwner->writeInt8(0x05); // DecoderSpecificInfoTag
mOwner->writeInt8(mCodecSpecificDataSize);
@@ -2992,7 +3047,7 @@ void MPEG4Writer::Track::writeCttsBox() {
return;
}
- ALOGV("ctts box has %d entries with range [%lld, %lld]",
+ ALOGV("ctts box has %d entries with range [%" PRId64 ", %" PRId64 "]",
mCttsTableEntries->count(), mMinCttsOffsetTimeUs, mMaxCttsOffsetTimeUs);
mOwner->beginBox("ctts");
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
index 2484212..d680e0c 100644
--- a/media/libstagefright/MediaAdapter.cpp
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -36,7 +36,7 @@ MediaAdapter::~MediaAdapter() {
CHECK(mCurrentMediaBuffer == NULL);
}
-status_t MediaAdapter::start(MetaData *params) {
+status_t MediaAdapter::start(MetaData * /* params */) {
Mutex::Autolock autoLock(mAdapterLock);
if (!mStarted) {
mStarted = true;
@@ -75,7 +75,7 @@ void MediaAdapter::signalBufferReturned(MediaBuffer *buffer) {
}
status_t MediaAdapter::read(
- MediaBuffer **buffer, const ReadOptions *options) {
+ MediaBuffer **buffer, const ReadOptions * /* options */) {
Mutex::Autolock autoLock(mAdapterLock);
if (!mStarted) {
ALOGV("Read before even started!");
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
index 11b80bf..1f80a47 100644
--- a/media/libstagefright/MediaBuffer.cpp
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -27,7 +27,6 @@
#include <media/stagefright/MetaData.h>
#include <ui/GraphicBuffer.h>
-#include <sys/atomics.h>
namespace android {
@@ -92,7 +91,7 @@ void MediaBuffer::release() {
return;
}
- int prevCount = __atomic_dec(&mRefCount);
+ int prevCount = __sync_fetch_and_sub(&mRefCount, 1);
if (prevCount == 1) {
if (mObserver == NULL) {
delete this;
@@ -112,7 +111,7 @@ void MediaBuffer::claim() {
}
void MediaBuffer::add_ref() {
- (void) __atomic_inc(&mRefCount);
+ (void) __sync_fetch_and_add(&mRefCount, 1);
}
void *MediaBuffer::data() const {
@@ -135,7 +134,7 @@ size_t MediaBuffer::range_length() const {
void MediaBuffer::set_range(size_t offset, size_t length) {
if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
- ALOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize);
+ ALOGE("offset = %zu, length = %zu, mSize = %zu", offset, length, mSize);
}
CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
diff --git a/media/libstagefright/MediaBufferGroup.cpp b/media/libstagefright/MediaBufferGroup.cpp
index 80aae51..6ac6d4a 100644
--- a/media/libstagefright/MediaBufferGroup.cpp
+++ b/media/libstagefright/MediaBufferGroup.cpp
@@ -55,7 +55,8 @@ void MediaBufferGroup::add_buffer(MediaBuffer *buffer) {
mLastBuffer = buffer;
}
-status_t MediaBufferGroup::acquire_buffer(MediaBuffer **out) {
+status_t MediaBufferGroup::acquire_buffer(
+ MediaBuffer **out, bool nonBlocking) {
Mutex::Autolock autoLock(mLock);
for (;;) {
@@ -70,6 +71,11 @@ status_t MediaBufferGroup::acquire_buffer(MediaBuffer **out) {
}
}
+ if (nonBlocking) {
+ *out = NULL;
+ return WOULD_BLOCK;
+ }
+
// All buffers are in use. Block until one of them is returned to us.
mCondition.wait(mLock);
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index c4c47b3..6ca123a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -16,12 +16,13 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaCodec"
-#include <utils/Log.h>
-
-#include <media/stagefright/MediaCodec.h>
+#include <inttypes.h>
+#include "include/avc_utils.h"
#include "include/SoftwareRenderer.h"
+#include <binder/IBatteryStats.h>
+#include <binder/IServiceManager.h>
#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -31,45 +32,119 @@
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/NativeWindowWrapper.h>
-
-#include "include/avc_utils.h"
+#include <private/android_filesystem_config.h>
+#include <utils/Log.h>
+#include <utils/Singleton.h>
namespace android {
+struct MediaCodec::BatteryNotifier : public Singleton<BatteryNotifier> {
+ BatteryNotifier();
+
+ void noteStartVideo();
+ void noteStopVideo();
+ void noteStartAudio();
+ void noteStopAudio();
+
+private:
+ int32_t mVideoRefCount;
+ int32_t mAudioRefCount;
+ sp<IBatteryStats> mBatteryStatService;
+};
+
+ANDROID_SINGLETON_STATIC_INSTANCE(MediaCodec::BatteryNotifier)
+
+MediaCodec::BatteryNotifier::BatteryNotifier() :
+ mVideoRefCount(0),
+ mAudioRefCount(0) {
+ // get battery service
+ const sp<IServiceManager> sm(defaultServiceManager());
+ if (sm != NULL) {
+ const String16 name("batterystats");
+ mBatteryStatService = interface_cast<IBatteryStats>(sm->getService(name));
+ if (mBatteryStatService == NULL) {
+ ALOGE("batterystats service unavailable!");
+ }
+ }
+}
+
+void MediaCodec::BatteryNotifier::noteStartVideo() {
+ if (mVideoRefCount == 0 && mBatteryStatService != NULL) {
+ mBatteryStatService->noteStartVideo(AID_MEDIA);
+ }
+ mVideoRefCount++;
+}
+
+void MediaCodec::BatteryNotifier::noteStopVideo() {
+ if (mVideoRefCount == 0) {
+ ALOGW("BatteryNotifier::noteStop(): video refcount is broken!");
+ return;
+ }
+
+ mVideoRefCount--;
+ if (mVideoRefCount == 0 && mBatteryStatService != NULL) {
+ mBatteryStatService->noteStopVideo(AID_MEDIA);
+ }
+}
+
+void MediaCodec::BatteryNotifier::noteStartAudio() {
+ if (mAudioRefCount == 0 && mBatteryStatService != NULL) {
+ mBatteryStatService->noteStartAudio(AID_MEDIA);
+ }
+ mAudioRefCount++;
+}
+
+void MediaCodec::BatteryNotifier::noteStopAudio() {
+ if (mAudioRefCount == 0) {
+ ALOGW("BatteryNotifier::noteStop(): audio refcount is broken!");
+ return;
+ }
+
+ mAudioRefCount--;
+ if (mAudioRefCount == 0 && mBatteryStatService != NULL) {
+ mBatteryStatService->noteStopAudio(AID_MEDIA);
+ }
+}
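
BatteryNotifier above is an edge-triggered refcount: only the 0-to-1 start and the 1-to-0 stop reach the batterystats service, no matter how many codecs are active at once. A standalone sketch of that pattern, with printf standing in for the binder calls:

#include <cstdint>
#include <cstdio>

struct VideoUseNotifier {
    int32_t refCount = 0;
    void noteStart() {
        if (refCount == 0) {
            printf("noteStartVideo\n");   // first active codec
        }
        refCount++;
    }
    void noteStop() {
        if (refCount == 0) {
            printf("refcount is broken!\n");
            return;
        }
        if (--refCount == 0) {
            printf("noteStopVideo\n");    // last active codec went away
        }
    }
};

int main() {
    VideoUseNotifier n;
    n.noteStart();   // -> noteStartVideo
    n.noteStart();   // no call, already counted as active
    n.noteStop();    // no call
    n.noteStop();    // -> noteStopVideo
    return 0;
}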
// static
sp<MediaCodec> MediaCodec::CreateByType(
- const sp<ALooper> &looper, const char *mime, bool encoder) {
+ const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err) {
sp<MediaCodec> codec = new MediaCodec(looper);
- if (codec->init(mime, true /* nameIsType */, encoder) != OK) {
- return NULL;
- }
- return codec;
+ const status_t ret = codec->init(mime, true /* nameIsType */, encoder);
+ if (err != NULL) {
+ *err = ret;
+ }
+ return ret == OK ? codec : NULL; // NULL deallocates codec.
}
// static
sp<MediaCodec> MediaCodec::CreateByComponentName(
- const sp<ALooper> &looper, const char *name) {
+ const sp<ALooper> &looper, const char *name, status_t *err) {
sp<MediaCodec> codec = new MediaCodec(looper);
- if (codec->init(name, false /* nameIsType */, false /* encoder */) != OK) {
- return NULL;
- }
- return codec;
+ const status_t ret = codec->init(name, false /* nameIsType */, false /* encoder */);
+ if (err != NULL) {
+ *err = ret;
+ }
+ return ret == OK ? codec : NULL; // NULL deallocates codec.
}
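
Both factories above still return NULL on failure but now report the underlying init() status through an optional out-parameter, so callers can distinguish why creation failed. A standalone sketch of the same factory-with-status shape; the names are illustrative, not the MediaCodec API:

#include <cstdio>
#include <memory>

enum Status { OK = 0, ERROR_UNSUPPORTED = -1 };

struct Decoder {
    Status init(const char *mime) {
        return (mime && mime[0]) ? OK : ERROR_UNSUPPORTED;
    }
};

static std::unique_ptr<Decoder> createByType(const char *mime, Status *err) {
    auto decoder = std::make_unique<Decoder>();
    Status ret = decoder->init(mime);
    if (err != nullptr) {
        *err = ret;            // report the cause even when returning null
    }
    if (ret != OK) {
        return nullptr;        // the decoder is freed here, as in the patch
    }
    return decoder;
}

int main() {
    Status err = OK;
    auto dec = createByType("", &err);
    if (dec == nullptr) {
        printf("create failed: %d\n", err);
    }
    return 0;
}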
MediaCodec::MediaCodec(const sp<ALooper> &looper)
: mState(UNINITIALIZED),
mLooper(looper),
- mCodec(new ACodec),
+ mCodec(NULL),
mReplyID(0),
mFlags(0),
+ mStickyError(OK),
mSoftRenderer(NULL),
+ mBatteryStatNotified(false),
+ mIsVideo(false),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
@@ -97,29 +172,42 @@ status_t MediaCodec::PostAndAwaitResponse(
return err;
}
-status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) {
+// static
+void MediaCodec::PostReplyWithError(int32_t replyID, int32_t err) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+}
+
+status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
+ // save init parameters for reset
+ mInitName = name;
+ mInitNameIsType = nameIsType;
+ mInitIsEncoder = encoder;
+
// Current video decoders do not return from OMX_FillThisBuffer
// quickly, violating the OpenMAX specs, until that is remedied
// we need to invest in an extra looper to free the main event
// queue.
+ mCodec = new ACodec;
bool needDedicatedLooper = false;
- if (nameIsType && !strncasecmp(name, "video/", 6)) {
+ if (nameIsType && !strncasecmp(name.c_str(), "video/", 6)) {
needDedicatedLooper = true;
} else {
AString tmp = name;
if (tmp.endsWith(".secure")) {
tmp.erase(tmp.size() - 7, 7);
}
- const MediaCodecList *mcl = MediaCodecList::getInstance();
+ const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
ssize_t codecIdx = mcl->findCodecByName(tmp.c_str());
if (codecIdx >= 0) {
- Vector<AString> types;
- if (mcl->getSupportedTypes(codecIdx, &types) == OK) {
- for (int i = 0; i < types.size(); i++) {
- if (types[i].startsWith("video/")) {
- needDedicatedLooper = true;
- break;
- }
+ const sp<MediaCodecInfo> info = mcl->getCodecInfo(codecIdx);
+ Vector<AString> mimes;
+ info->getSupportedMimes(&mimes);
+ for (size_t i = 0; i < mimes.size(); i++) {
+ if (mimes[i].startsWith("video/")) {
+ needDedicatedLooper = true;
+ break;
}
}
}
@@ -153,6 +241,14 @@ status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) {
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
+ sp<AMessage> msg = new AMessage(kWhatSetCallback, id());
+ msg->setMessage("callback", callback);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::configure(
const sp<AMessage> &format,
const sp<Surface> &nativeWindow,
@@ -174,7 +270,20 @@ status_t MediaCodec::configure(
}
sp<AMessage> response;
- return PostAndAwaitResponse(msg, &response);
+ status_t err = PostAndAwaitResponse(msg, &response);
+
+ if (err != OK && err != INVALID_OPERATION) {
+ // MediaCodec now sets its state to UNINITIALIZED upon any fatal error.
+ // To maintain backward-compatibility, do a reset() to put codec
+ // back into INITIALIZED state.
+ // But don't reset if the err is INVALID_OPERATION, which means
+ // the configure failure is due to wrong state.
+
+ ALOGE("configure failed with err 0x%08x, resetting...", err);
+ reset();
+ }
+
+ return err;
}
status_t MediaCodec::createInputSurface(
@@ -218,6 +327,41 @@ status_t MediaCodec::release() {
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::reset() {
+ /* When external-facing MediaCodec object is created,
+ it is already initialized. Thus, reset is essentially
+ release() followed by init(), plus clearing the state */
+
+ status_t err = release();
+
+ // unregister handlers
+ if (mCodec != NULL) {
+ if (mCodecLooper != NULL) {
+ mCodecLooper->unregisterHandler(mCodec->id());
+ } else {
+ mLooper->unregisterHandler(mCodec->id());
+ }
+ mCodec = NULL;
+ }
+ mLooper->unregisterHandler(id());
+
+ mFlags = 0; // clear all flags
+ mStickyError = OK;
+
+ // reset state not reset by setState(UNINITIALIZED)
+ mReplyID = 0;
+ mDequeueInputReplyID = 0;
+ mDequeueOutputReplyID = 0;
+ mDequeueInputTimeoutGeneration = 0;
+ mDequeueOutputTimeoutGeneration = 0;
+ mHaveInputSurface = false;
+
+ if (err == OK) {
+ err = init(mInitName, mInitNameIsType, mInitIsEncoder);
+ }
+ return err;
+}
+
status_t MediaCodec::queueInputBuffer(
size_t index,
size_t offset,
@@ -323,6 +467,16 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
+ sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+ msg->setSize("index", index);
+ msg->setInt32("render", true);
+ msg->setInt64("timestampNs", timestampNs);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::releaseOutputBuffer(size_t index) {
sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
msg->setSize("index", index);
@@ -352,6 +506,20 @@ status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
return OK;
}
+status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
+ sp<AMessage> msg = new AMessage(kWhatGetInputFormat, id());
+
+ sp<AMessage> response;
+ status_t err;
+ if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+ return err;
+ }
+
+ CHECK(response->findMessage("format", format));
+
+ return OK;
+}
+
status_t MediaCodec::getName(AString *name) const {
sp<AMessage> msg = new AMessage(kWhatGetName, id());
@@ -384,6 +552,55 @@ status_t MediaCodec::getOutputBuffers(Vector<sp<ABuffer> > *buffers) const {
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::getOutputBuffer(size_t index, sp<ABuffer> *buffer) {
+ sp<AMessage> format;
+ return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
+}
+
+status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
+ sp<ABuffer> buffer;
+ return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
+}
+
+status_t MediaCodec::getInputBuffer(size_t index, sp<ABuffer> *buffer) {
+ sp<AMessage> format;
+ return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
+}
+
+bool MediaCodec::isExecuting() const {
+ return mState == STARTED || mState == FLUSHED;
+}
+
+status_t MediaCodec::getBufferAndFormat(
+ size_t portIndex, size_t index,
+ sp<ABuffer> *buffer, sp<AMessage> *format) {
+ // use mutex instead of a context switch
+
+ buffer->clear();
+ format->clear();
+ if (!isExecuting()) {
+ return INVALID_OPERATION;
+ }
+
+ // we do not want mPortBuffers to change during this section
+ // we also don't want mOwnedByClient to change during this
+ Mutex::Autolock al(mBufferLock);
+ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+ if (index < buffers->size()) {
+ const BufferInfo &info = buffers->itemAt(index);
+ if (info.mOwnedByClient) {
+ // by the time buffers array is initialized, crypto is set
+ if (portIndex == kPortIndexInput && mCrypto != NULL) {
+ *buffer = info.mEncryptedData;
+ } else {
+ *buffer = info.mData;
+ }
+ *format = info.mFormat;
+ }
+ }
+ return OK;
+}
+
status_t MediaCodec::flush() {
sp<AMessage> msg = new AMessage(kWhatFlush, id());
@@ -407,9 +624,7 @@ void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
void MediaCodec::cancelPendingDequeueOperations() {
if (mFlags & kFlagDequeueInputPending) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
- response->postReply(mDequeueInputReplyID);
+ PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
++mDequeueInputTimeoutGeneration;
mDequeueInputReplyID = 0;
@@ -417,9 +632,7 @@ void MediaCodec::cancelPendingDequeueOperations() {
}
if (mFlags & kFlagDequeueOutputPending) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
- response->postReply(mDequeueOutputReplyID);
+ PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
++mDequeueOutputTimeoutGeneration;
mDequeueOutputReplyID = 0;
@@ -428,14 +641,12 @@ void MediaCodec::cancelPendingDequeueOperations() {
}
bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {
- if (mState != STARTED
- || (mFlags & kFlagStickyError)
+ if (!isExecuting() || (mFlags & kFlagIsAsync)
|| (newRequest && (mFlags & kFlagDequeueInputPending))) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
-
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ return true;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
return true;
}
@@ -456,10 +667,11 @@ bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {
bool MediaCodec::handleDequeueOutputBuffer(uint32_t replyID, bool newRequest) {
sp<AMessage> response = new AMessage;
- if (mState != STARTED
- || (mFlags & kFlagStickyError)
+ if (!isExecuting() || (mFlags & kFlagIsAsync)
|| (newRequest && (mFlags & kFlagDequeueOutputPending))) {
response->setInt32("err", INVALID_OPERATION);
+ } else if (mFlags & kFlagStickyError) {
+ response->setInt32("err", getStickyError());
} else if (mFlags & kFlagOutputBuffersChanged) {
response->setInt32("err", INFO_OUTPUT_BUFFERS_CHANGED);
mFlags &= ~kFlagOutputBuffersChanged;
@@ -516,22 +728,20 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->findInt32("what", &what));
switch (what) {
- case ACodec::kWhatError:
+ case CodecBase::kWhatError:
{
- int32_t omxError, internalError;
- CHECK(msg->findInt32("omx-error", &omxError));
- CHECK(msg->findInt32("err", &internalError));
+ int32_t err, actionCode;
+ CHECK(msg->findInt32("err", &err));
+ CHECK(msg->findInt32("actionCode", &actionCode));
- ALOGE("Codec reported an error. "
- "(omx error 0x%08x, internalError %d)",
- omxError, internalError);
-
- if (omxError == OMX_ErrorResourcesLost
- && internalError == DEAD_OBJECT) {
+ ALOGE("Codec reported err %#x, actionCode %d, while in state %d",
+ err, actionCode, mState);
+ if (err == DEAD_OBJECT) {
mFlags |= kFlagSawMediaServerDie;
+ mFlags &= ~kFlagIsComponentAllocated;
}
- bool sendErrorReponse = true;
+ bool sendErrorResponse = true;
switch (mState) {
case INITIALIZING:
@@ -542,13 +752,15 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case CONFIGURING:
{
- setState(INITIALIZED);
+ setState(actionCode == ACTION_CODE_FATAL ?
+ UNINITIALIZED : INITIALIZED);
break;
}
case STARTING:
{
- setState(CONFIGURED);
+ setState(actionCode == ACTION_CODE_FATAL ?
+ UNINITIALIZED : CONFIGURED);
break;
}
@@ -558,7 +770,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// Ignore the error, assuming we'll still get
// the shutdown complete notification.
- sendErrorReponse = false;
+ sendErrorResponse = false;
if (mFlags & kFlagSawMediaServerDie) {
// MediaServer died, there definitely won't
@@ -569,7 +781,9 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// STOPPING->UNINITIALIZED, instead of the
// usual STOPPING->INITIALIZED state.
setState(UNINITIALIZED);
-
+ if (mState == RELEASING) {
+ mComponentName.clear();
+ }
(new AMessage)->postReply(mReplyID);
}
break;
@@ -577,51 +791,87 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
case FLUSHING:
{
- setState(STARTED);
+ if (actionCode == ACTION_CODE_FATAL) {
+ setState(UNINITIALIZED);
+ } else {
+ setState(
+ (mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
+ }
break;
}
+ case FLUSHED:
case STARTED:
{
- sendErrorReponse = false;
+ sendErrorResponse = false;
- mFlags |= kFlagStickyError;
+ setStickyError(err);
postActivityNotificationIfPossible();
cancelPendingDequeueOperations();
+
+ if (mFlags & kFlagIsAsync) {
+ onError(err, actionCode);
+ }
+ switch (actionCode) {
+ case ACTION_CODE_TRANSIENT:
+ break;
+ case ACTION_CODE_RECOVERABLE:
+ setState(INITIALIZED);
+ break;
+ default:
+ setState(UNINITIALIZED);
+ break;
+ }
break;
}
default:
{
- sendErrorReponse = false;
+ sendErrorResponse = false;
- mFlags |= kFlagStickyError;
+ setStickyError(err);
postActivityNotificationIfPossible();
+
+ // actionCode in an uninitialized state is always fatal.
+ if (mState == UNINITIALIZED) {
+ actionCode = ACTION_CODE_FATAL;
+ }
+ if (mFlags & kFlagIsAsync) {
+ onError(err, actionCode);
+ }
+ switch (actionCode) {
+ case ACTION_CODE_TRANSIENT:
+ break;
+ case ACTION_CODE_RECOVERABLE:
+ setState(INITIALIZED);
+ break;
+ default:
+ setState(UNINITIALIZED);
+ break;
+ }
break;
}
}
- if (sendErrorReponse) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", UNKNOWN_ERROR);
-
- response->postReply(mReplyID);
+ if (sendErrorResponse) {
+ PostReplyWithError(mReplyID, err);
}
break;
}
- case ACodec::kWhatComponentAllocated:
+ case CodecBase::kWhatComponentAllocated:
{
CHECK_EQ(mState, INITIALIZING);
setState(INITIALIZED);
+ mFlags |= kFlagIsComponentAllocated;
CHECK(msg->findString("componentName", &mComponentName));
if (mComponentName.startsWith("OMX.google.")) {
- mFlags |= kFlagIsSoftwareCodec;
+ mFlags |= kFlagUsesSoftwareRenderer;
} else {
- mFlags &= ~kFlagIsSoftwareCodec;
+ mFlags &= ~kFlagUsesSoftwareRenderer;
}
if (mComponentName.endsWith(".secure")) {
@@ -634,21 +884,29 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case ACodec::kWhatComponentConfigured:
+ case CodecBase::kWhatComponentConfigured:
{
CHECK_EQ(mState, CONFIGURING);
- setState(CONFIGURED);
// reset input surface flag
mHaveInputSurface = false;
+ CHECK(msg->findMessage("input-format", &mInputFormat));
+ CHECK(msg->findMessage("output-format", &mOutputFormat));
+
+ int32_t usingSwRenderer;
+ if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
+ && usingSwRenderer) {
+ mFlags |= kFlagUsesSoftwareRenderer;
+ }
+ setState(CONFIGURED);
(new AMessage)->postReply(mReplyID);
break;
}
- case ACodec::kWhatInputSurfaceCreated:
+ case CodecBase::kWhatInputSurfaceCreated:
{
- // response to ACodec::kWhatCreateInputSurface
+ // response to initiateCreateInputSurface()
status_t err = NO_ERROR;
sp<AMessage> response = new AMessage();
if (!msg->findInt32("err", &err)) {
@@ -664,9 +922,9 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case ACodec::kWhatSignaledInputEOS:
+ case CodecBase::kWhatSignaledInputEOS:
{
- // response to ACodec::kWhatSignalEndOfInputStream
+ // response to signalEndOfInputStream()
sp<AMessage> response = new AMessage();
status_t err;
if (msg->findInt32("err", &err)) {
@@ -677,8 +935,9 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
}
- case ACodec::kWhatBuffersAllocated:
+ case CodecBase::kWhatBuffersAllocated:
{
+ Mutex::Autolock al(mBufferLock);
int32_t portIndex;
CHECK(msg->findInt32("portIndex", &portIndex));
@@ -695,8 +954,8 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
sp<RefBase> obj;
CHECK(msg->findObject("portDesc", &obj));
- sp<ACodec::PortDescription> portDesc =
- static_cast<ACodec::PortDescription *>(obj.get());
+ sp<CodecBase::PortDescription> portDesc =
+ static_cast<CodecBase::PortDescription *>(obj.get());
size_t numBuffers = portDesc->countBuffers();
@@ -729,40 +988,18 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case ACodec::kWhatOutputFormatChanged:
+ case CodecBase::kWhatOutputFormatChanged:
{
ALOGV("codec output format changed");
- if ((mFlags & kFlagIsSoftwareCodec)
- && mNativeWindow != NULL) {
+ if (mSoftRenderer == NULL &&
+ mNativeWindow != NULL &&
+ (mFlags & kFlagUsesSoftwareRenderer)) {
AString mime;
CHECK(msg->findString("mime", &mime));
- if (!strncasecmp("video/", mime.c_str(), 6)) {
- delete mSoftRenderer;
- mSoftRenderer = NULL;
-
- int32_t width, height;
- CHECK(msg->findInt32("width", &width));
- CHECK(msg->findInt32("height", &height));
-
- int32_t cropLeft, cropTop, cropRight, cropBottom;
- CHECK(msg->findRect("crop",
- &cropLeft, &cropTop, &cropRight, &cropBottom));
-
- int32_t colorFormat;
- CHECK(msg->findInt32(
- "color-format", &colorFormat));
-
- sp<MetaData> meta = new MetaData;
- meta->setInt32(kKeyWidth, width);
- meta->setInt32(kKeyHeight, height);
- meta->setRect(kKeyCropRect,
- cropLeft, cropTop, cropRight, cropBottom);
- meta->setInt32(kKeyColorFormat, colorFormat);
-
- mSoftRenderer =
- new SoftwareRenderer(mNativeWindow, meta);
+ if (mime.startsWithIgnoreCase("video/")) {
+ mSoftRenderer = new SoftwareRenderer(mNativeWindow);
}
}
@@ -773,14 +1010,28 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// collect codec specific data and amend the output
// format as necessary.
mFlags |= kFlagGatherCodecSpecificData;
+ } else if (mFlags & kFlagIsAsync) {
+ onOutputFormatChanged();
} else {
mFlags |= kFlagOutputFormatChanged;
postActivityNotificationIfPossible();
}
+
+ // Notify mCrypto of video resolution changes
+ if (mCrypto != NULL) {
+ int32_t left, top, right, bottom, width, height;
+ if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
+ } else if (mOutputFormat->findInt32("width", &width)
+ && mOutputFormat->findInt32("height", &height)) {
+ mCrypto->notifyResolution(width, height);
+ }
+ }
+
break;
}
- case ACodec::kWhatFillThisBuffer:
+ case CodecBase::kWhatFillThisBuffer:
{
/* size_t index = */updateBuffers(kPortIndexInput, msg);
@@ -807,7 +1058,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
ALOGE("queueCSDInputBuffer failed w/ error %d",
err);
- mFlags |= kFlagStickyError;
+ setStickyError(err);
postActivityNotificationIfPossible();
cancelPendingDequeueOperations();
@@ -815,7 +1066,11 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- if (mFlags & kFlagDequeueInputPending) {
+ if (mFlags & kFlagIsAsync) {
+ if (!mHaveInputSurface) {
+ onInputBufferAvailable();
+ }
+ } else if (mFlags & kFlagDequeueInputPending) {
CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
++mDequeueInputTimeoutGeneration;
@@ -827,7 +1082,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case ACodec::kWhatDrainThisBuffer:
+ case CodecBase::kWhatDrainThisBuffer:
{
/* size_t index = */updateBuffers(kPortIndexOutput, msg);
@@ -862,10 +1117,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
}
mFlags &= ~kFlagGatherCodecSpecificData;
- mFlags |= kFlagOutputFormatChanged;
+ if (mFlags & kFlagIsAsync) {
+ onOutputFormatChanged();
+ } else {
+ mFlags |= kFlagOutputFormatChanged;
+ }
}
- if (mFlags & kFlagDequeueOutputPending) {
+ if (mFlags & kFlagIsAsync) {
+ onOutputBufferAvailable();
+ } else if (mFlags & kFlagDequeueOutputPending) {
CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
++mDequeueOutputTimeoutGeneration;
@@ -878,32 +1139,42 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case ACodec::kWhatEOS:
+ case CodecBase::kWhatEOS:
{
// We already notify the client of this by using the
// corresponding flag in "onOutputBufferReady".
break;
}
- case ACodec::kWhatShutdownCompleted:
+ case CodecBase::kWhatShutdownCompleted:
{
if (mState == STOPPING) {
setState(INITIALIZED);
} else {
CHECK_EQ(mState, RELEASING);
setState(UNINITIALIZED);
+ mComponentName.clear();
}
+ mFlags &= ~kFlagIsComponentAllocated;
(new AMessage)->postReply(mReplyID);
break;
}
- case ACodec::kWhatFlushCompleted:
+ case CodecBase::kWhatFlushCompleted:
{
- CHECK_EQ(mState, FLUSHING);
- setState(STARTED);
+ if (mState != FLUSHING) {
+ ALOGW("received FlushCompleted message in state %d",
+ mState);
+ break;
+ }
- mCodec->signalResume();
+ if (mFlags & kFlagIsAsync) {
+ setState(FLUSHED);
+ } else {
+ setState(STARTED);
+ mCodec->signalResume();
+ }
(new AMessage)->postReply(mReplyID);
break;
@@ -921,10 +1192,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState != UNINITIALIZED) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -954,16 +1222,45 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSetCallback:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState == UNINITIALIZED
+ || mState == INITIALIZING
+ || isExecuting()) {
+ // callback can't be set after codec is executing,
+ // or before it's initialized (as the callback
+ // will be cleared when it goes to INITIALIZED)
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+
+ sp<AMessage> callback;
+ CHECK(msg->findMessage("callback", &callback));
+
+ mCallback = callback;
+
+ if (mCallback != NULL) {
+ ALOGI("MediaCodec will operate in async mode");
+ mFlags |= kFlagIsAsync;
+ } else {
+ mFlags &= ~kFlagIsAsync;
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->postReply(replyID);
+ break;
+ }
+
case kWhatConfigure:
{
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState != INITIALIZED) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -983,10 +1280,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
->getSurfaceTextureClient());
if (err != OK) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
-
- response->postReply(replyID);
+ PostReplyWithError(replyID, err);
break;
}
} else {
@@ -1024,10 +1318,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// Must be configured, but can't have been started yet.
if (mState != CONFIGURED) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -1041,11 +1332,13 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != CONFIGURED) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ if (mState == FLUSHED) {
+ setState(STARTED);
+ mCodec->signalResume();
+ PostReplyWithError(replyID, OK);
+ break;
+ } else if (mState != CONFIGURED) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -1065,20 +1358,24 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != INITIALIZED
- && mState != CONFIGURED && mState != STARTED) {
- // We may be in "UNINITIALIZED" state already without the
+ if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1
+ && mState != INITIALIZED
+ && mState != CONFIGURED && !isExecuting()) {
+ // 1) Permit release to shut down the component if allocated.
+ //
+ // 2) We may be in "UNINITIALIZED" state already and
+ // also shutdown the encoder/decoder without the
// client being aware of this if media server died while
// we were being stopped. The client would assume that
// after stop() returned, it would be safe to call release()
// and it should be in this case, no harm to allow a release()
// if we're already uninitialized.
- // Similarly stopping a stopped MediaCodec should be benign.
sp<AMessage> response = new AMessage;
- response->setInt32(
- "err",
- mState == targetState ? OK : INVALID_OPERATION);
-
+ status_t err = mState == targetState ? OK : INVALID_OPERATION;
+ response->setInt32("err", err);
+ if (err == OK && targetState == UNINITIALIZED) {
+ mComponentName.clear();
+ }
response->postReply(replyID);
break;
}
@@ -1087,6 +1384,9 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// It's dead, Jim. Don't expect initiateShutdown to yield
// any useful results now...
setState(UNINITIALIZED);
+ if (targetState == UNINITIALIZED) {
+ mComponentName.clear();
+ }
(new AMessage)->postReply(replyID);
break;
}
@@ -1106,11 +1406,15 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ if (mFlags & kFlagIsAsync) {
+ ALOGE("dequeueOutputBuffer can't be used in async mode");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+
if (mHaveInputSurface) {
ALOGE("dequeueInputBuffer can't be used with input surface");
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
- response->postReply(replyID);
+ PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -1122,9 +1426,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->findInt64("timeoutUs", &timeoutUs));
if (timeoutUs == 0ll) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", -EAGAIN);
- response->postReply(replyID);
+ PostReplyWithError(replyID, -EAGAIN);
break;
}
@@ -1153,9 +1455,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(mFlags & kFlagDequeueInputPending);
- sp<AMessage> response = new AMessage;
- response->setInt32("err", -EAGAIN);
- response->postReply(mDequeueInputReplyID);
+ PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
mFlags &= ~kFlagDequeueInputPending;
mDequeueInputReplyID = 0;
@@ -1167,19 +1467,17 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != STARTED || (mFlags & kFlagStickyError)) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ if (!isExecuting()) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
break;
}
status_t err = onQueueInputBuffer(msg);
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
- response->postReply(replyID);
+ PostReplyWithError(replyID, err);
break;
}
@@ -1188,6 +1486,12 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ if (mFlags & kFlagIsAsync) {
+ ALOGE("dequeueOutputBuffer can't be used in async mode");
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ }
+
if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
break;
}
@@ -1196,9 +1500,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->findInt64("timeoutUs", &timeoutUs));
if (timeoutUs == 0ll) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", -EAGAIN);
- response->postReply(replyID);
+ PostReplyWithError(replyID, -EAGAIN);
break;
}
@@ -1227,9 +1529,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(mFlags & kFlagDequeueOutputPending);
- sp<AMessage> response = new AMessage;
- response->setInt32("err", -EAGAIN);
- response->postReply(mDequeueOutputReplyID);
+ PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
mFlags &= ~kFlagDequeueOutputPending;
mDequeueOutputReplyID = 0;
@@ -1241,19 +1541,17 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != STARTED || (mFlags & kFlagStickyError)) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ if (!isExecuting()) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
break;
}
status_t err = onReleaseOutputBuffer(msg);
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
- response->postReply(replyID);
+ PostReplyWithError(replyID, err);
break;
}
@@ -1262,11 +1560,11 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != STARTED || (mFlags & kFlagStickyError)) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ if (!isExecuting()) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
break;
}
@@ -1280,11 +1578,11 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != STARTED || (mFlags & kFlagStickyError)) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ if (!isExecuting() || (mFlags & kFlagIsAsync)) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
break;
}
@@ -1314,15 +1612,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != STARTED || (mFlags & kFlagStickyError)) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ if (!isExecuting()) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
break;
}
mReplyID = replyID;
+ // TODO: skip flushing if already FLUSHED
setState(FLUSHING);
mCodec->signalFlush();
@@ -1330,23 +1629,28 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatGetInputFormat:
case kWhatGetOutputFormat:
{
+ sp<AMessage> format =
+ (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
+
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if ((mState != STARTED && mState != FLUSHING)
- || (mFlags & kFlagStickyError)
- || mOutputFormat == NULL) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ if ((mState != CONFIGURED && mState != STARTING &&
+ mState != STARTED && mState != FLUSHING &&
+ mState != FLUSHED)
+ || format == NULL) {
+ PostReplyWithError(replyID, INVALID_OPERATION);
+ break;
+ } else if (mFlags & kFlagStickyError) {
+ PostReplyWithError(replyID, getStickyError());
break;
}
sp<AMessage> response = new AMessage;
- response->setMessage("format", mOutputFormat);
+ response->setMessage("format", format);
response->postReply(replyID);
break;
}
@@ -1372,10 +1676,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->senderAwaitsResponse(&replyID));
if (mComponentName.empty()) {
- sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
-
- response->postReply(replyID);
+ PostReplyWithError(replyID, INVALID_OPERATION);
break;
}
@@ -1395,10 +1696,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
status_t err = onSetParameters(params);
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
-
- response->postReply(replyID);
+ PostReplyWithError(replyID, err);
break;
}
@@ -1413,7 +1711,7 @@ void MediaCodec::extractCSD(const sp<AMessage> &format) {
size_t i = 0;
for (;;) {
sp<ABuffer> csd;
- if (!format->findBuffer(StringPrintf("csd-%u", i).c_str(), &csd)) {
+ if (!format->findBuffer(AStringPrintf("csd-%u", i).c_str(), &csd)) {
break;
}
@@ -1421,14 +1719,14 @@ void MediaCodec::extractCSD(const sp<AMessage> &format) {
++i;
}
- ALOGV("Found %u pieces of codec specific data.", mCSD.size());
+ ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
}
status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
CHECK(!mCSD.empty());
- BufferInfo *info =
- &mPortBuffers[kPortIndexInput].editItemAt(bufferIndex);
+ const BufferInfo *info =
+ &mPortBuffers[kPortIndexInput].itemAt(bufferIndex);
sp<ABuffer> csd = *mCSD.begin();
mCSD.erase(mCSD.begin());
@@ -1463,18 +1761,23 @@ void MediaCodec::setState(State newState) {
mCrypto.clear();
setNativeWindow(NULL);
+ mInputFormat.clear();
mOutputFormat.clear();
mFlags &= ~kFlagOutputFormatChanged;
mFlags &= ~kFlagOutputBuffersChanged;
mFlags &= ~kFlagStickyError;
mFlags &= ~kFlagIsEncoder;
mFlags &= ~kFlagGatherCodecSpecificData;
+ mFlags &= ~kFlagIsAsync;
+ mStickyError = OK;
mActivityNotify.clear();
+ mCallback.clear();
}
if (newState == UNINITIALIZED) {
- mComponentName.clear();
+ // return any straggling buffers, e.g. if we got here on an error
+ returnBuffersToCodec();
// The component is gone, mediaserver's probably back up already
// but should definitely be back up should we try to instantiate
@@ -1485,6 +1788,8 @@ void MediaCodec::setState(State newState) {
mState = newState;
cancelPendingDequeueOperations();
+
+ updateBatteryStat();
}
void MediaCodec::returnBuffersToCodec() {
@@ -1494,6 +1799,7 @@ void MediaCodec::returnBuffersToCodec() {
void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+ Mutex::Autolock al(mBufferLock);
Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
@@ -1520,8 +1826,8 @@ size_t MediaCodec::updateBuffers(
int32_t portIndex, const sp<AMessage> &msg) {
CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
- void *bufferID;
- CHECK(msg->findPointer("buffer-id", &bufferID));
+ uint32_t bufferID;
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
@@ -1532,6 +1838,8 @@ size_t MediaCodec::updateBuffers(
CHECK(info->mNotify == NULL);
CHECK(msg->findMessage("reply", &info->mNotify));
+ info->mFormat =
+ (portIndex == kPortIndexInput) ? mInputFormat : mOutputFormat;
mAvailPortBuffers[portIndex].push_back(i);
return i;
@@ -1648,11 +1956,15 @@ status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
info->mData->setRange(0, result);
}
+ // synchronization boundary for getBufferAndFormat
+ {
+ Mutex::Autolock al(mBufferLock);
+ info->mOwnedByClient = false;
+ }
reply->setBuffer("buffer", info->mData);
reply->post();
info->mNotify = NULL;
- info->mOwnedByClient = false;
return OK;
}
@@ -1666,7 +1978,7 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
render = 0;
}
- if (mState != STARTED) {
+ if (!isExecuting()) {
return -EINVAL;
}
@@ -1680,18 +1992,40 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
return -EACCES;
}
+ // synchronization boundary for getBufferAndFormat
+ {
+ Mutex::Autolock al(mBufferLock);
+ info->mOwnedByClient = false;
+ }
+
if (render && info->mData != NULL && info->mData->size() != 0) {
info->mNotify->setInt32("render", true);
+ int64_t timestampNs = 0;
+ if (msg->findInt64("timestampNs", &timestampNs)) {
+ info->mNotify->setInt64("timestampNs", timestampNs);
+ } else {
+ // TODO: it seems like we should use the timestamp
+ // in the (media)buffer as it potentially came from
+ // an input surface, but we did not propagate it prior to
+ // API 20. Perhaps check for target SDK version.
+#if 0
+ if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
+ ALOGV("using buffer PTS of %" PRId64, timestampNs);
+ timestampNs *= 1000;
+ }
+#endif
+ }
+
if (mSoftRenderer != NULL) {
mSoftRenderer->render(
- info->mData->data(), info->mData->size(), NULL);
+ info->mData->data(), info->mData->size(),
+ timestampNs, NULL, info->mFormat);
}
}
info->mNotify->post();
info->mNotify = NULL;
- info->mOwnedByClient = false;
return OK;
}
@@ -1710,7 +2044,22 @@ ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
BufferInfo *info = &mPortBuffers[portIndex].editItemAt(index);
CHECK(!info->mOwnedByClient);
- info->mOwnedByClient = true;
+ {
+ Mutex::Autolock al(mBufferLock);
+ info->mOwnedByClient = true;
+
+ // set image-data
+ if (info->mFormat != NULL) {
+ sp<ABuffer> imageData;
+ if (info->mFormat->findBuffer("image-data", &imageData)) {
+ info->mData->meta()->setBuffer("image-data", imageData);
+ }
+ int32_t left, top, right, bottom;
+ if (info->mFormat->findRect("crop", &left, &top, &right, &bottom)) {
+ info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
+ }
+ }
+ }
return index;
}
@@ -1748,16 +2097,100 @@ status_t MediaCodec::setNativeWindow(
return OK;
}
+void MediaCodec::onInputBufferAvailable() {
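+ // drain all currently free input buffers and post a CB_INPUT_AVAILABLE callback for each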
+ int32_t index;
+ while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
+ sp<AMessage> msg = mCallback->dup();
+ msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
+ msg->setInt32("index", index);
+ msg->post();
+ }
+}
+
+void MediaCodec::onOutputBufferAvailable() {
+ int32_t index;
+ while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
+ const sp<ABuffer> &buffer =
+ mPortBuffers[kPortIndexOutput].itemAt(index).mData;
+ sp<AMessage> msg = mCallback->dup();
+ msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
+ msg->setInt32("index", index);
+ msg->setSize("offset", buffer->offset());
+ msg->setSize("size", buffer->size());
+
+ int64_t timeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ msg->setInt64("timeUs", timeUs);
+
+ int32_t omxFlags;
+ CHECK(buffer->meta()->findInt32("omxFlags", &omxFlags));
+
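+ // map the OMX buffer flags onto the public BUFFER_FLAG_* values carried in the callback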
+ uint32_t flags = 0;
+ if (omxFlags & OMX_BUFFERFLAG_SYNCFRAME) {
+ flags |= BUFFER_FLAG_SYNCFRAME;
+ }
+ if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ flags |= BUFFER_FLAG_CODECCONFIG;
+ }
+ if (omxFlags & OMX_BUFFERFLAG_EOS) {
+ flags |= BUFFER_FLAG_EOS;
+ }
+
+ msg->setInt32("flags", flags);
+
+ msg->post();
+ }
+}
+
+void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
+ if (mCallback != NULL) {
+ sp<AMessage> msg = mCallback->dup();
+ msg->setInt32("callbackID", CB_ERROR);
+ msg->setInt32("err", err);
+ msg->setInt32("actionCode", actionCode);
+
+ if (detail != NULL) {
+ msg->setString("detail", detail);
+ }
+
+ msg->post();
+ }
+}
+
+void MediaCodec::onOutputFormatChanged() {
+ if (mCallback != NULL) {
+ sp<AMessage> msg = mCallback->dup();
+ msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
+ msg->setMessage("format", mOutputFormat);
+ msg->post();
+ }
+}
+
void MediaCodec::postActivityNotificationIfPossible() {
if (mActivityNotify == NULL) {
return;
}
- if ((mFlags & (kFlagStickyError
+ bool isErrorOrOutputChanged =
+ (mFlags & (kFlagStickyError
| kFlagOutputBuffersChanged
- | kFlagOutputFormatChanged))
+ | kFlagOutputFormatChanged));
+
+ if (isErrorOrOutputChanged
|| !mAvailPortBuffers[kPortIndexInput].empty()
|| !mAvailPortBuffers[kPortIndexOutput].empty()) {
+ mActivityNotify->setInt32("input-buffers",
+ mAvailPortBuffers[kPortIndexInput].size());
+
+ if (isErrorOrOutputChanged) {
+ // we want consumer to dequeue as many times as it can
+ mActivityNotify->setInt32("output-buffers", INT32_MAX);
+ } else {
+ mActivityNotify->setInt32("output-buffers",
+ mAvailPortBuffers[kPortIndexOutput].size());
+ }
mActivityNotify->post();
mActivityNotify.clear();
}
@@ -1801,7 +2234,7 @@ status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
memcpy(csd->data() + 4, nalStart, nalSize);
mOutputFormat->setBuffer(
- StringPrintf("csd-%u", csdIndex).c_str(), csd);
+ AStringPrintf("csd-%u", csdIndex).c_str(), csd);
++csdIndex;
}
@@ -1818,4 +2251,34 @@ status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
return OK;
}
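+// Battery accounting: note video/audio codec start when entering CONFIGURED and
+// the matching stop once the codec returns to UNINITIALIZED.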
+void MediaCodec::updateBatteryStat() {
+ if (mState == CONFIGURED && !mBatteryStatNotified) {
+ AString mime;
+ CHECK(mOutputFormat != NULL &&
+ mOutputFormat->findString("mime", &mime));
+
+ mIsVideo = mime.startsWithIgnoreCase("video/");
+
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
+
+ if (mIsVideo) {
+ notifier.noteStartVideo();
+ } else {
+ notifier.noteStartAudio();
+ }
+
+ mBatteryStatNotified = true;
+ } else if (mState == UNINITIALIZED && mBatteryStatNotified) {
+ BatteryNotifier& notifier(BatteryNotifier::getInstance());
+
+ if (mIsVideo) {
+ notifier.noteStopVideo();
+ } else {
+ notifier.noteStopAudio();
+ }
+
+ mBatteryStatNotified = false;
+ }
+}
+
} // namespace android
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 6248e90..cf6e937 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -18,12 +18,19 @@
#define LOG_TAG "MediaCodecList"
#include <utils/Log.h>
-#include <media/stagefright/MediaCodecList.h>
+#include <binder/IServiceManager.h>
+
+#include <media/IMediaCodecList.h>
+#include <media/IMediaPlayerService.h>
+#include <media/MediaCodecInfo.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
+
#include <utils/threads.h>
#include <libexpat/expat.h>
@@ -32,38 +39,139 @@ namespace android {
static Mutex sInitMutex;
+static MediaCodecList *gCodecList = NULL;
+
// static
-MediaCodecList *MediaCodecList::sCodecList;
+sp<IMediaCodecList> MediaCodecList::sCodecList;
// static
-const MediaCodecList *MediaCodecList::getInstance() {
+sp<IMediaCodecList> MediaCodecList::getLocalInstance() {
Mutex::Autolock autoLock(sInitMutex);
- if (sCodecList == NULL) {
- sCodecList = new MediaCodecList;
+ if (gCodecList == NULL) {
+ gCodecList = new MediaCodecList;
+ if (gCodecList->initCheck() == OK) {
+ sCodecList = gCodecList;
+ }
}
- return sCodecList->initCheck() == OK ? sCodecList : NULL;
+ return sCodecList;
+}
+
+static Mutex sRemoteInitMutex;
+
+sp<IMediaCodecList> MediaCodecList::sRemoteList;
+
+sp<MediaCodecList::BinderDeathObserver> MediaCodecList::sBinderDeathObserver;
+
+void MediaCodecList::BinderDeathObserver::binderDied(const wp<IBinder> &who __unused) {
+ Mutex::Autolock _l(sRemoteInitMutex);
+ sRemoteList.clear();
+ sBinderDeathObserver.clear();
+}
+
+// static
+sp<IMediaCodecList> MediaCodecList::getInstance() {
+ Mutex::Autolock _l(sRemoteInitMutex);
+ if (sRemoteList == NULL) {
+ sp<IBinder> binder =
+ defaultServiceManager()->getService(String16("media.player"));
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+ if (service.get() != NULL) {
+ sRemoteList = service->getCodecList();
+ if (sRemoteList != NULL) {
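+ // watch for mediaserver death so the cached remote list gets refreshed on restart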
+ sBinderDeathObserver = new BinderDeathObserver();
+ binder->linkToDeath(sBinderDeathObserver.get());
+ }
+ }
+ if (sRemoteList == NULL) {
+ // if failed to get remote list, create local list
+ sRemoteList = getLocalInstance();
+ }
+ }
+ return sRemoteList;
}
MediaCodecList::MediaCodecList()
: mInitCheck(NO_INIT) {
- FILE *file = fopen("/etc/media_codecs.xml", "r");
+ parseTopLevelXMLFile("/etc/media_codecs.xml");
+}
- if (file == NULL) {
- ALOGW("unable to open media codecs configuration xml file.");
+void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {
+ // get href_base
+ char *href_base_end = strrchr(codecs_xml, '/');
+ if (href_base_end != NULL) {
+ mHrefBase = AString(codecs_xml, href_base_end - codecs_xml + 1);
+ }
+
+ mInitCheck = OK; // keeping this here for safety
+ mCurrentSection = SECTION_TOPLEVEL;
+ mDepth = 0;
+
+ OMXClient client;
+ mInitCheck = client.connect();
+ if (mInitCheck != OK) {
return;
}
+ mOMX = client.interface();
+ parseXMLFile(codecs_xml);
+ mOMX.clear();
- parseXMLFile(file);
+ if (mInitCheck != OK) {
+ mCodecInfos.clear();
+ return;
+ }
- if (mInitCheck == OK) {
- // These are currently still used by the video editing suite.
+ for (size_t i = mCodecInfos.size(); i-- > 0;) {
+ const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();
- addMediaCodec(true /* encoder */, "AACEncoder", "audio/mp4a-latm");
+ if (info.mCaps.size() == 0) {
+ // No types supported by this component???
+ ALOGW("Component %s does not support any type of media?",
+ info.mName.c_str());
- addMediaCodec(
- false /* encoder */, "OMX.google.raw.decoder", "audio/raw");
+ mCodecInfos.removeAt(i);
+#if LOG_NDEBUG == 0
+ } else {
+ for (size_t type_ix = 0; type_ix < info.mCaps.size(); ++type_ix) {
+ AString mime = info.mCaps.keyAt(type_ix);
+ const sp<MediaCodecInfo::Capabilities> &caps = info.mCaps.valueAt(type_ix);
+
+ ALOGV("%s codec info for %s: %s", info.mName.c_str(), mime.c_str(),
+ caps->getDetails()->debugString().c_str());
+ ALOGV(" flags=%d", caps->getFlags());
+ {
+ Vector<uint32_t> colorFormats;
+ caps->getSupportedColorFormats(&colorFormats);
+ AString nice;
+ for (size_t ix = 0; ix < colorFormats.size(); ix++) {
+ if (ix > 0) {
+ nice.append(", ");
+ }
+ nice.append(colorFormats.itemAt(ix));
+ }
+ ALOGV(" colors=[%s]", nice.c_str());
+ }
+ {
+ Vector<MediaCodecInfo::ProfileLevel> profileLevels;
+ caps->getSupportedProfileLevels(&profileLevels);
+ AString nice;
+ for (size_t ix = 0; ix < profileLevels.size(); ix++) {
+ if (ix > 0) {
+ nice.append(", ");
+ }
+ const MediaCodecInfo::ProfileLevel &pl =
+ profileLevels.itemAt(ix);
+ nice.append(pl.mProfile);
+ nice.append("/");
+ nice.append(pl.mLevel);
+ }
+ ALOGV(" levels=[%s]", nice.c_str());
+ }
+ }
+#endif
+ }
}
#if 0
@@ -84,9 +192,6 @@ MediaCodecList::MediaCodecList()
ALOGI("%s", line.c_str());
}
#endif
-
- fclose(file);
- file = NULL;
}
MediaCodecList::~MediaCodecList() {
@@ -96,10 +201,14 @@ status_t MediaCodecList::initCheck() const {
return mInitCheck;
}
-void MediaCodecList::parseXMLFile(FILE *file) {
- mInitCheck = OK;
- mCurrentSection = SECTION_TOPLEVEL;
- mDepth = 0;
+void MediaCodecList::parseXMLFile(const char *path) {
+ FILE *file = fopen(path, "r");
+
+ if (file == NULL) {
+ ALOGW("unable to open media codecs configuration xml file: %s", path);
+ mInitCheck = NAME_NOT_FOUND;
+ return;
+ }
XML_Parser parser = ::XML_ParserCreate(NULL);
CHECK(parser != NULL);
@@ -112,7 +221,7 @@ void MediaCodecList::parseXMLFile(FILE *file) {
while (mInitCheck == OK) {
void *buff = ::XML_GetBuffer(parser, BUFF_SIZE);
if (buff == NULL) {
- ALOGE("failed to in call to XML_GetBuffer()");
+ ALOGE("failed in call to XML_GetBuffer()");
mInitCheck = UNKNOWN_ERROR;
break;
}
@@ -124,8 +233,9 @@ void MediaCodecList::parseXMLFile(FILE *file) {
break;
}
- if (::XML_ParseBuffer(parser, bytes_read, bytes_read == 0)
- != XML_STATUS_OK) {
+ XML_Status status = ::XML_ParseBuffer(parser, bytes_read, bytes_read == 0);
+ if (status != XML_STATUS_OK) {
+ ALOGE("malformed (%s)", ::XML_ErrorString(::XML_GetErrorCode(parser)));
mInitCheck = ERROR_MALFORMED;
break;
}
@@ -137,25 +247,8 @@ void MediaCodecList::parseXMLFile(FILE *file) {
::XML_ParserFree(parser);
- if (mInitCheck == OK) {
- for (size_t i = mCodecInfos.size(); i-- > 0;) {
- CodecInfo *info = &mCodecInfos.editItemAt(i);
-
- if (info->mTypes == 0) {
- // No types supported by this component???
-
- ALOGW("Component %s does not support any type of media?",
- info->mName.c_str());
-
- mCodecInfos.removeAt(i);
- }
- }
- }
-
- if (mInitCheck != OK) {
- mCodecInfos.clear();
- mCodecQuirks.clear();
- }
+ fclose(file);
+ file = NULL;
}
// static
@@ -169,12 +262,65 @@ void MediaCodecList::EndElementHandlerWrapper(void *me, const char *name) {
static_cast<MediaCodecList *>(me)->endElementHandler(name);
}
+status_t MediaCodecList::includeXMLFile(const char **attrs) {
+ const char *href = NULL;
+ size_t i = 0;
+ while (attrs[i] != NULL) {
+ if (!strcmp(attrs[i], "href")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ href = attrs[i + 1];
+ ++i;
+ } else {
+ return -EINVAL;
+ }
+ ++i;
+ }
+
+ // For security reasons and for simplicity, file names can only contain
+ // [a-zA-Z0-9_.] and must start with media_codecs_ and end with .xml
+ for (i = 0; href[i] != '\0'; i++) {
+ if (href[i] == '.' || href[i] == '_' ||
+ (href[i] >= '0' && href[i] <= '9') ||
+ (href[i] >= 'A' && href[i] <= 'Z') ||
+ (href[i] >= 'a' && href[i] <= 'z')) {
+ continue;
+ }
+ ALOGE("invalid include file name: %s", href);
+ return -EINVAL;
+ }
+
+ AString filename = href;
+ if (!filename.startsWith("media_codecs_") ||
+ !filename.endsWith(".xml")) {
+ ALOGE("invalid include file name: %s", href);
+ return -EINVAL;
+ }
+ filename.insert(mHrefBase, 0);
+
+ parseXMLFile(filename.c_str());
+ return mInitCheck;
+}
+
void MediaCodecList::startElementHandler(
const char *name, const char **attrs) {
if (mInitCheck != OK) {
return;
}
+ bool inType = true;
+
+ if (!strcmp(name, "Include")) {
+ mInitCheck = includeXMLFile(attrs);
+ if (mInitCheck == OK) {
+ mPastSections.push(mCurrentSection);
+ mCurrentSection = SECTION_INCLUDE;
+ }
+ ++mDepth;
+ return;
+ }
+
switch (mCurrentSection) {
case SECTION_TOPLEVEL:
{
@@ -215,6 +361,25 @@ void MediaCodecList::startElementHandler(
mInitCheck = addQuirk(attrs);
} else if (!strcmp(name, "Type")) {
mInitCheck = addTypeFromAttributes(attrs);
+ mCurrentSection =
+ (mCurrentSection == SECTION_DECODER
+ ? SECTION_DECODER_TYPE : SECTION_ENCODER_TYPE);
+ }
+ }
+ inType = false;
+ // fall through
+
+ case SECTION_DECODER_TYPE:
+ case SECTION_ENCODER_TYPE:
+ {
+ // ignore limits and features specified outside of type
+ bool outside = !inType && !mCurrentInfo->mHasSoleMime;
+ if (outside && (!strcmp(name, "Limit") || !strcmp(name, "Feature"))) {
+ ALOGW("ignoring %s specified outside of a Type", name);
+ } else if (!strcmp(name, "Limit")) {
+ mInitCheck = addLimit(attrs);
+ } else if (!strcmp(name, "Feature")) {
+ mInitCheck = addFeature(attrs);
}
break;
}
@@ -248,10 +413,25 @@ void MediaCodecList::endElementHandler(const char *name) {
break;
}
+ case SECTION_DECODER_TYPE:
+ case SECTION_ENCODER_TYPE:
+ {
+ if (!strcmp(name, "Type")) {
+ mCurrentSection =
+ (mCurrentSection == SECTION_DECODER_TYPE
+ ? SECTION_DECODER : SECTION_ENCODER);
+
+ mCurrentInfo->complete();
+ }
+ break;
+ }
+
case SECTION_DECODER:
{
if (!strcmp(name, "MediaCodec")) {
mCurrentSection = SECTION_DECODERS;
+ mCurrentInfo->complete();
+ mCurrentInfo = NULL;
}
break;
}
@@ -260,6 +440,17 @@ void MediaCodecList::endElementHandler(const char *name) {
{
if (!strcmp(name, "MediaCodec")) {
mCurrentSection = SECTION_ENCODERS;
+ mCurrentInfo->complete();
+ mCurrentInfo = NULL;
+ }
+ break;
+ }
+
+ case SECTION_INCLUDE:
+ {
+ if (!strcmp(name, "Include") && mPastSections.size() > 0) {
+ mCurrentSection = mPastSections.top();
+ mPastSections.pop();
}
break;
}
@@ -301,23 +492,37 @@ status_t MediaCodecList::addMediaCodecFromAttributes(
return -EINVAL;
}
- addMediaCodec(encoder, name, type);
-
+ mCurrentInfo = new MediaCodecInfo(name, encoder, type);
+ // The next step involves trying to load the codec, which may
+ // fail. Only list the codec if this succeeds.
+ // However, keep mCurrentInfo object around until parsing
+ // of full codec info is completed.
+ if (initializeCapabilities(type) == OK) {
+ mCodecInfos.push_back(mCurrentInfo);
+ }
return OK;
}
-void MediaCodecList::addMediaCodec(
- bool encoder, const char *name, const char *type) {
- mCodecInfos.push();
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mName = name;
- info->mIsEncoder = encoder;
- info->mTypes = 0;
- info->mQuirks = 0;
+status_t MediaCodecList::initializeCapabilities(const char *type) {
+ if (type == NULL) {
+ return OK;
+ }
+
+ ALOGV("initializeCapabilities %s:%s",
+ mCurrentInfo->mName.c_str(), type);
- if (type != NULL) {
- addType(type);
+ CodecCapabilities caps;
+ status_t err = QueryCodec(
+ mOMX,
+ mCurrentInfo->mName.c_str(),
+ type,
+ mCurrentInfo->mIsEncoder,
+ &caps);
+ if (err != OK) {
+ return err;
}
+
+ return mCurrentInfo->initializeCapabilities(caps);
}
status_t MediaCodecList::addQuirk(const char **attrs) {
@@ -342,24 +547,7 @@ status_t MediaCodecList::addQuirk(const char **attrs) {
return -EINVAL;
}
- uint32_t bit;
- ssize_t index = mCodecQuirks.indexOfKey(name);
- if (index < 0) {
- bit = mCodecQuirks.size();
-
- if (bit == 32) {
- ALOGW("Too many distinct quirk names in configuration.");
- return OK;
- }
-
- mCodecQuirks.add(name, bit);
- } else {
- bit = mCodecQuirks.valueAt(index);
- }
-
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mQuirks |= 1ul << bit;
-
+ mCurrentInfo->addQuirk(name);
return OK;
}
@@ -385,172 +573,291 @@ status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {
return -EINVAL;
}
- addType(name);
-
- return OK;
-}
-
-void MediaCodecList::addType(const char *name) {
- uint32_t bit;
- ssize_t index = mTypes.indexOfKey(name);
- if (index < 0) {
- bit = mTypes.size();
-
- if (bit == 32) {
- ALOGW("Too many distinct type names in configuration.");
- return;
- }
-
- mTypes.add(name, bit);
- } else {
- bit = mTypes.valueAt(index);
+ status_t ret = mCurrentInfo->addMime(name);
+ if (ret != OK) {
+ return ret;
}
- CodecInfo *info = &mCodecInfos.editItemAt(mCodecInfos.size() - 1);
- info->mTypes |= 1ul << bit;
+ // The next step involves trying to load the codec, which may
+ // fail. Handle this gracefully (by not reporting such mime).
+ if (initializeCapabilities(name) != OK) {
+ mCurrentInfo->removeMime(name);
+ }
+ return OK;
}
+// legacy method for non-advanced codecs
ssize_t MediaCodecList::findCodecByType(
const char *type, bool encoder, size_t startIndex) const {
- ssize_t typeIndex = mTypes.indexOfKey(type);
-
- if (typeIndex < 0) {
- return -ENOENT;
- }
+ static const char *advancedFeatures[] = {
+ "feature-secure-playback",
+ "feature-tunneled-playback",
+ };
- uint32_t typeMask = 1ul << mTypes.valueAt(typeIndex);
+ size_t numCodecs = mCodecInfos.size();
+ for (; startIndex < numCodecs; ++startIndex) {
+ const MediaCodecInfo &info = *mCodecInfos.itemAt(startIndex).get();
- while (startIndex < mCodecInfos.size()) {
- const CodecInfo &info = mCodecInfos.itemAt(startIndex);
-
- if (info.mIsEncoder == encoder && (info.mTypes & typeMask)) {
- return startIndex;
+ if (info.isEncoder() != encoder) {
+ continue;
+ }
+ sp<MediaCodecInfo::Capabilities> capabilities = info.getCapabilitiesFor(type);
+ if (capabilities == NULL) {
+ continue;
+ }
+ const sp<AMessage> &details = capabilities->getDetails();
+
+ int32_t required;
+ bool isAdvanced = false;
+ for (size_t ix = 0; ix < ARRAY_SIZE(advancedFeatures); ix++) {
+ if (details->findInt32(advancedFeatures[ix], &required) &&
+ required != 0) {
+ isAdvanced = true;
+ break;
+ }
}
- ++startIndex;
- }
-
- return -ENOENT;
-}
-
-ssize_t MediaCodecList::findCodecByName(const char *name) const {
- for (size_t i = 0; i < mCodecInfos.size(); ++i) {
- const CodecInfo &info = mCodecInfos.itemAt(i);
-
- if (info.mName == name) {
- return i;
+ if (!isAdvanced) {
+ return startIndex;
}
}
return -ENOENT;
}
-size_t MediaCodecList::countCodecs() const {
- return mCodecInfos.size();
+static status_t limitFoundMissingAttr(AString name, const char *attr, bool found = true) {
+ ALOGE("limit '%s' with %s'%s' attribute", name.c_str(),
+ (found ? "" : "no "), attr);
+ return -EINVAL;
}
-const char *MediaCodecList::getCodecName(size_t index) const {
- if (index >= mCodecInfos.size()) {
- return NULL;
- }
-
- const CodecInfo &info = mCodecInfos.itemAt(index);
- return info.mName.c_str();
+static status_t limitError(AString name, const char *msg) {
+ ALOGE("limit '%s' %s", name.c_str(), msg);
+ return -EINVAL;
}
-bool MediaCodecList::isEncoder(size_t index) const {
- if (index >= mCodecInfos.size()) {
- return NULL;
- }
-
- const CodecInfo &info = mCodecInfos.itemAt(index);
- return info.mIsEncoder;
+static status_t limitInvalidAttr(AString name, const char *attr, AString value) {
+ ALOGE("limit '%s' with invalid '%s' attribute (%s)", name.c_str(),
+ attr, value.c_str());
+ return -EINVAL;
}
-bool MediaCodecList::codecHasQuirk(
- size_t index, const char *quirkName) const {
- if (index >= mCodecInfos.size()) {
- return NULL;
- }
+status_t MediaCodecList::addLimit(const char **attrs) {
+ sp<AMessage> msg = new AMessage();
- const CodecInfo &info = mCodecInfos.itemAt(index);
+ size_t i = 0;
+ while (attrs[i] != NULL) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
- if (info.mQuirks != 0) {
- ssize_t index = mCodecQuirks.indexOfKey(quirkName);
- if (index >= 0 && info.mQuirks & (1ul << mCodecQuirks.valueAt(index))) {
- return true;
+ // attributes with values
+ if (!strcmp(attrs[i], "name")
+ || !strcmp(attrs[i], "default")
+ || !strcmp(attrs[i], "in")
+ || !strcmp(attrs[i], "max")
+ || !strcmp(attrs[i], "min")
+ || !strcmp(attrs[i], "range")
+ || !strcmp(attrs[i], "ranges")
+ || !strcmp(attrs[i], "scale")
+ || !strcmp(attrs[i], "value")) {
+ msg->setString(attrs[i], attrs[i + 1]);
+ ++i;
+ } else {
+ return -EINVAL;
}
+ ++i;
}
- return false;
-}
+ AString name;
+ if (!msg->findString("name", &name)) {
+ ALOGE("limit with no 'name' attribute");
+ return -EINVAL;
+ }
-status_t MediaCodecList::getSupportedTypes(
- size_t index, Vector<AString> *types) const {
- types->clear();
+ // size, blocks, bitrate, frame-rate, blocks-per-second, aspect-ratio: range
+ // quality: range + default + [scale]
+ // complexity: range + default
+ bool found;
+
+ if (name == "aspect-ratio" || name == "bitrate" || name == "block-count"
+ || name == "blocks-per-second" || name == "complexity"
+ || name == "frame-rate" || name == "quality" || name == "size") {
+ AString min, max;
+ if (msg->findString("min", &min) && msg->findString("max", &max)) {
+ min.append("-");
+ min.append(max);
+ if (msg->contains("range") || msg->contains("value")) {
+ return limitError(name, "has 'min' and 'max' as well as 'range' or "
+ "'value' attributes");
+ }
+ msg->setString("range", min);
+ } else if (msg->contains("min") || msg->contains("max")) {
+ return limitError(name, "has only 'min' or 'max' attribute");
+ } else if (msg->findString("value", &max)) {
+ min = max;
+ min.append("-");
+ min.append(max);
+ if (msg->contains("range")) {
+ return limitError(name, "has both 'range' and 'value' attributes");
+ }
+ msg->setString("range", min);
+ }
- if (index >= mCodecInfos.size()) {
- return -ERANGE;
- }
+ AString range, scale = "linear", def, in_;
+ if (!msg->findString("range", &range)) {
+ return limitError(name, "with no 'range', 'value' or 'min'/'max' attributes");
+ }
- const CodecInfo &info = mCodecInfos.itemAt(index);
+ if ((name == "quality" || name == "complexity") ^
+ (found = msg->findString("default", &def))) {
+ return limitFoundMissingAttr(name, "default", found);
+ }
+ if (name != "quality" && msg->findString("scale", &scale)) {
+ return limitFoundMissingAttr(name, "scale");
+ }
+ if ((name == "aspect-ratio") ^ (found = msg->findString("in", &in_))) {
+ return limitFoundMissingAttr(name, "in", found);
+ }
- for (size_t i = 0; i < mTypes.size(); ++i) {
- uint32_t typeMask = 1ul << mTypes.valueAt(i);
+ if (name == "aspect-ratio") {
+ if (!(in_ == "pixels") && !(in_ == "blocks")) {
+ return limitInvalidAttr(name, "in", in_);
+ }
+ in_.erase(5, 1); // (pixel|block)-aspect-ratio
+ in_.append("-");
+ in_.append(name);
+ name = in_;
+ }
+ if (name == "quality") {
+ mCurrentInfo->addDetail("quality-scale", scale);
+ }
+ if (name == "quality" || name == "complexity") {
+ AString tag = name;
+ tag.append("-default");
+ mCurrentInfo->addDetail(tag, def);
+ }
+ AString tag = name;
+ tag.append("-range");
+ mCurrentInfo->addDetail(tag, range);
+ } else {
+ AString max, value, ranges;
+ if (msg->contains("default")) {
+ return limitFoundMissingAttr(name, "default");
+ } else if (msg->contains("in")) {
+ return limitFoundMissingAttr(name, "in");
+ } else if ((name == "channel-count") ^
+ (found = msg->findString("max", &max))) {
+ return limitFoundMissingAttr(name, "max", found);
+ } else if (msg->contains("min")) {
+ return limitFoundMissingAttr(name, "min");
+ } else if (msg->contains("range")) {
+ return limitFoundMissingAttr(name, "range");
+ } else if ((name == "sample-rate") ^
+ (found = msg->findString("ranges", &ranges))) {
+ return limitFoundMissingAttr(name, "ranges", found);
+ } else if (msg->contains("scale")) {
+ return limitFoundMissingAttr(name, "scale");
+ } else if ((name == "alignment" || name == "block-size") ^
+ (found = msg->findString("value", &value))) {
+ return limitFoundMissingAttr(name, "value", found);
+ }
- if (info.mTypes & typeMask) {
- types->push(mTypes.keyAt(i));
+ if (max.size()) {
+ AString tag = "max-";
+ tag.append(name);
+ mCurrentInfo->addDetail(tag, max);
+ } else if (value.size()) {
+ mCurrentInfo->addDetail(name, value);
+ } else if (ranges.size()) {
+ AString tag = name;
+ tag.append("-ranges");
+ mCurrentInfo->addDetail(tag, ranges);
+ } else {
+ ALOGW("Ignoring unrecognized limit '%s'", name.c_str());
}
}
-
return OK;
}
-status_t MediaCodecList::getCodecCapabilities(
- size_t index, const char *type,
- Vector<ProfileLevel> *profileLevels,
- Vector<uint32_t> *colorFormats,
- uint32_t *flags) const {
- profileLevels->clear();
- colorFormats->clear();
-
- if (index >= mCodecInfos.size()) {
- return -ERANGE;
+static bool parseBoolean(const char *s) {
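+ // accept "true"/"yes"/"y" (case-insensitive) or any positive decimal number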
+ if (!strcasecmp(s, "true") || !strcasecmp(s, "yes") || !strcasecmp(s, "y")) {
+ return true;
}
+ char *end;
+ unsigned long res = strtoul(s, &end, 10);
+ return *s != '\0' && *end == '\0' && res > 0;
+}
- const CodecInfo &info = mCodecInfos.itemAt(index);
+status_t MediaCodecList::addFeature(const char **attrs) {
+ size_t i = 0;
+ const char *name = NULL;
+ int32_t optional = -1;
+ int32_t required = -1;
+ const char *value = NULL;
- OMXClient client;
- status_t err = client.connect();
- if (err != OK) {
- return err;
- }
+ while (attrs[i] != NULL) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
- CodecCapabilities caps;
- err = QueryCodec(
- client.interface(),
- info.mName.c_str(), type, info.mIsEncoder, &caps);
+ // attributes with values
+ if (!strcmp(attrs[i], "name")) {
+ name = attrs[i + 1];
+ ++i;
+ } else if (!strcmp(attrs[i], "optional") || !strcmp(attrs[i], "required")) {
+ int value = (int)parseBoolean(attrs[i + 1]);
+ if (!strcmp(attrs[i], "optional")) {
+ optional = value;
+ } else {
+ required = value;
+ }
+ ++i;
+ } else if (!strcmp(attrs[i], "value")) {
+ value = attrs[i + 1];
+ ++i;
+ } else {
+ return -EINVAL;
+ }
+ ++i;
+ }
+ if (name == NULL) {
+ ALOGE("feature with no 'name' attribute");
+ return -EINVAL;
+ }
- if (err != OK) {
- return err;
+ if (optional == required && optional != -1) {
+ ALOGE("feature '%s' is both/neither optional and required", name);
+ return -EINVAL;
}
- for (size_t i = 0; i < caps.mProfileLevels.size(); ++i) {
- const CodecProfileLevel &src = caps.mProfileLevels.itemAt(i);
+ if ((optional != -1 || required != -1) && (value != NULL)) {
+ ALOGE("feature '%s' has both a value and optional/required attribute", name);
+ return -EINVAL;
+ }
- ProfileLevel profileLevel;
- profileLevel.mProfile = src.mProfile;
- profileLevel.mLevel = src.mLevel;
- profileLevels->push(profileLevel);
+ if (value != NULL) {
+ mCurrentInfo->addFeature(name, value);
+ } else {
+ mCurrentInfo->addFeature(name, (required == 1) || (optional == 0));
}
+ return OK;
+}
- for (size_t i = 0; i < caps.mColorFormats.size(); ++i) {
- colorFormats->push(caps.mColorFormats.itemAt(i));
+ssize_t MediaCodecList::findCodecByName(const char *name) const {
+ for (size_t i = 0; i < mCodecInfos.size(); ++i) {
+ const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();
+
+ if (info.mName == name) {
+ return i;
+ }
}
- *flags = caps.mFlags;
+ return -ENOENT;
+}
- return OK;
+size_t MediaCodecList::countCodecs() const {
+ return mCodecInfos.size();
}
} // namespace android
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
new file mode 100644
index 0000000..c26e909
--- /dev/null
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -0,0 +1,831 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecSource"
+#define DEBUG_DRIFT_TIME 0
+
+#include <inttypes.h>
+
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaCodecSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+struct MediaCodecSource::Puller : public AHandler {
+ Puller(const sp<MediaSource> &source);
+
+ status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
+ void stop();
+
+ void pause();
+ void resume();
+
+protected:
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+ virtual ~Puller();
+
+private:
+ enum {
+ kWhatStart = 'msta',
+ kWhatStop,
+ kWhatPull,
+ kWhatPause,
+ kWhatResume,
+ };
+
+ sp<MediaSource> mSource;
+ sp<AMessage> mNotify;
+ sp<ALooper> mLooper;
+ int32_t mPullGeneration;
+ bool mIsAudio;
+ bool mPaused;
+ bool mReachedEOS;
+
+ status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
+ void schedulePull();
+ void handleEOS();
+
+ DISALLOW_EVIL_CONSTRUCTORS(Puller);
+};
+
+MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
+ : mSource(source),
+ mLooper(new ALooper()),
+ mPullGeneration(0),
+ mIsAudio(false),
+ mPaused(false),
+ mReachedEOS(false) {
+ sp<MetaData> meta = source->getFormat();
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ mIsAudio = !strncasecmp(mime, "audio/", 6);
+
+ mLooper->setName("pull_looper");
+}
+
+MediaCodecSource::Puller::~Puller() {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+}
+
+status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
+ const sp<AMessage> &msg) {
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
+ const sp<AMessage> &notify) {
+ ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
+ mLooper->start(
+ false /* runOnCallingThread */,
+ false /* canCallJava */,
+ PRIORITY_AUDIO);
+ mLooper->registerHandler(this);
+ mNotify = notify;
+
+ sp<AMessage> msg = new AMessage(kWhatStart, id());
+ msg->setObject("meta", meta);
+ return postSynchronouslyAndReturnError(msg);
+}
+
+void MediaCodecSource::Puller::stop() {
+ // Stop source from caller's thread instead of puller's looper.
+ // mSource->stop() is thread-safe, doing it outside the puller's
+ // looper allows us to at least stop if source gets stuck.
+ // If source gets stuck in read(), the looper would never
+ // be able to process the stop(), which could lead to ANR.
+
+ ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
+ mSource->stop();
+ ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");
+
+ (new AMessage(kWhatStop, id()))->post();
+}
+
+void MediaCodecSource::Puller::pause() {
+ (new AMessage(kWhatPause, id()))->post();
+}
+
+void MediaCodecSource::Puller::resume() {
+ (new AMessage(kWhatResume, id()))->post();
+}
+
+void MediaCodecSource::Puller::schedulePull() {
+ sp<AMessage> msg = new AMessage(kWhatPull, id());
+ msg->setInt32("generation", mPullGeneration);
+ msg->post();
+}
+
+void MediaCodecSource::Puller::handleEOS() {
+ if (!mReachedEOS) {
+ ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
+ mReachedEOS = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setPointer("accessUnit", NULL);
+ notify->post();
+ }
+}
+
+void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatStart:
+ {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("meta", &obj));
+
+ mReachedEOS = false;
+
+ status_t err = mSource->start(static_cast<MetaData *>(obj.get()));
+
+ if (err == OK) {
+ schedulePull();
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStop:
+ {
+ ++mPullGeneration;
+
+ handleEOS();
+ break;
+ }
+
+ case kWhatPull:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mPullGeneration) {
+ break;
+ }
+
+ MediaBuffer *mbuf;
+ status_t err = mSource->read(&mbuf);
+
+ if (mPaused) {
+ if (err == OK) {
+ mbuf->release();
+ mbuf = NULL;
+ }
+
+ msg->post();
+ break;
+ }
+
+ if (err != OK) {
+ if (err == ERROR_END_OF_STREAM) {
+ ALOGV("stream ended, mbuf %p", mbuf);
+ } else {
+ ALOGE("error %d reading stream.", err);
+ }
+ handleEOS();
+ } else {
+ sp<AMessage> notify = mNotify->dup();
+
+ notify->setPointer("accessUnit", mbuf);
+ notify->post();
+
+ msg->post();
+ }
+ break;
+ }
+
+ case kWhatPause:
+ {
+ mPaused = true;
+ break;
+ }
+
+ case kWhatResume:
+ {
+ mPaused = false;
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+// static
+sp<MediaCodecSource> MediaCodecSource::Create(
+ const sp<ALooper> &looper,
+ const sp<AMessage> &format,
+ const sp<MediaSource> &source,
+ uint32_t flags) {
+ sp<MediaCodecSource> mediaSource =
+ new MediaCodecSource(looper, format, source, flags);
+
+ if (mediaSource->init() == OK) {
+ return mediaSource;
+ }
+ return NULL;
+}
+
+status_t MediaCodecSource::start(MetaData* params) {
+ sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id());
+ msg->setObject("meta", params);
+ return postSynchronouslyAndReturnError(msg);
+}
+
+status_t MediaCodecSource::stop() {
+ sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id());
+ status_t err = postSynchronouslyAndReturnError(msg);
+
+ // mPuller->stop() needs to be done outside MediaCodecSource's looper,
+ // as it contains a synchronous call to stop the underlying MediaSource,
+ // which often waits for all outstanding MediaBuffers to return, but
+ // MediaBuffers are only returned when MediaCodecSource looper gets
+ // to process them.
+
+ if (mPuller != NULL) {
+ ALOGI("puller (%s) stopping", mIsVideo ? "video" : "audio");
+ mPuller->stop();
+ ALOGI("puller (%s) stopped", mIsVideo ? "video" : "audio");
+ }
+
+ return err;
+}
+
+status_t MediaCodecSource::pause() {
+ (new AMessage(kWhatPause, mReflector->id()))->post();
+ return OK;
+}
+
+sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
+ CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+ return mGraphicBufferProducer;
+}
+
+status_t MediaCodecSource::read(
+ MediaBuffer** buffer, const ReadOptions* /* options */) {
+ Mutex::Autolock autolock(mOutputBufferLock);
+
+ *buffer = NULL;
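+ // block until the encoder queues an output buffer or reaches EOS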
+ while (mOutputBufferQueue.size() == 0 && !mEncoderReachedEOS) {
+ mOutputBufferCond.wait(mOutputBufferLock);
+ }
+ if (!mEncoderReachedEOS) {
+ *buffer = *mOutputBufferQueue.begin();
+ mOutputBufferQueue.erase(mOutputBufferQueue.begin());
+ return OK;
+ }
+ return mErrorCode;
+}
+
+void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
+ buffer->setObserver(0);
+ buffer->release();
+}
+
+MediaCodecSource::MediaCodecSource(
+ const sp<ALooper> &looper,
+ const sp<AMessage> &outputFormat,
+ const sp<MediaSource> &source,
+ uint32_t flags)
+ : mLooper(looper),
+ mOutputFormat(outputFormat),
+ mMeta(new MetaData),
+ mFlags(flags),
+ mIsVideo(false),
+ mStarted(false),
+ mStopping(false),
+ mDoMoreWorkPending(false),
+ mFirstSampleTimeUs(-1ll),
+ mEncoderReachedEOS(false),
+ mErrorCode(OK) {
+ CHECK(mLooper != NULL);
+
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+
+ if (!strncasecmp("video/", mime.c_str(), 6)) {
+ mIsVideo = true;
+ }
+
+ if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
+ mPuller = new Puller(source);
+ }
+}
+
+MediaCodecSource::~MediaCodecSource() {
+ releaseEncoder();
+
+ mCodecLooper->stop();
+ mLooper->unregisterHandler(mReflector->id());
+}
+
+status_t MediaCodecSource::init() {
+ status_t err = initEncoder();
+
+ if (err != OK) {
+ releaseEncoder();
+ }
+
+ return err;
+}
+
+status_t MediaCodecSource::initEncoder() {
+ mReflector = new AHandlerReflector<MediaCodecSource>(this);
+ mLooper->registerHandler(mReflector);
+
+ mCodecLooper = new ALooper;
+ mCodecLooper->setName("codec_looper");
+ mCodecLooper->start();
+
+ if (mFlags & FLAG_USE_METADATA_INPUT) {
+ mOutputFormat->setInt32("store-metadata-in-buffers", 1);
+ }
+
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ mOutputFormat->setInt32("create-input-buffers-suspended", 1);
+ }
+
+ AString outputMIME;
+ CHECK(mOutputFormat->findString("mime", &outputMIME));
+
+ mEncoder = MediaCodec::CreateByType(
+ mCodecLooper, outputMIME.c_str(), true /* encoder */);
+
+ if (mEncoder == NULL) {
+ return NO_INIT;
+ }
+
+ ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());
+
+ status_t err = mEncoder->configure(
+ mOutputFormat,
+ NULL /* nativeWindow */,
+ NULL /* crypto */,
+ MediaCodec::CONFIGURE_FLAG_ENCODE);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mEncoder->getOutputFormat(&mOutputFormat);
+ convertMessageToMetaData(mOutputFormat, mMeta);
+
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ CHECK(mIsVideo);
+
+ err = mEncoder->createInputSurface(&mGraphicBufferProducer);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ mEncoderActivityNotify = new AMessage(
+ kWhatEncoderActivity, mReflector->id());
+ mEncoder->setCallback(mEncoderActivityNotify);
+
+ err = mEncoder->start();
+
+ if (err != OK) {
+ return err;
+ }
+
+ mEncoderReachedEOS = false;
+ mErrorCode = OK;
+
+ return OK;
+}
+
+void MediaCodecSource::releaseEncoder() {
+ if (mEncoder == NULL) {
+ return;
+ }
+
+ mEncoder->release();
+ mEncoder.clear();
+
+ while (!mInputBufferQueue.empty()) {
+ MediaBuffer *mbuf = *mInputBufferQueue.begin();
+ mInputBufferQueue.erase(mInputBufferQueue.begin());
+ if (mbuf != NULL) {
+ mbuf->release();
+ }
+ }
+}
+
+status_t MediaCodecSource::postSynchronouslyAndReturnError(
+ const sp<AMessage> &msg) {
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+void MediaCodecSource::signalEOS(status_t err) {
+ if (!mEncoderReachedEOS) {
+ ALOGV("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
+ {
+ Mutex::Autolock autoLock(mOutputBufferLock);
+ // release all unread media buffers
+ for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
+ it != mOutputBufferQueue.end(); it++) {
+ (*it)->release();
+ }
+ mOutputBufferQueue.clear();
+ mEncoderReachedEOS = true;
+ mErrorCode = err;
+ mOutputBufferCond.signal();
+ }
+
+ releaseEncoder();
+ }
+ if (mStopping && mEncoderReachedEOS) {
+ ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio");
+ // posting reply to everyone that's waiting
+ List<uint32_t>::iterator it;
+ for (it = mStopReplyIDQueue.begin();
+ it != mStopReplyIDQueue.end(); it++) {
+ (new AMessage)->postReply(*it);
+ }
+ mStopReplyIDQueue.clear();
+ mStopping = false;
+ }
+}
+
+void MediaCodecSource::suspend() {
+ CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+ if (mEncoder != NULL) {
+ sp<AMessage> params = new AMessage;
+ params->setInt32("drop-input-frames", true);
+ mEncoder->setParameters(params);
+ }
+}
+
+void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
+ CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+ if (mEncoder != NULL) {
+ sp<AMessage> params = new AMessage;
+ params->setInt32("drop-input-frames", false);
+ if (skipFramesBeforeUs > 0) {
+ params->setInt64("skip-frames-before", skipFramesBeforeUs);
+ }
+ mEncoder->setParameters(params);
+ }
+}
+
+status_t MediaCodecSource::feedEncoderInputBuffers() {
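+ // copy queued MediaBuffers into free encoder input buffers; a NULL entry marks EOS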
+ while (!mInputBufferQueue.empty()
+ && !mAvailEncoderInputIndices.empty()) {
+ MediaBuffer* mbuf = *mInputBufferQueue.begin();
+ mInputBufferQueue.erase(mInputBufferQueue.begin());
+
+ size_t bufferIndex = *mAvailEncoderInputIndices.begin();
+ mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
+
+ int64_t timeUs = 0ll;
+ uint32_t flags = 0;
+ size_t size = 0;
+
+ if (mbuf != NULL) {
+ CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
+
+ // push decoding time for video, or drift time for audio
+ if (mIsVideo) {
+ mDecodingTimeQueue.push_back(timeUs);
+ } else {
+#if DEBUG_DRIFT_TIME
+ if (mFirstSampleTimeUs < 0ll) {
+ mFirstSampleTimeUs = timeUs;
+ }
+
+ int64_t driftTimeUs = 0;
+ if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
+ && driftTimeUs) {
+ driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
+ }
+ mDriftTimeQueue.push_back(driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+ }
+
+ sp<ABuffer> inbuf;
+ status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
+ if (err != OK || inbuf == NULL) {
+ mbuf->release();
+ signalEOS();
+ break;
+ }
+
+ size = mbuf->size();
+
+ memcpy(inbuf->data(), mbuf->data(), size);
+
+ if (mIsVideo) {
+ // video encoder will release MediaBuffer when done
+ // with underlying data.
+ inbuf->setMediaBufferBase(mbuf);
+ } else {
+ mbuf->release();
+ }
+ } else {
+ flags = MediaCodec::BUFFER_FLAG_EOS;
+ }
+
+ status_t err = mEncoder->queueInputBuffer(
+ bufferIndex, 0, size, timeUs, flags);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t MediaCodecSource::onStart(MetaData *params) {
+ if (mStopping) {
+ ALOGE("Failed to start while we're stopping");
+ return INVALID_OPERATION;
+ }
+
+ if (mStarted) {
+ ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ resume();
+ } else {
+ CHECK(mPuller != NULL);
+ mPuller->resume();
+ }
+ return OK;
+ }
+
+ ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");
+
+ status_t err = OK;
+
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ int64_t startTimeUs;
+ if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
+ startTimeUs = -1ll;
+ }
+ resume(startTimeUs);
+ } else {
+ CHECK(mPuller != NULL);
+ sp<AMessage> notify = new AMessage(
+ kWhatPullerNotify, mReflector->id());
+ err = mPuller->start(params, notify);
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");
+
+ mStarted = true;
+ return OK;
+}
+
+void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatPullerNotify:
+ {
+ MediaBuffer *mbuf;
+ CHECK(msg->findPointer("accessUnit", (void**)&mbuf));
+
+ if (mbuf == NULL) {
+ ALOGV("puller (%s) reached EOS",
+ mIsVideo ? "video" : "audio");
+ signalEOS();
+ }
+
+ if (mEncoder == NULL) {
+ ALOGV("got msg '%s' after encoder shutdown.",
+ msg->debugString().c_str());
+
+ if (mbuf != NULL) {
+ mbuf->release();
+ }
+
+ break;
+ }
+
+ mInputBufferQueue.push_back(mbuf);
+
+ feedEncoderInputBuffers();
+
+ break;
+ }
+ case kWhatEncoderActivity:
+ {
+ if (mEncoder == NULL) {
+ break;
+ }
+
+ int32_t cbID;
+ CHECK(msg->findInt32("callbackID", &cbID));
+ if (cbID == MediaCodec::CB_INPUT_AVAILABLE) {
+ int32_t index;
+ CHECK(msg->findInt32("index", &index));
+
+ mAvailEncoderInputIndices.push_back(index);
+ feedEncoderInputBuffers();
+ } else if (cbID == MediaCodec::CB_OUTPUT_AVAILABLE) {
+ int32_t index;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ int32_t flags;
+ native_handle_t* handle = NULL;
+
+ CHECK(msg->findInt32("index", &index));
+ CHECK(msg->findSize("offset", &offset));
+ CHECK(msg->findSize("size", &size));
+ CHECK(msg->findInt64("timeUs", &timeUs));
+ CHECK(msg->findInt32("flags", &flags));
+
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ mEncoder->releaseOutputBuffer(index);
+ signalEOS();
+ break;
+ }
+
+ sp<ABuffer> outbuf;
+ status_t err = mEncoder->getOutputBuffer(index, &outbuf);
+ if (err != OK || outbuf == NULL) {
+ signalEOS();
+ break;
+ }
+
+ MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
+ memcpy(mbuf->data(), outbuf->data(), outbuf->size());
+
+ if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
+ if (mIsVideo) {
+ int64_t decodingTimeUs;
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ // GraphicBufferSource is supposed to discard samples
+ // queued before start, and offset timeUs by start time
+ CHECK_GE(timeUs, 0ll);
+ // TODO:
+ // Decoding time for surface source is unavailable,
+ // use presentation time for now. May need to move
+ // this logic into MediaCodec.
+ decodingTimeUs = timeUs;
+ } else {
+ CHECK(!mDecodingTimeQueue.empty());
+ decodingTimeUs = *(mDecodingTimeQueue.begin());
+ mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
+ }
+ mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
+
+ ALOGV("[video] time %" PRId64 " us (%.2f secs), dts/pts diff %" PRId64,
+ timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
+ } else {
+ int64_t driftTimeUs = 0;
+#if DEBUG_DRIFT_TIME
+ CHECK(!mDriftTimeQueue.empty());
+ driftTimeUs = *(mDriftTimeQueue.begin());
+ mDriftTimeQueue.erase(mDriftTimeQueue.begin());
+ mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+ ALOGV("[audio] time %" PRId64 " us (%.2f secs), drift %" PRId64,
+ timeUs, timeUs / 1E6, driftTimeUs);
+ }
+ mbuf->meta_data()->setInt64(kKeyTime, timeUs);
+ } else {
+ mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
+ }
+ if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
+ mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
+ }
+ mbuf->setObserver(this);
+ mbuf->add_ref();
+
+ {
+ Mutex::Autolock autoLock(mOutputBufferLock);
+ mOutputBufferQueue.push_back(mbuf);
+ mOutputBufferCond.signal();
+ }
+
+ mEncoder->releaseOutputBuffer(index);
+ } else if (cbID == MediaCodec::CB_ERROR) {
+ status_t err;
+ CHECK(msg->findInt32("err", &err));
+ ALOGE("Encoder (%s) reported error : 0x%x",
+ mIsVideo ? "video" : "audio", err);
+ signalEOS();
+ }
+ break;
+ }
+ case kWhatStart:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<RefBase> obj;
+ CHECK(msg->findObject("meta", &obj));
+ MetaData *params = static_cast<MetaData *>(obj.get());
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", onStart(params));
+ response->postReply(replyID);
+ break;
+ }
+ case kWhatStop:
+ {
+ ALOGI("encoder (%s) stopping", mIsVideo ? "video" : "audio");
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mEncoderReachedEOS) {
+ // if we already reached EOS, reply and return now
+ ALOGI("encoder (%s) already stopped",
+ mIsVideo ? "video" : "audio");
+ (new AMessage)->postReply(replyID);
+ break;
+ }
+
+ mStopReplyIDQueue.push_back(replyID);
+ if (mStopping) {
+ // nothing to do if we're already stopping, reply will be posted
+ // to all when we're stopped.
+ break;
+ }
+
+ mStopping = true;
+
+ // if using surface, signal source EOS and wait for EOS to come back.
+ // otherwise, release encoder and post EOS if haven't done already
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ mEncoder->signalEndOfInputStream();
+ } else {
+ signalEOS();
+ }
+ break;
+ }
+ case kWhatPause:
+ {
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ suspend();
+ } else {
+ CHECK(mPuller != NULL);
+ mPuller->pause();
+ }
+ break;
+ }
+ default:
+ TRESPASS();
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index b5d4e44..c48a5ae 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -23,6 +23,7 @@ const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg";
const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
+const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
const char *MEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp";
const char *MEDIA_MIMETYPE_VIDEO_MPEG2 = "video/mpeg2";
@@ -33,15 +34,19 @@ const char *MEDIA_MIMETYPE_AUDIO_AMR_WB = "audio/amr-wb";
const char *MEDIA_MIMETYPE_AUDIO_MPEG = "audio/mpeg";
const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I = "audio/mpeg-L1";
const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II = "audio/mpeg-L2";
+const char *MEDIA_MIMETYPE_AUDIO_MIDI = "audio/midi";
const char *MEDIA_MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";
const char *MEDIA_MIMETYPE_AUDIO_QCELP = "audio/qcelp";
const char *MEDIA_MIMETYPE_AUDIO_VORBIS = "audio/vorbis";
+const char *MEDIA_MIMETYPE_AUDIO_OPUS = "audio/opus";
const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW = "audio/g711-alaw";
const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw";
const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw";
const char *MEDIA_MIMETYPE_AUDIO_FLAC = "audio/flac";
const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS = "audio/aac-adts";
const char *MEDIA_MIMETYPE_AUDIO_MSGSM = "audio/gsm";
+const char *MEDIA_MIMETYPE_AUDIO_AC3 = "audio/ac3";
+const char *MEDIA_MIMETYPE_AUDIO_EAC3 = "audio/eac3";
const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mp4";
const char *MEDIA_MIMETYPE_CONTAINER_WAV = "audio/x-wav";
@@ -55,5 +60,7 @@ const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
const char *MEDIA_MIMETYPE_TEXT_3GPP = "text/3gpp-tt";
const char *MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";
+const char *MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";
+const char *MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608";
} // namespace android
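The constants added above are plain MIME strings; the rest of stagefright keys component selection off them. As a quick illustration (a sketch only; the ALooper/MediaCodec setup is the usual pattern and is not part of this change), a caller could request an HEVC decoder by type:

    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/MediaDefs.h>

    using namespace android;

    // Sketch: ask for any component advertising the new HEVC MIME type.
    sp<MediaCodec> createHevcDecoder() {
        sp<ALooper> looper = new ALooper;
        looper->start();
        return MediaCodec::CreateByType(
                looper, MEDIA_MIMETYPE_VIDEO_HEVC, false /* encoder */);
    }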
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 9ab6611..e21fe6e 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -29,6 +29,7 @@
#include "include/WVMExtractor.h"
#include "include/FLACExtractor.h"
#include "include/AACExtractor.h"
+#include "include/MidiExtractor.h"
#include "matroska/MatroskaExtractor.h"
@@ -116,6 +117,8 @@ sp<MediaExtractor> MediaExtractor::Create(
ret = new AACExtractor(source, meta);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2PS)) {
ret = new MPEG2PSExtractor(source);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MIDI)) {
+ ret = new MidiExtractor(source);
}
if (ret != NULL) {
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index d87e910..c7c6f34 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaMuxer"
+
+#include "webm/WebmWriter.h"
+
#include <utils/Log.h>
#include <media/stagefright/MediaMuxer.h>
@@ -36,19 +39,30 @@
namespace android {
MediaMuxer::MediaMuxer(const char *path, OutputFormat format)
- : mState(UNINITIALIZED) {
+ : mFormat(format),
+ mState(UNINITIALIZED) {
if (format == OUTPUT_FORMAT_MPEG_4) {
mWriter = new MPEG4Writer(path);
+ } else if (format == OUTPUT_FORMAT_WEBM) {
+ mWriter = new WebmWriter(path);
+ }
+
+ if (mWriter != NULL) {
mFileMeta = new MetaData;
mState = INITIALIZED;
}
-
}
MediaMuxer::MediaMuxer(int fd, OutputFormat format)
- : mState(UNINITIALIZED) {
+ : mFormat(format),
+ mState(UNINITIALIZED) {
if (format == OUTPUT_FORMAT_MPEG_4) {
mWriter = new MPEG4Writer(fd);
+ } else if (format == OUTPUT_FORMAT_WEBM) {
+ mWriter = new WebmWriter(fd);
+ }
+
+ if (mWriter != NULL) {
mFileMeta = new MetaData;
mState = INITIALIZED;
}
@@ -109,8 +123,13 @@ status_t MediaMuxer::setLocation(int latitude, int longitude) {
ALOGE("setLocation() must be called before start().");
return INVALID_OPERATION;
}
+ if (mFormat != OUTPUT_FORMAT_MPEG_4) {
+ ALOGE("setLocation() is only supported for .mp4 output.");
+ return INVALID_OPERATION;
+ }
+
ALOGV("Setting location: latitude = %d, longitude = %d", latitude, longitude);
- return mWriter->setGeoData(latitude, longitude);
+ return static_cast<MPEG4Writer*>(mWriter.get())->setGeoData(latitude, longitude);
}
status_t MediaMuxer::start() {
@@ -157,7 +176,7 @@ status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackInde
}
if (trackIndex >= mTrackList.size()) {
- ALOGE("WriteSampleData() get an invalid index %d", trackIndex);
+ ALOGE("WriteSampleData() get an invalid index %zu", trackIndex);
return -EINVAL;
}
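With the WebM writer wired in, the backend is chosen solely by the OutputFormat passed to the constructor, and MP4-only calls such as setLocation() are rejected before the MPEG4Writer downcast. A hedged usage sketch follows; the track format and file-descriptor handling are illustrative only:

    #include <media/stagefright/foundation/AMessage.h>
    #include <media/stagefright/MediaMuxer.h>

    using namespace android;

    // Sketch: mux a single track into WebM; setLocation() would return
    // INVALID_OPERATION here because the output format is not MP4.
    status_t muxToWebm(int fd, const sp<AMessage> &videoTrackFormat) {
        sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_WEBM);
        ssize_t trackIndex = muxer->addTrack(videoTrackFormat);
        if (trackIndex < 0) {
            return (status_t)trackIndex;  // e.g. no writer for this format
        }
        return muxer->start();
    }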
diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/MediaSource.cpp
index fd0e79c..576471a 100644
--- a/media/libstagefright/MediaSource.cpp
+++ b/media/libstagefright/MediaSource.cpp
@@ -32,6 +32,19 @@ void MediaSource::ReadOptions::reset() {
mOptions = 0;
mSeekTimeUs = 0;
mLatenessUs = 0;
+ mNonBlocking = false;
+}
+
+void MediaSource::ReadOptions::setNonBlocking() {
+ mNonBlocking = true;
+}
+
+void MediaSource::ReadOptions::clearNonBlocking() {
+ mNonBlocking = false;
+}
+
+bool MediaSource::ReadOptions::getNonBlocking() const {
+ return mNonBlocking;
}
void MediaSource::ReadOptions::setSeekTo(int64_t time_us, SeekMode mode) {
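The non-blocking flag simply rides along in ReadOptions; a source that honors it is expected to return a transient error instead of stalling in read(). A minimal caller-side sketch, assuming -EWOULDBLOCK as the transient error code (that convention is an assumption here, not something this change defines):

    #include <media/stagefright/MediaBuffer.h>
    #include <media/stagefright/MediaSource.h>

    using namespace android;

    // Sketch: poll a source without blocking; retry later on -EWOULDBLOCK.
    status_t tryRead(const sp<MediaSource> &source, MediaBuffer **buffer) {
        MediaSource::ReadOptions options;
        options.setNonBlocking();            // added by this change
        status_t err = source->read(buffer, &options);
        if (err == -EWOULDBLOCK) {
            *buffer = NULL;                  // nothing available yet
        }
        return err;
    }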
diff --git a/media/libstagefright/MidiExtractor.cpp b/media/libstagefright/MidiExtractor.cpp
new file mode 100644
index 0000000..66fab77
--- /dev/null
+++ b/media/libstagefright/MidiExtractor.cpp
@@ -0,0 +1,325 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MidiExtractor"
+#include <utils/Log.h>
+
+#include "include/MidiExtractor.h"
+
+#include <media/MidiIoWrapper.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaSource.h>
+#include <libsonivox/eas_reverb.h>
+
+namespace android {
+
+// how many Sonivox output buffers to aggregate into one MediaBuffer
+static const int NUM_COMBINE_BUFFERS = 4;
+
+class MidiSource : public MediaSource {
+
+public:
+ MidiSource(
+ const sp<MidiEngine> &engine,
+ const sp<MetaData> &trackMetadata);
+
+ virtual status_t start(MetaData *params);
+ virtual status_t stop();
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+protected:
+ virtual ~MidiSource();
+
+private:
+ sp<MidiEngine> mEngine;
+ sp<MetaData> mTrackMetadata;
+ bool mInitCheck;
+ bool mStarted;
+
+ status_t init();
+
+ // no copy constructor or assignment
+ MidiSource(const MidiSource &);
+ MidiSource &operator=(const MidiSource &);
+
+};
+
+
+// MidiSource
+
+MidiSource::MidiSource(
+ const sp<MidiEngine> &engine,
+ const sp<MetaData> &trackMetadata)
+ : mEngine(engine),
+ mTrackMetadata(trackMetadata),
+ mInitCheck(false),
+ mStarted(false)
+{
+ ALOGV("MidiSource ctor");
+ mInitCheck = init();
+}
+
+MidiSource::~MidiSource()
+{
+ ALOGV("MidiSource dtor");
+ if (mStarted) {
+ stop();
+ }
+}
+
+status_t MidiSource::start(MetaData * /* params */)
+{
+ ALOGV("MidiSource::start");
+
+ CHECK(!mStarted);
+ mStarted = true;
+ mEngine->allocateBuffers();
+ return OK;
+}
+
+status_t MidiSource::stop()
+{
+ ALOGV("MidiSource::stop");
+
+ CHECK(mStarted);
+ mStarted = false;
+ mEngine->releaseBuffers();
+
+ return OK;
+}
+
+sp<MetaData> MidiSource::getFormat()
+{
+ return mTrackMetadata;
+}
+
+status_t MidiSource::read(
+ MediaBuffer **outBuffer, const ReadOptions *options)
+{
+ ALOGV("MidiSource::read");
+ MediaBuffer *buffer;
+ // process an optional seek request
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode mode;
+ if ((NULL != options) && options->getSeekTo(&seekTimeUs, &mode)) {
+ if (seekTimeUs <= 0LL) {
+ seekTimeUs = 0LL;
+ }
+ mEngine->seekTo(seekTimeUs);
+ }
+ buffer = mEngine->readBuffer();
+ *outBuffer = buffer;
+ ALOGV("MidiSource::read %p done", this);
+ return buffer != NULL ? (status_t) OK : (status_t) ERROR_END_OF_STREAM;
+}
+
+status_t MidiSource::init()
+{
+ ALOGV("MidiSource::init");
+ return OK;
+}
+
+// MidiEngine
+
+MidiEngine::MidiEngine(const sp<DataSource> &dataSource,
+ const sp<MetaData> &fileMetadata,
+ const sp<MetaData> &trackMetadata) :
+ mGroup(NULL),
+ mEasData(NULL),
+ mEasHandle(NULL),
+ mEasConfig(NULL),
+ mIsInitialized(false) {
+ mIoWrapper = new MidiIoWrapper(dataSource);
+ // spin up a new EAS engine
+ EAS_I32 temp;
+ EAS_RESULT result = EAS_Init(&mEasData);
+
+ if (result == EAS_SUCCESS) {
+ result = EAS_OpenFile(mEasData, mIoWrapper->getLocator(), &mEasHandle);
+ }
+ if (result == EAS_SUCCESS) {
+ result = EAS_Prepare(mEasData, mEasHandle);
+ }
+ if (result == EAS_SUCCESS) {
+ result = EAS_ParseMetaData(mEasData, mEasHandle, &temp);
+ }
+
+ if (result != EAS_SUCCESS) {
+ return;
+ }
+
+ if (fileMetadata != NULL) {
+ fileMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MIDI);
+ }
+
+ if (trackMetadata != NULL) {
+ trackMetadata->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+ trackMetadata->setInt64(kKeyDuration, 1000ll * temp); // milli->micro
+ mEasConfig = EAS_Config();
+ trackMetadata->setInt32(kKeySampleRate, mEasConfig->sampleRate);
+ trackMetadata->setInt32(kKeyChannelCount, mEasConfig->numChannels);
+ }
+ mIsInitialized = true;
+}
+
+MidiEngine::~MidiEngine() {
+ if (mEasHandle) {
+ EAS_CloseFile(mEasData, mEasHandle);
+ }
+ if (mEasData) {
+ EAS_Shutdown(mEasData);
+ }
+ delete mGroup;
+
+}
+
+status_t MidiEngine::initCheck() {
+ return mIsInitialized ? OK : UNKNOWN_ERROR;
+}
+
+status_t MidiEngine::allocateBuffers() {
+ // select reverb preset and enable
+ EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_PRESET, EAS_PARAM_REVERB_CHAMBER);
+ EAS_SetParameter(mEasData, EAS_MODULE_REVERB, EAS_PARAM_REVERB_BYPASS, EAS_FALSE);
+
+ mGroup = new MediaBufferGroup;
+ int bufsize = sizeof(EAS_PCM)
+ * mEasConfig->mixBufferSize * mEasConfig->numChannels * NUM_COMBINE_BUFFERS;
+ ALOGV("using %d byte buffer", bufsize);
+ mGroup->add_buffer(new MediaBuffer(bufsize));
+ return OK;
+}
+
+status_t MidiEngine::releaseBuffers() {
+ delete mGroup;
+ mGroup = NULL;
+ return OK;
+}
+
+status_t MidiEngine::seekTo(int64_t positionUs) {
+ ALOGV("seekTo %lld", positionUs);
+ EAS_RESULT result = EAS_Locate(mEasData, mEasHandle, positionUs / 1000, false);
+ return result == EAS_SUCCESS ? OK : UNKNOWN_ERROR;
+}
+
+MediaBuffer* MidiEngine::readBuffer() {
+ EAS_STATE state;
+ EAS_State(mEasData, mEasHandle, &state);
+ if ((state == EAS_STATE_STOPPED) || (state == EAS_STATE_ERROR)) {
+ return NULL;
+ }
+ MediaBuffer *buffer;
+ status_t err = mGroup->acquire_buffer(&buffer);
+ if (err != OK) {
+ ALOGE("readBuffer: no buffer");
+ return NULL;
+ }
+ EAS_I32 timeMs;
+ EAS_GetLocation(mEasData, mEasHandle, &timeMs);
+ int64_t timeUs = 1000ll * timeMs;
+ buffer->meta_data()->setInt64(kKeyTime, timeUs);
+
+ EAS_PCM* p = (EAS_PCM*) buffer->data();
+ int numBytesOutput = 0;
+ for (int i = 0; i < NUM_COMBINE_BUFFERS; i++) {
+ EAS_I32 numRendered;
+ EAS_RESULT result = EAS_Render(mEasData, p, mEasConfig->mixBufferSize, &numRendered);
+ if (result != EAS_SUCCESS) {
+ ALOGE("EAS_Render returned %ld", result);
+ break;
+ }
+ p += numRendered * mEasConfig->numChannels;
+ numBytesOutput += numRendered * mEasConfig->numChannels * sizeof(EAS_PCM);
+ }
+ buffer->set_range(0, numBytesOutput);
+ ALOGV("readBuffer: returning %zd in buffer %p", buffer->range_length(), buffer);
+ return buffer;
+}
+
+
+// MidiExtractor
+
+MidiExtractor::MidiExtractor(
+ const sp<DataSource> &dataSource)
+ : mDataSource(dataSource),
+ mInitCheck(false)
+{
+ ALOGV("MidiExtractor ctor");
+ mFileMetadata = new MetaData;
+ mTrackMetadata = new MetaData;
+ mEngine = new MidiEngine(mDataSource, mFileMetadata, mTrackMetadata);
+ mInitCheck = mEngine->initCheck();
+}
+
+MidiExtractor::~MidiExtractor()
+{
+ ALOGV("MidiExtractor dtor");
+}
+
+size_t MidiExtractor::countTracks()
+{
+ return mInitCheck == OK ? 1 : 0;
+}
+
+sp<MediaSource> MidiExtractor::getTrack(size_t index)
+{
+ if (mInitCheck != OK || index > 0) {
+ return NULL;
+ }
+ return new MidiSource(mEngine, mTrackMetadata);
+}
+
+sp<MetaData> MidiExtractor::getTrackMetaData(
+ size_t index, uint32_t /* flags */) {
+ ALOGV("MidiExtractor::getTrackMetaData");
+ if (mInitCheck != OK || index > 0) {
+ return NULL;
+ }
+ return mTrackMetadata;
+}
+
+sp<MetaData> MidiExtractor::getMetaData()
+{
+ ALOGV("MidiExtractor::getMetaData");
+ return mFileMetadata;
+}
+
+// Sniffer
+
+bool SniffMidi(
+ const sp<DataSource> &source, String8 *mimeType, float *confidence,
+ sp<AMessage> *)
+{
+ sp<MidiEngine> p = new MidiEngine(source, NULL, NULL);
+ if (p->initCheck() == OK) {
+ *mimeType = MEDIA_MIMETYPE_AUDIO_MIDI;
+ *confidence = 0.8;
+ ALOGV("SniffMidi: yes");
+ return true;
+ }
+ ALOGV("SniffMidi: no");
+ return false;
+
+}
+
+} // namespace android
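SniffMidi() spins up a throwaway MidiEngine just to see whether Sonivox accepts the stream, which also makes it the simplest way to exercise the extractor end to end. A sketch of that flow (any seekable DataSource works; error handling is abbreviated):

    #include <utils/String8.h>
    #include <media/stagefright/DataSource.h>
    #include <media/stagefright/MediaBuffer.h>
    #include "include/MidiExtractor.h"

    using namespace android;

    // Sketch: sniff a source and, if it is MIDI, decode one buffer of PCM.
    bool decodeFirstMidiBuffer(const sp<DataSource> &source) {
        String8 mime;
        float confidence;
        if (!SniffMidi(source, &mime, &confidence, NULL)) {
            return false;                      // not a MIDI file
        }
        sp<MediaExtractor> extractor = new MidiExtractor(source);
        sp<MediaSource> track = extractor->getTrack(0);
        if (track == NULL || track->start(NULL) != OK) {
            return false;
        }
        MediaBuffer *buffer = NULL;
        status_t err = track->read(&buffer);   // rendered 16-bit PCM
        if (err == OK && buffer != NULL) {
            buffer->release();
        }
        track->stop();
        return err == OK;
    }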
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index 05e599b..7d7d631 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "NuCachedSource2"
#include <utils/Log.h>
@@ -135,7 +137,7 @@ size_t PageCache::releaseFromStart(size_t maxBytes) {
}
void PageCache::copy(size_t from, void *data, size_t size) {
- ALOGV("copy from %d size %d", from, size);
+ ALOGV("copy from %zu size %zu", from, size);
if (size == 0) {
return;
@@ -189,6 +191,7 @@ NuCachedSource2::NuCachedSource2(
mFinalStatus(OK),
mLastAccessPos(0),
mFetching(true),
+ mDisconnecting(false),
mLastFetchTimeUs(-1),
mNumRetriesLeft(kMaxNumRetries),
mHighwaterThresholdBytes(kDefaultHighWaterThreshold),
@@ -213,7 +216,14 @@ NuCachedSource2::NuCachedSource2(
mLooper->setName("NuCachedSource2");
mLooper->registerHandler(mReflector);
- mLooper->start();
+
+    // It may not be obvious why our looper thread needs to be able to call
+    // into Java, since it doesn't appear to do so at all:
+    // IMediaHTTPConnection may be (and most likely is) implemented in Java,
+    // and a local Java IBinder will call directly into JNI methods. So
+    // whenever we call DataSource::readAt it may end up in a call to
+    // IMediaHTTPConnection::readAt and therefore call back into Java.
+ mLooper->start(false /* runOnCallingThread */, true /* canCallJava */);
Mutex::Autolock autoLock(mLock);
(new AMessage(kWhatFetchMore, mReflector->id()))->post();
@@ -235,6 +245,27 @@ status_t NuCachedSource2::getEstimatedBandwidthKbps(int32_t *kbps) {
return ERROR_UNSUPPORTED;
}
+void NuCachedSource2::disconnect() {
+ if (mSource->flags() & kIsHTTPBasedSource) {
+ ALOGV("disconnecting HTTPBasedSource");
+
+ {
+ Mutex::Autolock autoLock(mLock);
+ // set mDisconnecting to true, if a fetch returns after
+ // this, the source will be marked as EOS.
+ mDisconnecting = true;
+
+ // explicitly signal mCondition so that the pending readAt()
+ // will immediately return
+ mCondition.signal();
+ }
+
+ // explicitly disconnect from the source, to allow any
+ // pending reads to return more promptly
+ static_cast<HTTPBase *>(mSource.get())->disconnect();
+ }
+}
+
status_t NuCachedSource2::setCacheStatCollectFreq(int32_t freqMs) {
if (mSource->flags() & kIsHTTPBasedSource) {
HTTPBase *source = static_cast<HTTPBase *>(mSource.get());
@@ -298,7 +329,11 @@ void NuCachedSource2::fetchInternal() {
Mutex::Autolock autoLock(mLock);
- if (err == ERROR_UNSUPPORTED || err == -EPIPE) {
+ if (mDisconnecting) {
+ mNumRetriesLeft = 0;
+ mFinalStatus = ERROR_END_OF_STREAM;
+ return;
+ } else if (err == ERROR_UNSUPPORTED || err == -EPIPE) {
// These are errors that are not likely to go away even if we
// retry, i.e. the server doesn't support range requests or similar.
mNumRetriesLeft = 0;
@@ -318,7 +353,14 @@ void NuCachedSource2::fetchInternal() {
Mutex::Autolock autoLock(mLock);
- if (n < 0) {
+ if (n == 0 || mDisconnecting) {
+ ALOGI("caching reached eos.");
+
+ mNumRetriesLeft = 0;
+ mFinalStatus = ERROR_END_OF_STREAM;
+
+ mCache->releasePage(page);
+ } else if (n < 0) {
mFinalStatus = n;
if (n == ERROR_UNSUPPORTED || n == -EPIPE) {
// These are errors that are not likely to go away even if we
@@ -326,14 +368,7 @@ void NuCachedSource2::fetchInternal() {
mNumRetriesLeft = 0;
}
- ALOGE("source returned error %ld, %d retries left", n, mNumRetriesLeft);
- mCache->releasePage(page);
- } else if (n == 0) {
- ALOGI("ERROR_END_OF_STREAM");
-
- mNumRetriesLeft = 0;
- mFinalStatus = ERROR_END_OF_STREAM;
-
+ ALOGE("source returned error %zd, %d retries left", n, mNumRetriesLeft);
mCache->releasePage(page);
} else {
if (mFinalStatus != OK) {
@@ -421,6 +456,10 @@ void NuCachedSource2::onRead(const sp<AMessage> &msg) {
}
Mutex::Autolock autoLock(mLock);
+ if (mDisconnecting) {
+ mCondition.signal();
+ return;
+ }
CHECK(mAsyncResult == NULL);
@@ -457,16 +496,19 @@ void NuCachedSource2::restartPrefetcherIfNecessary_l(
size_t actualBytes = mCache->releaseFromStart(maxBytes);
mCacheOffset += actualBytes;
- ALOGI("restarting prefetcher, totalSize = %d", mCache->totalSize());
+ ALOGI("restarting prefetcher, totalSize = %zu", mCache->totalSize());
mFetching = true;
}
ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoSerializer(mSerializer);
- ALOGV("readAt offset %lld, size %d", offset, size);
+ ALOGV("readAt offset %lld, size %zu", offset, size);
Mutex::Autolock autoLock(mLock);
+ if (mDisconnecting) {
+ return ERROR_END_OF_STREAM;
+ }
// If the request can be completely satisfied from the cache, do so.
@@ -488,10 +530,15 @@ ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {
CHECK(mAsyncResult == NULL);
msg->post();
- while (mAsyncResult == NULL) {
+ while (mAsyncResult == NULL && !mDisconnecting) {
mCondition.wait(mLock);
}
+ if (mDisconnecting) {
+ mAsyncResult.clear();
+ return ERROR_END_OF_STREAM;
+ }
+
int32_t result;
CHECK(mAsyncResult->findInt32("result", &result));
@@ -532,7 +579,7 @@ size_t NuCachedSource2::approxDataRemaining_l(status_t *finalStatus) const {
ssize_t NuCachedSource2::readInternal(off64_t offset, void *data, size_t size) {
CHECK_LE(size, (size_t)mHighwaterThresholdBytes);
- ALOGV("readInternal offset %lld size %d", offset, size);
+ ALOGV("readInternal offset %lld size %zu", offset, size);
Mutex::Autolock autoLock(mLock);
@@ -641,7 +688,7 @@ void NuCachedSource2::updateCacheParamsFromString(const char *s) {
ssize_t lowwaterMarkKb, highwaterMarkKb;
int keepAliveSecs;
- if (sscanf(s, "%ld/%ld/%d",
+ if (sscanf(s, "%zd/%zd/%d",
&lowwaterMarkKb, &highwaterMarkKb, &keepAliveSecs) != 3) {
ALOGE("Failed to parse cache parameters from '%s'.", s);
return;
@@ -672,7 +719,7 @@ void NuCachedSource2::updateCacheParamsFromString(const char *s) {
mKeepAliveIntervalUs = kDefaultKeepAliveIntervalUs;
}
- ALOGV("lowwater = %d bytes, highwater = %d bytes, keepalive = %lld us",
+ ALOGV("lowwater = %zu bytes, highwater = %zu bytes, keepalive = %" PRId64 " us",
mLowwaterThresholdBytes,
mHighwaterThresholdBytes,
mKeepAliveIntervalUs);
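The disconnect path combines three pieces: a flag that readAt() and fetchInternal() check under mLock, an explicit signal so a blocked readAt() wakes up immediately, and a hard disconnect of the HTTP source so in-flight transfers abort. The same pattern, reduced to its essentials (the struct and names below are illustrative, not part of NuCachedSource2):

    #include <utils/Condition.h>
    #include <utils/Mutex.h>

    // Sketch of the cancellable-wait pattern used above: a waiter blocks until
    // either a result arrives or disconnect() flips the flag and signals.
    struct CancellableWait {
        android::Mutex mLock;
        android::Condition mCondition;
        bool mDisconnecting;
        bool mHaveResult;

        CancellableWait() : mDisconnecting(false), mHaveResult(false) {}

        // reader thread (mirrors NuCachedSource2::readAt)
        bool wait() {
            android::Mutex::Autolock autoLock(mLock);
            while (!mHaveResult && !mDisconnecting) {
                mCondition.wait(mLock);
            }
            return mHaveResult && !mDisconnecting;  // false => treat as EOS
        }

        // control thread (mirrors NuCachedSource2::disconnect)
        void disconnect() {
            android::Mutex::Autolock autoLock(mLock);
            mDisconnecting = true;
            mCondition.signal();                    // wake any pending wait()
        }
    };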
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 7bc7da2..f24cf3a 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -58,7 +58,9 @@ NuMediaExtractor::~NuMediaExtractor() {
}
status_t NuMediaExtractor::setDataSource(
- const char *path, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *path,
+ const KeyedVector<String8, String8> *headers) {
Mutex::Autolock autoLock(mLock);
if (mImpl != NULL) {
@@ -66,7 +68,7 @@ status_t NuMediaExtractor::setDataSource(
}
sp<DataSource> dataSource =
- DataSource::CreateFromURI(path, headers);
+ DataSource::CreateFromURI(httpService, path, headers);
if (dataSource == NULL) {
return -ENOENT;
@@ -387,7 +389,7 @@ ssize_t NuMediaExtractor::fetchTrackSamples(
info->mFinalResult = err;
if (info->mFinalResult != ERROR_END_OF_STREAM) {
- ALOGW("read on track %d failed with error %d",
+ ALOGW("read on track %zu failed with error %d",
info->mTrackIndex, err);
}
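setDataSource() now threads the caller's IMediaHTTPService through to DataSource::CreateFromURI, so HTTP connections are created by the client rather than inside the extractor. For a plain file path the service can be NULL; the helper below is a sketch of the updated call, not part of this change:

    #include <media/stagefright/NuMediaExtractor.h>

    using namespace android;

    // Sketch: the httpService argument only matters for http/https URIs.
    status_t openLocalFile(const sp<NuMediaExtractor> &extractor, const char *path) {
        return extractor->setDataSource(
                NULL /* httpService */, path, NULL /* headers */);
    }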
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 9f9352d..230c1f7 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -16,6 +16,11 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "OMXClient"
+
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
#include <utils/Log.h>
#include <binder/IServiceManager.h>
@@ -32,7 +37,7 @@ struct MuxOMX : public IOMX {
MuxOMX(const sp<IOMX> &remoteOMX);
virtual ~MuxOMX();
- virtual IBinder *onAsBinder() { return mRemoteOMX->asBinder().get(); }
+ virtual IBinder *onAsBinder() { return IInterface::asBinder(mRemoteOMX).get(); }
virtual bool livesLocally(node_id node, pid_t pid);
@@ -73,6 +78,10 @@ struct MuxOMX : public IOMX {
node_id node, OMX_U32 port_index, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+ virtual status_t configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
+
virtual status_t enableGraphicBuffers(
node_id node, OMX_U32 port_index, OMX_BOOL enable);
@@ -141,7 +150,7 @@ private:
const sp<IOMX> &getOMX(node_id node) const;
const sp<IOMX> &getOMX_l(node_id node) const;
- static bool IsSoftwareComponent(const char *name);
+ static bool CanLiveLocally(const char *name);
DISALLOW_EVIL_CONSTRUCTORS(MuxOMX);
};
@@ -164,8 +173,15 @@ bool MuxOMX::isLocalNode_l(node_id node) const {
}
// static
-bool MuxOMX::IsSoftwareComponent(const char *name) {
+bool MuxOMX::CanLiveLocally(const char *name) {
+#ifdef __LP64__
+ (void)name; // disable unused parameter warning
+ // 64 bit processes always run OMX remote on MediaServer
+ return false;
+#else
+ // 32 bit processes run only OMX.google.* components locally
return !strncasecmp(name, "OMX.google.", 11);
+#endif
}
const sp<IOMX> &MuxOMX::getOMX(node_id node) const {
@@ -197,7 +213,7 @@ status_t MuxOMX::allocateNode(
sp<IOMX> omx;
- if (IsSoftwareComponent(name)) {
+ if (CanLiveLocally(name)) {
if (mLocalOMX == NULL) {
mLocalOMX = new OMX;
}
@@ -279,6 +295,13 @@ status_t MuxOMX::prepareForAdaptivePlayback(
node, port_index, enable, maxFrameWidth, maxFrameHeight);
}
+status_t MuxOMX::configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL enable,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle) {
+ return getOMX(node)->configureVideoTunnelMode(
+ node, portIndex, enable, audioHwSync, sidebandHandle);
+}
+
status_t MuxOMX::enableGraphicBuffers(
node_id node, OMX_U32 port_index, OMX_BOOL enable) {
return getOMX(node)->enableGraphicBuffers(node, port_index, enable);
@@ -382,7 +405,7 @@ status_t OMXClient::connect() {
mOMX = service->getOMX();
CHECK(mOMX.get() != NULL);
- if (!mOMX->livesLocally(NULL /* node */, getpid())) {
+ if (!mOMX->livesLocally(0 /* node */, getpid())) {
ALOGI("Using client-side OMX mux.");
mOMX = new MuxOMX(mOMX);
}
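The rename makes the placement policy explicit: 32-bit clients may host OMX.google.* software components in-process, while 64-bit clients always use the remote OMX in mediaserver because the components themselves are still 32-bit. The decision, condensed into a free-function sketch:

    #include <string.h>

    // Sketch of the node-placement policy introduced above.
    static bool canLiveLocally(const char *componentName) {
    #ifdef __LP64__
        (void)componentName;
        return false;  // 64-bit clients always go through mediaserver
    #else
        return !strncasecmp(componentName, "OMX.google.", 11);
    #endif
    }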
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 43736ad..4d30069 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -14,8 +14,15 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "OMXCodec"
+
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
#include <utils/Log.h>
#include "include/AACEncoder.h"
@@ -28,6 +35,7 @@
#include <HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/IMediaPlayerService.h>
+#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
@@ -39,8 +47,11 @@
#include <media/stagefright/SkipCutBuffer.h>
#include <utils/Vector.h>
-#include <OMX_Audio.h>
+#include <OMX_AudioExt.h>
#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
+#include <OMX_VideoExt.h>
+#include <OMX_AsString.h>
#include "include/avc_utils.h"
@@ -90,9 +101,10 @@ static sp<MediaSource> InstantiateSoftwareEncoder(
#undef FACTORY_CREATE_ENCODER
#undef FACTORY_REF
-#define CODEC_LOGI(x, ...) ALOGI("[%s] "x, mComponentName, ##__VA_ARGS__)
-#define CODEC_LOGV(x, ...) ALOGV("[%s] "x, mComponentName, ##__VA_ARGS__)
-#define CODEC_LOGE(x, ...) ALOGE("[%s] "x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGI(x, ...) ALOGI("[%s] " x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGV(x, ...) ALOGV("[%s] " x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGW(x, ...) ALOGW("[%s] " x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGE(x, ...) ALOGE("[%s] " x, mComponentName, ##__VA_ARGS__)
struct OMXCodecObserver : public BnOMXObserver {
OMXCodecObserver() {
@@ -125,6 +137,7 @@ private:
template<class T>
static void InitOMXParams(T *params) {
+ COMPILE_TIME_ASSERT_FUNCTION_SCOPE(sizeof(OMX_PTR) == 4); // check OMX_PTR is 4 bytes.
params->nSize = sizeof(T);
params->nVersion.s.nVersionMajor = 1;
params->nVersion.s.nVersionMinor = 0;
@@ -186,7 +199,7 @@ void OMXCodec::findMatchingCodecs(
Vector<CodecNameAndQuirks> *matchingCodecs) {
matchingCodecs->clear();
- const MediaCodecList *list = MediaCodecList::getInstance();
+ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
if (list == NULL) {
return;
}
@@ -202,7 +215,9 @@ void OMXCodec::findMatchingCodecs(
index = matchIndex + 1;
- const char *componentName = list->getCodecName(matchIndex);
+ const sp<MediaCodecInfo> info = list->getCodecInfo(matchIndex);
+ CHECK(info != NULL);
+ const char *componentName = info->getCodecName();
// If a specific codec is requested, skip the non-matching ones.
if (matchComponentName && strcmp(componentName, matchComponentName)) {
@@ -220,7 +235,7 @@ void OMXCodec::findMatchingCodecs(
ssize_t index = matchingCodecs->add();
CodecNameAndQuirks *entry = &matchingCodecs->editItemAt(index);
entry->mName = String8(componentName);
- entry->mQuirks = getComponentQuirks(list, matchIndex);
+ entry->mQuirks = getComponentQuirks(info);
ALOGV("matching '%s' quirks 0x%08x",
entry->mName.string(), entry->mQuirks);
@@ -234,18 +249,15 @@ void OMXCodec::findMatchingCodecs(
// static
uint32_t OMXCodec::getComponentQuirks(
- const MediaCodecList *list, size_t index) {
+ const sp<MediaCodecInfo> &info) {
uint32_t quirks = 0;
- if (list->codecHasQuirk(
- index, "requires-allocate-on-input-ports")) {
+ if (info->hasQuirk("requires-allocate-on-input-ports")) {
quirks |= kRequiresAllocateBufferOnInputPorts;
}
- if (list->codecHasQuirk(
- index, "requires-allocate-on-output-ports")) {
+ if (info->hasQuirk("requires-allocate-on-output-ports")) {
quirks |= kRequiresAllocateBufferOnOutputPorts;
}
- if (list->codecHasQuirk(
- index, "output-buffers-are-unreadable")) {
+ if (info->hasQuirk("output-buffers-are-unreadable")) {
quirks |= kOutputBuffersAreUnreadable;
}
@@ -254,8 +266,7 @@ uint32_t OMXCodec::getComponentQuirks(
// static
bool OMXCodec::findCodecQuirks(const char *componentName, uint32_t *quirks) {
- const MediaCodecList *list = MediaCodecList::getInstance();
-
+ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
if (list == NULL) {
return false;
}
@@ -266,7 +277,9 @@ bool OMXCodec::findCodecQuirks(const char *componentName, uint32_t *quirks) {
return false;
}
- *quirks = getComponentQuirks(list, index);
+ const sp<MediaCodecInfo> info = list->getCodecInfo(index);
+ CHECK(info != NULL);
+ *quirks = getComponentQuirks(info);
return true;
}
@@ -370,6 +383,57 @@ sp<MediaSource> OMXCodec::Create(
return NULL;
}
+status_t OMXCodec::parseHEVCCodecSpecificData(
+ const void *data, size_t size,
+ unsigned *profile, unsigned *level) {
+ const uint8_t *ptr = (const uint8_t *)data;
+
+    // verify that the fixed-size header (23 bytes, through numOfArrays) is
+    // present and that configurationVersion == 1.
+    if (size < 23 || ptr[0] != 1) {
+ return ERROR_MALFORMED;
+ }
+
+ *profile = (ptr[1] & 31);
+ *level = ptr[12];
+
+ ptr += 22;
+ size -= 22;
+
+ size_t numofArrays = (char)ptr[0];
+ ptr += 1;
+ size -= 1;
+ size_t j = 0, i = 0;
+ for (i = 0; i < numofArrays; i++) {
+ ptr += 1;
+ size -= 1;
+
+ // Num of nals
+ size_t numofNals = U16_AT(ptr);
+ ptr += 2;
+ size -= 2;
+
+        for (j = 0; j < numofNals; j++) {
+ if (size < 2) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ if (size < length) {
+ return ERROR_MALFORMED;
+ }
+ addCodecSpecificData(ptr, length);
+
+ ptr += length;
+ size -= length;
+ }
+ }
+ return OK;
+}
+
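For reference, the byte layout the parser above walks is the HEVCDecoderConfigurationRecord from ISO/IEC 14496-15; the annotated map below is a reading aid, with only the fields the code actually touches called out:

    // [0]      configurationVersion            -- must be 1
    // [1]      profile_space/tier/profile_idc  -- profile = ptr[1] & 0x1f
    // [2..11]  profile compatibility and constraint indicator flags
    // [12]     general_level_idc               -- reported as *level
    // [13..21] segmentation / parallelism / chroma / bit depth / frame rate
    // [22]     numOfArrays
    // then, per array:
    //   1 byte   completeness + NAL unit type  (skipped by the parser)
    //   2 bytes  numNalus, big-endian (read via U16_AT)
    //   per NALU: 2-byte big-endian length followed by the NAL unit payload,
    //             which is forwarded through addCodecSpecificData()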
status_t OMXCodec::parseAVCCodecSpecificData(
const void *data, size_t size,
unsigned *profile, unsigned *level) {
@@ -387,7 +451,7 @@ status_t OMXCodec::parseAVCCodecSpecificData(
// assertion, let's be lenient for now...
// CHECK((ptr[4] >> 2) == 0x3f); // reserved
- size_t lengthSize = 1 + (ptr[4] & 3);
+ size_t lengthSize __unused = 1 + (ptr[4] & 3);
// commented out check below as H264_QVGA_500_NO_AUDIO.3gp
// violates it...
@@ -482,11 +546,32 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta) {
CODEC_LOGI(
"AVC profile = %u (%s), level = %u",
profile, AVCProfileToString(profile), level);
+ } else if (meta->findData(kKeyHVCC, &type, &data, &size)) {
+ // Parse the HEVCDecoderConfigurationRecord
+
+ unsigned profile, level;
+ status_t err;
+ if ((err = parseHEVCCodecSpecificData(
+ data, size, &profile, &level)) != OK) {
+ ALOGE("Malformed HEVC codec specific data.");
+ return err;
+ }
+
+ CODEC_LOGI(
+ "HEVC profile = %u , level = %u",
+ profile, level);
} else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
addCodecSpecificData(data, size);
CHECK(meta->findData(kKeyVorbisBooks, &type, &data, &size));
addCodecSpecificData(data, size);
+ } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) {
+ addCodecSpecificData(data, size);
+
+ CHECK(meta->findData(kKeyOpusCodecDelay, &type, &data, &size));
+ addCodecSpecificData(data, size);
+ CHECK(meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size));
+ addCodecSpecificData(data, size);
}
}
@@ -528,15 +613,30 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta) {
sampleRate,
numChannels);
}
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AC3, mMIME)) {
+ int32_t numChannels;
+ int32_t sampleRate;
+ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+ CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+ status_t err = setAC3Format(numChannels, sampleRate);
+ if (err != OK) {
+ CODEC_LOGE("setAC3Format() failed (err = %d)", err);
+ return err;
+ }
} else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_ALAW, mMIME)
|| !strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_MLAW, mMIME)) {
// These are PCM-like formats with a fixed sample rate but
// a variable number of channels.
+ int32_t sampleRate;
int32_t numChannels;
CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+ if (!meta->findInt32(kKeySampleRate, &sampleRate)) {
+ sampleRate = 8000;
+ }
- setG711Format(numChannels);
+ setG711Format(sampleRate, numChannels);
} else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mMIME)) {
CHECK(!mIsEncoder);
@@ -666,7 +766,7 @@ status_t OMXCodec::setVideoPortFormatType(
// CHECK_EQ(format.nIndex, index);
#if 1
- CODEC_LOGV("portIndex: %ld, index: %ld, eCompressionFormat=%d eColorFormat=%d",
+ CODEC_LOGV("portIndex: %u, index: %u, eCompressionFormat=%d eColorFormat=%d",
portIndex,
index, format.eCompressionFormat, format.eColorFormat);
#endif
@@ -722,6 +822,7 @@ static size_t getFrameSize(
CHECK(!"Should not be here. Unsupported color format.");
break;
}
+ return 0;
}
status_t OMXCodec::findTargetColorFormat(
@@ -768,7 +869,7 @@ status_t OMXCodec::isColorFormatSupported(
portFormat.nIndex = index;
if (index >= kMaxColorFormatSupported) {
- CODEC_LOGE("More than %ld color formats are supported???", index);
+ CODEC_LOGE("More than %u color formats are supported???", index);
break;
}
}
@@ -793,6 +894,8 @@ void OMXCodec::setVideoInputFormat(
OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
compressionFormat = OMX_VIDEO_CodingAVC;
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+ compressionFormat = OMX_VIDEO_CodingHEVC;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
compressionFormat = OMX_VIDEO_CodingMPEG4;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
@@ -897,7 +1000,6 @@ static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
return 0;
}
OMX_U32 ret = frameRate * iFramesInterval - 1;
- CHECK(ret > 1);
return ret;
}
@@ -1188,6 +1290,8 @@ status_t OMXCodec::setVideoOutputFormat(
compressionFormat = OMX_VIDEO_CodingAVC;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
compressionFormat = OMX_VIDEO_CodingMPEG4;
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+ compressionFormat = OMX_VIDEO_CodingHEVC;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
compressionFormat = OMX_VIDEO_CodingH263;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP8, mime)) {
@@ -1374,12 +1478,16 @@ void OMXCodec::setComponentRole(
"audio_decoder.aac", "audio_encoder.aac" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS,
"audio_decoder.vorbis", "audio_encoder.vorbis" },
+ { MEDIA_MIMETYPE_AUDIO_OPUS,
+ "audio_decoder.opus", "audio_encoder.opus" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW,
"audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW,
"audio_decoder.g711alaw", "audio_encoder.g711alaw" },
{ MEDIA_MIMETYPE_VIDEO_AVC,
"video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_HEVC,
+ "video_decoder.hevc", "video_encoder.hevc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4,
"video_decoder.mpeg4", "video_encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_H263,
@@ -1394,6 +1502,10 @@ void OMXCodec::setComponentRole(
"audio_decoder.flac", "audio_encoder.flac" },
{ MEDIA_MIMETYPE_AUDIO_MSGSM,
"audio_decoder.gsm", "audio_encoder.gsm" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG2,
+ "video_decoder.mpeg2", "video_encoder.mpeg2" },
+ { MEDIA_MIMETYPE_AUDIO_AC3,
+ "audio_decoder.ac3", "audio_encoder.ac3" },
};
static const size_t kNumMimeToRole =
@@ -1445,7 +1557,7 @@ OMXCodec::~OMXCodec() {
status_t err = mOMX->freeNode(mNode);
CHECK_EQ(err, (status_t)OK);
- mNode = NULL;
+ mNode = 0;
setState(DEAD);
clearCodecSpecificData();
@@ -1598,15 +1710,15 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
info.mMediaBuffer = NULL;
if (portIndex == kPortIndexOutput) {
- if (!(mOMXLivesLocally
- && (mQuirks & kRequiresAllocateBufferOnOutputPorts)
- && (mQuirks & kDefersOutputBufferAllocation))) {
- // If the node does not fill in the buffer ptr at this time,
- // we will defer creating the MediaBuffer until receiving
- // the first FILL_BUFFER_DONE notification instead.
- info.mMediaBuffer = new MediaBuffer(info.mData, info.mSize);
- info.mMediaBuffer->setObserver(this);
- }
+            // Deferring MediaBuffer creation until FILL_BUFFER_DONE is a legacy
+            // mode that is no longer supported; abort if a component still needs it.
+ LOG_ALWAYS_FATAL_IF((mOMXLivesLocally
+ && (mQuirks & kRequiresAllocateBufferOnOutputPorts)
+ && (mQuirks & kDefersOutputBufferAllocation)),
+ "allocateBuffersOnPort cannot defer buffer allocation");
+
+ info.mMediaBuffer = new MediaBuffer(info.mData, info.mSize);
+ info.mMediaBuffer->setObserver(this);
}
mPortBuffers[portIndex].push(info);
@@ -1777,21 +1889,42 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
strerror(-err), -err);
return err;
}
-
- // XXX: Is this the right logic to use? It's not clear to me what the OMX
- // buffer counts refer to - how do they account for the renderer holding on
- // to buffers?
- if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) {
- OMX_U32 newBufferCount = def.nBufferCountMin + minUndequeuedBufs;
+ // FIXME: assume that surface is controlled by app (native window
+ // returns the number for the case when surface is not controlled by app)
+    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
+ // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
+
+ // Use conservative allocation while also trying to reduce starvation
+ //
+ // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
+ // minimum needed for the consumer to be able to work
+ // 2. try to allocate two (2) additional buffers to reduce starvation from
+ // the consumer
+ // plus an extra buffer to account for incorrect minUndequeuedBufs
+ CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
+ def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
+
+ for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
+ OMX_U32 newBufferCount =
+ def.nBufferCountMin + minUndequeuedBufs + extraBuffers;
def.nBufferCountActual = newBufferCount;
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
- if (err != OK) {
- CODEC_LOGE("setting nBufferCountActual to %lu failed: %d",
- newBufferCount, err);
+
+ if (err == OK) {
+ minUndequeuedBufs += extraBuffers;
+ break;
+ }
+
+ CODEC_LOGW("setting nBufferCountActual to %u failed: %d",
+ newBufferCount, err);
+ /* exit condition */
+ if (extraBuffers == 0) {
return err;
}
}
+ CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
+ def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
err = native_window_set_buffer_count(
mNativeWindow.get(), def.nBufferCountActual);
@@ -1801,7 +1934,7 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
return err;
}
- CODEC_LOGV("allocating %lu buffers from a native window of size %lu on "
+ CODEC_LOGV("allocating %u buffers from a native window of size %u on "
"output port", def.nBufferCountActual, def.nBufferSize);
// Dequeue buffers and send them to OMX
@@ -1834,7 +1967,7 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
mPortBuffers[kPortIndexOutput].editItemAt(i).mBuffer = bufferId;
- CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)",
+ CODEC_LOGV("registered graphic buffer with ID %u (pointer = %p)",
bufferId, graphicBuffer.get());
}
@@ -1861,7 +1994,7 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
- CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer);
+ CODEC_LOGV("Calling cancelBuffer on buffer %u", info->mBuffer);
int err = mNativeWindow->cancelBuffer(
mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get(), -1);
if (err != 0) {
@@ -1877,7 +2010,6 @@ status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
// Dequeue the next buffer from the native window.
ANativeWindowBuffer* buf;
- int fenceFd = -1;
int err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
if (err != 0) {
CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
@@ -1982,7 +2114,6 @@ status_t OMXCodec::pushBlankBuffersToNativeWindow() {
    // on the screen and then been replaced, so any previous video frames are
// guaranteed NOT to be currently displayed.
for (int i = 0; i < numBufs + 1; i++) {
- int fenceFd = -1;
err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb);
if (err != NO_ERROR) {
ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)",
@@ -2099,7 +2230,7 @@ void OMXCodec::on_message(const omx_message &msg) {
{
IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
- CODEC_LOGV("EMPTY_BUFFER_DONE(buffer: %p)", buffer);
+ CODEC_LOGV("EMPTY_BUFFER_DONE(buffer: %u)", buffer);
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
size_t i = 0;
@@ -2109,7 +2240,7 @@ void OMXCodec::on_message(const omx_message &msg) {
CHECK(i < buffers->size());
if ((*buffers)[i].mStatus != OWNED_BY_COMPONENT) {
- ALOGW("We already own input buffer %p, yet received "
+ ALOGW("We already own input buffer %u, yet received "
"an EMPTY_BUFFER_DONE.", buffer);
}
@@ -2123,7 +2254,7 @@ void OMXCodec::on_message(const omx_message &msg) {
}
if (mPortStatus[kPortIndexInput] == DISABLING) {
- CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
+ CODEC_LOGV("Port is disabled, freeing buffer %u", buffer);
status_t err = freeBuffer(kPortIndexInput, i);
CHECK_EQ(err, (status_t)OK);
@@ -2145,7 +2276,7 @@ void OMXCodec::on_message(const omx_message &msg) {
IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
OMX_U32 flags = msg.u.extended_buffer_data.flags;
- CODEC_LOGV("FILL_BUFFER_DONE(buffer: %p, size: %ld, flags: 0x%08lx, timestamp: %lld us (%.2f secs))",
+ CODEC_LOGV("FILL_BUFFER_DONE(buffer: %u, size: %u, flags: 0x%08x, timestamp: %lld us (%.2f secs))",
buffer,
msg.u.extended_buffer_data.range_length,
flags,
@@ -2162,14 +2293,14 @@ void OMXCodec::on_message(const omx_message &msg) {
BufferInfo *info = &buffers->editItemAt(i);
if (info->mStatus != OWNED_BY_COMPONENT) {
- ALOGW("We already own output buffer %p, yet received "
+ ALOGW("We already own output buffer %u, yet received "
"a FILL_BUFFER_DONE.", buffer);
}
info->mStatus = OWNED_BY_US;
if (mPortStatus[kPortIndexOutput] == DISABLING) {
- CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
+ CODEC_LOGV("Port is disabled, freeing buffer %u", buffer);
status_t err = freeBuffer(kPortIndexOutput, i);
CHECK_EQ(err, (status_t)OK);
@@ -2184,22 +2315,6 @@ void OMXCodec::on_message(const omx_message &msg) {
} else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
- if (info->mMediaBuffer == NULL) {
- CHECK(mOMXLivesLocally);
- CHECK(mQuirks & kRequiresAllocateBufferOnOutputPorts);
- CHECK(mQuirks & kDefersOutputBufferAllocation);
-
- // The qcom video decoders on Nexus don't actually allocate
- // output buffer memory on a call to OMX_AllocateBuffer
- // the "pBuffer" member of the OMX_BUFFERHEADERTYPE
- // structure is only filled in later.
-
- info->mMediaBuffer = new MediaBuffer(
- msg.u.extended_buffer_data.data_ptr,
- info->mSize);
- info->mMediaBuffer->setObserver(this);
- }
-
MediaBuffer *buffer = info->mMediaBuffer;
bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
@@ -2234,11 +2349,7 @@ void OMXCodec::on_message(const omx_message &msg) {
buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
}
- buffer->meta_data()->setPointer(
- kKeyPlatformPrivate,
- msg.u.extended_buffer_data.platform_private);
-
- buffer->meta_data()->setPointer(
+ buffer->meta_data()->setInt32(
kKeyBufferID,
msg.u.extended_buffer_data.buffer);
@@ -2380,7 +2491,7 @@ void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
case OMX_EventError:
{
- CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2);
+ CODEC_LOGE("OMX_EventError(0x%08x, %u)", data1, data2);
setState(ERROR);
break;
@@ -2388,16 +2499,10 @@ void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
case OMX_EventPortSettingsChanged:
{
- CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)",
+ CODEC_LOGV("OMX_EventPortSettingsChanged(port=%u, data2=0x%08x)",
data1, data2);
if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
- // There is no need to check whether mFilledBuffers is empty or not
- // when the OMX_EventPortSettingsChanged is not meant for reallocating
- // the output buffers.
- if (data1 == kPortIndexOutput) {
- CHECK(mFilledBuffers.empty());
- }
onPortSettingsChanged(data1);
} else if (data1 == kPortIndexOutput &&
(data2 == OMX_IndexConfigCommonOutputCrop ||
@@ -2428,7 +2533,7 @@ void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
// The scale is in 16.16 format.
// scale 1.0 = 0x010000. When there is no
// need to change the display, skip it.
- ALOGV("Get OMX_IndexConfigScale: 0x%lx/0x%lx",
+ ALOGV("Get OMX_IndexConfigScale: 0x%x/0x%x",
scale.xWidth, scale.xHeight);
if (scale.xWidth != 0x010000) {
@@ -2462,7 +2567,7 @@ void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
default:
{
- CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2);
+ CODEC_LOGV("EVENT(%d, %u, %u)", event, data1, data2);
break;
}
}
@@ -2479,7 +2584,7 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
case OMX_CommandPortDisable:
{
OMX_U32 portIndex = data;
- CODEC_LOGV("PORT_DISABLED(%ld)", portIndex);
+ CODEC_LOGV("PORT_DISABLED(%u)", portIndex);
CHECK(mState == EXECUTING || mState == RECONFIGURING);
CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLING);
@@ -2503,7 +2608,7 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
status_t err = enablePortAsync(portIndex);
if (err != OK) {
- CODEC_LOGE("enablePortAsync(%ld) failed (err = %d)", portIndex, err);
+ CODEC_LOGE("enablePortAsync(%u) failed (err = %d)", portIndex, err);
setState(ERROR);
} else {
err = allocateBuffersOnPort(portIndex);
@@ -2524,7 +2629,7 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
case OMX_CommandPortEnable:
{
OMX_U32 portIndex = data;
- CODEC_LOGV("PORT_ENABLED(%ld)", portIndex);
+ CODEC_LOGV("PORT_ENABLED(%u)", portIndex);
CHECK(mState == EXECUTING || mState == RECONFIGURING);
CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLING);
@@ -2545,7 +2650,7 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
{
OMX_U32 portIndex = data;
- CODEC_LOGV("FLUSH_DONE(%ld)", portIndex);
+ CODEC_LOGV("FLUSH_DONE(%u)", portIndex);
CHECK_EQ((int)mPortStatus[portIndex], (int)SHUTTING_DOWN);
mPortStatus[portIndex] = ENABLED;
@@ -2791,7 +2896,7 @@ status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {
void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
CODEC_LOGV("PORT_SETTINGS_CHANGED(%ld)", portIndex);
- CHECK_EQ((int)mState, (int)EXECUTING);
+ CHECK(mState == EXECUTING || mState == EXECUTING_TO_IDLE);
CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
CHECK(!mOutputPortSettingsChangedPending);
@@ -2973,7 +3078,8 @@ bool OMXCodec::drainInputBuffer(BufferInfo *info) {
size_t size = specific->mSize;
- if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mMIME)
+ if ((!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mMIME) ||
+ !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mMIME))
&& !(mQuirks & kWantsNALFragments)) {
static const uint8_t kNALStartCode[4] =
{ 0x00, 0x00, 0x00, 0x01 };
@@ -3489,9 +3595,34 @@ status_t OMXCodec::setAACFormat(
return OK;
}
-void OMXCodec::setG711Format(int32_t numChannels) {
+status_t OMXCodec::setAC3Format(int32_t numChannels, int32_t sampleRate) {
+ OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexInput;
+
+ status_t err = mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &def,
+ sizeof(def));
+
+ if (err != OK) {
+ return err;
+ }
+
+ def.nChannels = numChannels;
+ def.nSampleRate = sampleRate;
+
+ return mOMX->setParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
+ &def,
+ sizeof(def));
+}
+
+void OMXCodec::setG711Format(int32_t sampleRate, int32_t numChannels) {
CHECK(!mIsEncoder);
- setRawAudioFormat(kPortIndexInput, 8000, numChannels);
+ setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
}
void OMXCodec::setImageOutputFormat(
@@ -3838,7 +3969,7 @@ status_t OMXCodec::read(
return UNKNOWN_ERROR;
}
- CODEC_LOGV("seeking to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
+ CODEC_LOGV("seeking to %" PRId64 " us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
mSignalledEOS = false;
@@ -3950,219 +4081,6 @@ void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
CHECK(!"should not be here.");
}
-static const char *imageCompressionFormatString(OMX_IMAGE_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_IMAGE_CodingUnused",
- "OMX_IMAGE_CodingAutoDetect",
- "OMX_IMAGE_CodingJPEG",
- "OMX_IMAGE_CodingJPEG2K",
- "OMX_IMAGE_CodingEXIF",
- "OMX_IMAGE_CodingTIFF",
- "OMX_IMAGE_CodingGIF",
- "OMX_IMAGE_CodingPNG",
- "OMX_IMAGE_CodingLZW",
- "OMX_IMAGE_CodingBMP",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *colorFormatString(OMX_COLOR_FORMATTYPE type) {
- static const char *kNames[] = {
- "OMX_COLOR_FormatUnused",
- "OMX_COLOR_FormatMonochrome",
- "OMX_COLOR_Format8bitRGB332",
- "OMX_COLOR_Format12bitRGB444",
- "OMX_COLOR_Format16bitARGB4444",
- "OMX_COLOR_Format16bitARGB1555",
- "OMX_COLOR_Format16bitRGB565",
- "OMX_COLOR_Format16bitBGR565",
- "OMX_COLOR_Format18bitRGB666",
- "OMX_COLOR_Format18bitARGB1665",
- "OMX_COLOR_Format19bitARGB1666",
- "OMX_COLOR_Format24bitRGB888",
- "OMX_COLOR_Format24bitBGR888",
- "OMX_COLOR_Format24bitARGB1887",
- "OMX_COLOR_Format25bitARGB1888",
- "OMX_COLOR_Format32bitBGRA8888",
- "OMX_COLOR_Format32bitARGB8888",
- "OMX_COLOR_FormatYUV411Planar",
- "OMX_COLOR_FormatYUV411PackedPlanar",
- "OMX_COLOR_FormatYUV420Planar",
- "OMX_COLOR_FormatYUV420PackedPlanar",
- "OMX_COLOR_FormatYUV420SemiPlanar",
- "OMX_COLOR_FormatYUV422Planar",
- "OMX_COLOR_FormatYUV422PackedPlanar",
- "OMX_COLOR_FormatYUV422SemiPlanar",
- "OMX_COLOR_FormatYCbYCr",
- "OMX_COLOR_FormatYCrYCb",
- "OMX_COLOR_FormatCbYCrY",
- "OMX_COLOR_FormatCrYCbY",
- "OMX_COLOR_FormatYUV444Interleaved",
- "OMX_COLOR_FormatRawBayer8bit",
- "OMX_COLOR_FormatRawBayer10bit",
- "OMX_COLOR_FormatRawBayer8bitcompressed",
- "OMX_COLOR_FormatL2",
- "OMX_COLOR_FormatL4",
- "OMX_COLOR_FormatL8",
- "OMX_COLOR_FormatL16",
- "OMX_COLOR_FormatL24",
- "OMX_COLOR_FormatL32",
- "OMX_COLOR_FormatYUV420PackedSemiPlanar",
- "OMX_COLOR_FormatYUV422PackedSemiPlanar",
- "OMX_COLOR_Format18BitBGR666",
- "OMX_COLOR_Format24BitARGB6666",
- "OMX_COLOR_Format24BitABGR6666",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
- return "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar";
- } else if (type == OMX_QCOM_COLOR_FormatYVU420SemiPlanar) {
- return "OMX_QCOM_COLOR_FormatYVU420SemiPlanar";
- } else if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *videoCompressionFormatString(OMX_VIDEO_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_VIDEO_CodingUnused",
- "OMX_VIDEO_CodingAutoDetect",
- "OMX_VIDEO_CodingMPEG2",
- "OMX_VIDEO_CodingH263",
- "OMX_VIDEO_CodingMPEG4",
- "OMX_VIDEO_CodingWMV",
- "OMX_VIDEO_CodingRV",
- "OMX_VIDEO_CodingAVC",
- "OMX_VIDEO_CodingMJPEG",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *audioCodingTypeString(OMX_AUDIO_CODINGTYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_CodingUnused",
- "OMX_AUDIO_CodingAutoDetect",
- "OMX_AUDIO_CodingPCM",
- "OMX_AUDIO_CodingADPCM",
- "OMX_AUDIO_CodingAMR",
- "OMX_AUDIO_CodingGSMFR",
- "OMX_AUDIO_CodingGSMEFR",
- "OMX_AUDIO_CodingGSMHR",
- "OMX_AUDIO_CodingPDCFR",
- "OMX_AUDIO_CodingPDCEFR",
- "OMX_AUDIO_CodingPDCHR",
- "OMX_AUDIO_CodingTDMAFR",
- "OMX_AUDIO_CodingTDMAEFR",
- "OMX_AUDIO_CodingQCELP8",
- "OMX_AUDIO_CodingQCELP13",
- "OMX_AUDIO_CodingEVRC",
- "OMX_AUDIO_CodingSMV",
- "OMX_AUDIO_CodingG711",
- "OMX_AUDIO_CodingG723",
- "OMX_AUDIO_CodingG726",
- "OMX_AUDIO_CodingG729",
- "OMX_AUDIO_CodingAAC",
- "OMX_AUDIO_CodingMP3",
- "OMX_AUDIO_CodingSBC",
- "OMX_AUDIO_CodingVORBIS",
- "OMX_AUDIO_CodingWMA",
- "OMX_AUDIO_CodingRA",
- "OMX_AUDIO_CodingMIDI",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *audioPCMModeString(OMX_AUDIO_PCMMODETYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_PCMModeLinear",
- "OMX_AUDIO_PCMModeALaw",
- "OMX_AUDIO_PCMModeMULaw",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *amrBandModeString(OMX_AUDIO_AMRBANDMODETYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_AMRBandModeUnused",
- "OMX_AUDIO_AMRBandModeNB0",
- "OMX_AUDIO_AMRBandModeNB1",
- "OMX_AUDIO_AMRBandModeNB2",
- "OMX_AUDIO_AMRBandModeNB3",
- "OMX_AUDIO_AMRBandModeNB4",
- "OMX_AUDIO_AMRBandModeNB5",
- "OMX_AUDIO_AMRBandModeNB6",
- "OMX_AUDIO_AMRBandModeNB7",
- "OMX_AUDIO_AMRBandModeWB0",
- "OMX_AUDIO_AMRBandModeWB1",
- "OMX_AUDIO_AMRBandModeWB2",
- "OMX_AUDIO_AMRBandModeWB3",
- "OMX_AUDIO_AMRBandModeWB4",
- "OMX_AUDIO_AMRBandModeWB5",
- "OMX_AUDIO_AMRBandModeWB6",
- "OMX_AUDIO_AMRBandModeWB7",
- "OMX_AUDIO_AMRBandModeWB8",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
-static const char *amrFrameFormatString(OMX_AUDIO_AMRFRAMEFORMATTYPE type) {
- static const char *kNames[] = {
- "OMX_AUDIO_AMRFrameFormatConformance",
- "OMX_AUDIO_AMRFrameFormatIF1",
- "OMX_AUDIO_AMRFrameFormatIF2",
- "OMX_AUDIO_AMRFrameFormatFSF",
- "OMX_AUDIO_AMRFrameFormatRTPPayload",
- "OMX_AUDIO_AMRFrameFormatITU",
- };
-
- size_t numNames = sizeof(kNames) / sizeof(kNames[0]);
-
- if (type < 0 || (size_t)type >= numNames) {
- return "UNKNOWN";
- } else {
- return kNames[type];
- }
-}
-
void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
@@ -4177,9 +4095,9 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
CHECK((portIndex == kPortIndexInput && def.eDir == OMX_DirInput)
|| (portIndex == kPortIndexOutput && def.eDir == OMX_DirOutput));
- printf(" nBufferCountActual = %ld\n", def.nBufferCountActual);
- printf(" nBufferCountMin = %ld\n", def.nBufferCountMin);
- printf(" nBufferSize = %ld\n", def.nBufferSize);
+ printf(" nBufferCountActual = %" PRIu32 "\n", def.nBufferCountActual);
+ printf(" nBufferCountMin = %" PRIu32 "\n", def.nBufferCountMin);
+ printf(" nBufferSize = %" PRIu32 "\n", def.nBufferSize);
switch (def.eDomain) {
case OMX_PortDomainImage:
@@ -4188,15 +4106,15 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("\n");
printf(" // Image\n");
- printf(" nFrameWidth = %ld\n", imageDef->nFrameWidth);
- printf(" nFrameHeight = %ld\n", imageDef->nFrameHeight);
- printf(" nStride = %ld\n", imageDef->nStride);
+ printf(" nFrameWidth = %" PRIu32 "\n", imageDef->nFrameWidth);
+ printf(" nFrameHeight = %" PRIu32 "\n", imageDef->nFrameHeight);
+ printf(" nStride = %" PRIu32 "\n", imageDef->nStride);
printf(" eCompressionFormat = %s\n",
- imageCompressionFormatString(imageDef->eCompressionFormat));
+ asString(imageDef->eCompressionFormat));
printf(" eColorFormat = %s\n",
- colorFormatString(imageDef->eColorFormat));
+ asString(imageDef->eColorFormat));
break;
}
@@ -4207,15 +4125,15 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("\n");
printf(" // Video\n");
- printf(" nFrameWidth = %ld\n", videoDef->nFrameWidth);
- printf(" nFrameHeight = %ld\n", videoDef->nFrameHeight);
- printf(" nStride = %ld\n", videoDef->nStride);
+ printf(" nFrameWidth = %" PRIu32 "\n", videoDef->nFrameWidth);
+ printf(" nFrameHeight = %" PRIu32 "\n", videoDef->nFrameHeight);
+ printf(" nStride = %" PRIu32 "\n", videoDef->nStride);
printf(" eCompressionFormat = %s\n",
- videoCompressionFormatString(videoDef->eCompressionFormat));
+ asString(videoDef->eCompressionFormat));
printf(" eColorFormat = %s\n",
- colorFormatString(videoDef->eColorFormat));
+ asString(videoDef->eColorFormat));
break;
}
@@ -4227,7 +4145,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("\n");
printf(" // Audio\n");
printf(" eEncoding = %s\n",
- audioCodingTypeString(audioDef->eEncoding));
+ asString(audioDef->eEncoding));
if (audioDef->eEncoding == OMX_AUDIO_CodingPCM) {
OMX_AUDIO_PARAM_PCMMODETYPE params;
@@ -4238,16 +4156,16 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
CHECK_EQ(err, (status_t)OK);
- printf(" nSamplingRate = %ld\n", params.nSamplingRate);
- printf(" nChannels = %ld\n", params.nChannels);
+ printf(" nSamplingRate = %" PRIu32 "\n", params.nSamplingRate);
+ printf(" nChannels = %" PRIu32 "\n", params.nChannels);
printf(" bInterleaved = %d\n", params.bInterleaved);
- printf(" nBitPerSample = %ld\n", params.nBitPerSample);
+ printf(" nBitPerSample = %" PRIu32 "\n", params.nBitPerSample);
printf(" eNumData = %s\n",
params.eNumData == OMX_NumericalDataSigned
? "signed" : "unsigned");
- printf(" ePCMMode = %s\n", audioPCMModeString(params.ePCMMode));
+ printf(" ePCMMode = %s\n", asString(params.ePCMMode));
} else if (audioDef->eEncoding == OMX_AUDIO_CodingAMR) {
OMX_AUDIO_PARAM_AMRTYPE amr;
InitOMXParams(&amr);
@@ -4257,11 +4175,11 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
CHECK_EQ(err, (status_t)OK);
- printf(" nChannels = %ld\n", amr.nChannels);
+ printf(" nChannels = %" PRIu32 "\n", amr.nChannels);
printf(" eAMRBandMode = %s\n",
- amrBandModeString(amr.eAMRBandMode));
+ asString(amr.eAMRBandMode));
printf(" eAMRFrameFormat = %s\n",
- amrFrameFormatString(amr.eAMRFrameFormat));
+ asString(amr.eAMRFrameFormat));
}
break;
@@ -4422,6 +4340,17 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
mOutputFormat->setInt32(kKeyChannelCount, numChannels);
mOutputFormat->setInt32(kKeySampleRate, sampleRate);
mOutputFormat->setInt32(kKeyBitRate, bitRate);
+ } else if (audio_def->eEncoding ==
+ (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidAC3) {
+ mOutputFormat->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AC3);
+ int32_t numChannels, sampleRate, bitRate;
+ inputFormat->findInt32(kKeyChannelCount, &numChannels);
+ inputFormat->findInt32(kKeySampleRate, &sampleRate);
+ inputFormat->findInt32(kKeyBitRate, &bitRate);
+ mOutputFormat->setInt32(kKeyChannelCount, numChannels);
+ mOutputFormat->setInt32(kKeySampleRate, sampleRate);
+ mOutputFormat->setInt32(kKeyBitRate, bitRate);
} else {
CHECK(!"Should not be here. Unknown audio encoding.");
}
@@ -4559,12 +4488,7 @@ status_t QueryCodec(
const char *componentName, const char *mime,
bool isEncoder,
CodecCapabilities *caps) {
- if (strncmp(componentName, "OMX.", 4)) {
- // Not an OpenMax component but a software codec.
- caps->mFlags = 0;
- caps->mComponentName = componentName;
- return OK;
- }
+ bool isVideo = !strncasecmp(mime, "video/", 6);
sp<OMXCodecObserver> observer = new OMXCodecObserver;
IOMX::node_id node;
@@ -4579,42 +4503,63 @@ status_t QueryCodec(
caps->mFlags = 0;
caps->mComponentName = componentName;
- OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
- InitOMXParams(&param);
+ // NOTE: OMX does not provide a way to query AAC profile support
+ if (isVideo) {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
+ InitOMXParams(&param);
- param.nPortIndex = !isEncoder ? 0 : 1;
+ param.nPortIndex = !isEncoder ? 0 : 1;
- for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
- err = omx->getParameter(
- node, OMX_IndexParamVideoProfileLevelQuerySupported,
- &param, sizeof(param));
+ for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoProfileLevelQuerySupported,
+ &param, sizeof(param));
- if (err != OK) {
- break;
- }
+ if (err != OK) {
+ break;
+ }
- CodecProfileLevel profileLevel;
- profileLevel.mProfile = param.eProfile;
- profileLevel.mLevel = param.eLevel;
+ CodecProfileLevel profileLevel;
+ profileLevel.mProfile = param.eProfile;
+ profileLevel.mLevel = param.eLevel;
- caps->mProfileLevels.push(profileLevel);
- }
+ caps->mProfileLevels.push(profileLevel);
+ }
- // Color format query
- OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
- InitOMXParams(&portFormat);
- portFormat.nPortIndex = !isEncoder ? 1 : 0;
- for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
- err = omx->getParameter(
- node, OMX_IndexParamVideoPortFormat,
- &portFormat, sizeof(portFormat));
- if (err != OK) {
- break;
+ // Color format query
+            // Return color formats in the order reported by the OMX component,
+            // preceding each standard format that has a flexible equivalent
+            // with that flexible format.
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ InitOMXParams(&portFormat);
+ portFormat.nPortIndex = !isEncoder ? 1 : 0;
+ for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoPortFormat,
+ &portFormat, sizeof(portFormat));
+ if (err != OK) {
+ break;
+ }
+
+ OMX_U32 flexibleEquivalent;
+ if (ACodec::isFlexibleColorFormat(
+ omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
+ &flexibleEquivalent)) {
+ bool marked = false;
+ for (size_t i = 0; i < caps->mColorFormats.size(); i++) {
+ if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
+ marked = true;
+ break;
+ }
+ }
+ if (!marked) {
+ caps->mColorFormats.push(flexibleEquivalent);
+ }
+ }
+ caps->mColorFormats.push(portFormat.eColorFormat);
}
- caps->mColorFormats.push(portFormat.eColorFormat);
}
- if (!isEncoder && !strncmp(mime, "video/", 6)) {
+ if (isVideo && !isEncoder) {
if (omx->storeMetaDataInBuffers(
node, 1 /* port index */, OMX_TRUE) == OK ||
omx->prepareForAdaptivePlayback(
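The printf changes in this file all follow one pattern: OMX_U32 is a fixed 32-bit type, so the old "%ld" specifier is wrong on LP64 builds. A minimal stand-alone sketch (not part of the patch) of the <inttypes.h> idiom the new lines rely on:

    #include <cinttypes>
    #include <cstdio>

    int main() {
        uint32_t nFrameWidth = 1920;   // stands in for an OMX_U32 field
        int64_t timeUs = 33366;        // stands in for a media timestamp

        // Portable on both ILP32 and LP64; "%ld" would mismatch uint32_t on LP64.
        printf("nFrameWidth = %" PRIu32 "\n", nFrameWidth);
        printf("timeUs = %" PRId64 " us\n", timeUs);
        return 0;
    }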
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 5e79e78..6e32494 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -38,6 +38,7 @@ extern "C" {
int _vorbis_unpack_books(vorbis_info *vi,oggpack_buffer *opb);
int _vorbis_unpack_info(vorbis_info *vi,oggpack_buffer *opb);
int _vorbis_unpack_comment(vorbis_comment *vc,oggpack_buffer *opb);
+ long vorbis_packet_blocksize(vorbis_info *vi,ogg_packet *op);
}
namespace android {
@@ -75,7 +76,7 @@ struct MyVorbisExtractor {
status_t seekToTime(int64_t timeUs);
status_t seekToOffset(off64_t offset);
- status_t readNextPacket(MediaBuffer **buffer);
+ status_t readNextPacket(MediaBuffer **buffer, bool conf);
status_t init();
@@ -84,6 +85,8 @@ struct MyVorbisExtractor {
private:
struct Page {
uint64_t mGranulePosition;
+ int32_t mPrevPacketSize;
+ uint64_t mPrevPacketPos;
uint32_t mSerialNo;
uint32_t mPageNo;
uint8_t mFlags;
@@ -121,6 +124,8 @@ private:
status_t verifyHeader(
MediaBuffer *buffer, uint8_t type);
+ int32_t packetBlockSize(MediaBuffer *buffer);
+
void parseFileMetaData();
status_t findPrevGranulePosition(off64_t pageOffset, uint64_t *granulePos);
@@ -151,7 +156,7 @@ sp<MetaData> OggSource::getFormat() {
return mExtractor->mImpl->getFormat();
}
-status_t OggSource::start(MetaData *params) {
+status_t OggSource::start(MetaData * /* params */) {
if (mStarted) {
return INVALID_OPERATION;
}
@@ -180,7 +185,7 @@ status_t OggSource::read(
}
MediaBuffer *packet;
- status_t err = mExtractor->mImpl->readNextPacket(&packet);
+ status_t err = mExtractor->mImpl->readNextPacket(&packet, /* conf = */ false);
if (err != OK) {
return err;
@@ -320,25 +325,29 @@ status_t MyVorbisExtractor::seekToTime(int64_t timeUs) {
}
size_t left = 0;
- size_t right = mTableOfContents.size();
- while (left < right) {
- size_t center = left / 2 + right / 2 + (left & right & 1);
+ size_t right_plus_one = mTableOfContents.size();
+ while (left < right_plus_one) {
+ size_t center = left + (right_plus_one - left) / 2;
const TOCEntry &entry = mTableOfContents.itemAt(center);
if (timeUs < entry.mTimeUs) {
- right = center;
+ right_plus_one = center;
} else if (timeUs > entry.mTimeUs) {
left = center + 1;
} else {
- left = right = center;
+ left = center;
break;
}
}
+ if (left == mTableOfContents.size()) {
+ --left;
+ }
+
const TOCEntry &entry = mTableOfContents.itemAt(left);
- ALOGV("seeking to entry %d / %d at offset %lld",
+ ALOGV("seeking to entry %zu / %zu at offset %lld",
left, mTableOfContents.size(), entry.mPageOffset);
return seekToOffset(entry.mPageOffset);
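The rewritten search above is a plain lower-bound over the table of contents: the `left + (right_plus_one - left) / 2` midpoint cannot overflow, unlike the old split-sum form, and the final clamp makes a seek past the last entry land on it. A compact stand-alone version of the same idea (illustrative only, not the extractor code):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Returns the index of the first entry whose time is >= targetUs,
    // clamped to the last entry when the target lies past the end.
    static size_t findSeekEntry(const std::vector<int64_t> &entryTimesUs,
                                int64_t targetUs) {
        size_t left = 0;
        size_t right_plus_one = entryTimesUs.size();
        while (left < right_plus_one) {
            size_t center = left + (right_plus_one - left) / 2;
            if (targetUs < entryTimesUs[center]) {
                right_plus_one = center;
            } else if (targetUs > entryTimesUs[center]) {
                left = center + 1;
            } else {
                return center;
            }
        }
        if (left == entryTimesUs.size() && left > 0) {
            --left;   // seek beyond the end: use the last entry
        }
        return left;
    }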
@@ -369,6 +378,7 @@ status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
mFirstPacketInPage = true;
mCurrentPageSamples = 0;
mCurrentPage.mNumSegments = 0;
+ mCurrentPage.mPrevPacketSize = -1;
mNextLaceIndex = 0;
// XXX what if new page continues packet from last???
@@ -381,7 +391,7 @@ ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
ssize_t n;
if ((n = mSource->readAt(offset, header, sizeof(header)))
< (ssize_t)sizeof(header)) {
- ALOGV("failed to read %d bytes at offset 0x%016llx, got %ld bytes",
+ ALOGV("failed to read %zu bytes at offset 0x%016llx, got %zd bytes",
sizeof(header), offset, n);
if (n < 0) {
@@ -447,7 +457,7 @@ ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
return sizeof(header) + page->mNumSegments + totalSize;
}
-status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
+status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out, bool conf) {
*out = NULL;
MediaBuffer *buffer = NULL;
@@ -485,16 +495,6 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
tmp->set_range(0, buffer->range_length());
buffer->release();
} else {
- // XXX Not only is this not technically the correct time for
- // this packet, we also stamp every packet in this page
- // with the same time. This needs fixing later.
-
- if (mVi.rate) {
- // Rate may not have been initialized yet if we're currently
- // reading the configuration packets...
- // Fortunately, the timestamp doesn't matter for those.
- timeUs = mCurrentPage.mGranulePosition * 1000000ll / mVi.rate;
- }
tmp->set_range(0, 0);
}
buffer = tmp;
@@ -505,7 +505,7 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
packetSize);
if (n < (ssize_t)packetSize) {
- ALOGV("failed to read %d bytes at 0x%016llx, got %ld bytes",
+ ALOGV("failed to read %zu bytes at 0x%016llx, got %zd bytes",
packetSize, dataOffset, n);
return ERROR_IO;
}
@@ -517,16 +517,32 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
if (gotFullPacket) {
// We've just read the entire packet.
- if (timeUs >= 0) {
- buffer->meta_data()->setInt64(kKeyTime, timeUs);
- }
-
if (mFirstPacketInPage) {
buffer->meta_data()->setInt32(
kKeyValidSamples, mCurrentPageSamples);
mFirstPacketInPage = false;
}
+ // ignore timestamp for configuration packets
+ if (!conf) {
+ int32_t curBlockSize = packetBlockSize(buffer);
+ if (mCurrentPage.mPrevPacketSize < 0) {
+ mCurrentPage.mPrevPacketSize = curBlockSize;
+ mCurrentPage.mPrevPacketPos =
+ mCurrentPage.mGranulePosition - mCurrentPageSamples;
+ timeUs = mCurrentPage.mPrevPacketPos * 1000000ll / mVi.rate;
+ } else {
+ // The effective block size is the average of the two overlapped blocks
+ int32_t actualBlockSize =
+ (curBlockSize + mCurrentPage.mPrevPacketSize) / 2;
+ timeUs = mCurrentPage.mPrevPacketPos * 1000000ll / mVi.rate;
+ // The actual size output by the decoder will be half the effective
+ // size, due to the overlap
+ mCurrentPage.mPrevPacketPos += actualBlockSize / 2;
+ mCurrentPage.mPrevPacketSize = curBlockSize;
+ }
+ buffer->meta_data()->setInt64(kKeyTime, timeUs);
+ }
*out = buffer;
return OK;
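For context on the timestamp bookkeeping added above: the granule position of an Ogg page gives the PCM position at the end of the page, consecutive Vorbis packets overlap, and the decoder emits half of the average of the two block sizes per packet. A simplified model of that arithmetic (an illustrative helper, not the shipped code; the block sizes would come from packetBlockSize()):

    #include <cstddef>
    #include <cstdint>

    // Stamps every packet of one page. blockSizes[0..count) are the Vorbis block
    // sizes of the packets, pageGranulePos is the PCM position at the page end,
    // samplesInPage is the number of samples the page contributes.
    static void stampPackets(const int32_t *blockSizes, size_t count,
                             uint64_t pageGranulePos, uint64_t samplesInPage,
                             uint32_t sampleRate, int64_t *timesUs /* count entries */) {
        uint64_t pos = pageGranulePos - samplesInPage;  // position of the first packet
        int32_t prevBlockSize = -1;
        for (size_t i = 0; i < count; ++i) {
            timesUs[i] = (int64_t)(pos * 1000000ll / sampleRate);
            if (prevBlockSize >= 0) {
                int32_t effectiveBlock = (blockSizes[i] + prevBlockSize) / 2;
                pos += (uint64_t)effectiveBlock / 2;  // decoder emits half the effective block
            }
            prevBlockSize = blockSizes[i];
        }
    }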
@@ -546,7 +562,7 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
buffer = NULL;
}
- ALOGV("readPage returned %ld", n);
+ ALOGV("readPage returned %zd", n);
return n < 0 ? n : (status_t)ERROR_END_OF_STREAM;
}
@@ -587,10 +603,10 @@ status_t MyVorbisExtractor::init() {
MediaBuffer *packet;
status_t err;
- if ((err = readNextPacket(&packet)) != OK) {
+ if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
return err;
}
- ALOGV("read packet of size %d\n", packet->range_length());
+ ALOGV("read packet of size %zu\n", packet->range_length());
err = verifyHeader(packet, 1);
packet->release();
packet = NULL;
@@ -598,10 +614,10 @@ status_t MyVorbisExtractor::init() {
return err;
}
- if ((err = readNextPacket(&packet)) != OK) {
+ if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
return err;
}
- ALOGV("read packet of size %d\n", packet->range_length());
+ ALOGV("read packet of size %zu\n", packet->range_length());
err = verifyHeader(packet, 3);
packet->release();
packet = NULL;
@@ -609,10 +625,10 @@ status_t MyVorbisExtractor::init() {
return err;
}
- if ((err = readNextPacket(&packet)) != OK) {
+ if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) {
return err;
}
- ALOGV("read packet of size %d\n", packet->range_length());
+ ALOGV("read packet of size %zu\n", packet->range_length());
err = verifyHeader(packet, 5);
packet->release();
packet = NULL;
@@ -682,6 +698,35 @@ void MyVorbisExtractor::buildTableOfContents() {
}
}
+int32_t MyVorbisExtractor::packetBlockSize(MediaBuffer *buffer) {
+ const uint8_t *data =
+ (const uint8_t *)buffer->data() + buffer->range_offset();
+
+ size_t size = buffer->range_length();
+
+ ogg_buffer buf;
+ buf.data = (uint8_t *)data;
+ buf.size = size;
+ buf.refcount = 1;
+ buf.ptr.owner = NULL;
+
+ ogg_reference ref;
+ ref.buffer = &buf;
+ ref.begin = 0;
+ ref.length = size;
+ ref.next = NULL;
+
+ ogg_packet pack;
+ pack.packet = &ref;
+ pack.bytes = ref.length;
+ pack.b_o_s = 0;
+ pack.e_o_s = 0;
+ pack.granulepos = 0;
+ pack.packetno = 0;
+
+ return vorbis_packet_blocksize(&mVi, &pack);
+}
+
status_t MyVorbisExtractor::verifyHeader(
MediaBuffer *buffer, uint8_t type) {
const uint8_t *data =
@@ -726,6 +771,10 @@ status_t MyVorbisExtractor::verifyHeader(
ALOGV("upper-bitrate = %ld", mVi.bitrate_upper);
ALOGV("nominal-bitrate = %ld", mVi.bitrate_nominal);
ALOGV("window-bitrate = %ld", mVi.bitrate_window);
+ ALOGV("blocksizes: %d/%d",
+ vorbis_info_blocksize(&mVi, 0),
+ vorbis_info_blocksize(&mVi, 1)
+ );
off64_t size;
if (mSource->getSize(&size) == OK) {
@@ -800,6 +849,7 @@ void parseVorbisComment(
{ "TRACKNUMBER", kKeyCDTrackNumber },
{ "DISCNUMBER", kKeyDiscNumber },
{ "DATE", kKeyDate },
+ { "YEAR", kKeyYear },
{ "LYRICIST", kKeyWriter },
{ "METADATA_BLOCK_PICTURE", kKeyAlbumArt },
{ "ANDROID_LOOP", kKeyAutoLoop },
@@ -903,7 +953,7 @@ static void extractAlbumArt(
return;
}
- ALOGV("got flac of size %d", flacSize);
+ ALOGV("got flac of size %zu", flacSize);
uint32_t picType;
uint32_t typeLen;
@@ -953,7 +1003,7 @@ static void extractAlbumArt(
goto exit;
}
- ALOGV("got image data, %d trailing bytes",
+ ALOGV("got image data, %zu trailing bytes",
flacSize - 32 - typeLen - descLen - dataLen);
fileMeta->setData(
@@ -998,7 +1048,7 @@ sp<MediaSource> OggExtractor::getTrack(size_t index) {
}
sp<MetaData> OggExtractor::getTrackMetaData(
- size_t index, uint32_t flags) {
+ size_t index, uint32_t /* flags */) {
if (index >= 1) {
return NULL;
}
diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp
index eae721b..2748349 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/libstagefright/SampleIterator.cpp
@@ -133,7 +133,8 @@ status_t SampleIterator::seekTo(uint32_t sampleIndex) {
}
status_t err;
- if ((err = findSampleTime(sampleIndex, &mCurrentSampleTime)) != OK) {
+ if ((err = findSampleTimeAndDuration(
+ sampleIndex, &mCurrentSampleTime, &mCurrentSampleDuration)) != OK) {
ALOGE("findSampleTime return error");
return err;
}
@@ -285,8 +286,8 @@ status_t SampleIterator::getSampleSizeDirect(
return OK;
}
-status_t SampleIterator::findSampleTime(
- uint32_t sampleIndex, uint32_t *time) {
+status_t SampleIterator::findSampleTimeAndDuration(
+ uint32_t sampleIndex, uint32_t *time, uint32_t *duration) {
if (sampleIndex >= mTable->mNumSampleSizes) {
return ERROR_OUT_OF_RANGE;
}
@@ -309,6 +310,8 @@ status_t SampleIterator::findSampleTime(
*time += mTable->getCompositionTimeOffset(sampleIndex);
+ *duration = mTTSDuration;
+
return OK;
}
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index d9858d7..bdd6d56 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -330,6 +330,10 @@ status_t SampleTable::setTimeToSampleParams(
}
mTimeToSampleCount = U32_AT(&header[4]);
+ uint64_t allocSize = mTimeToSampleCount * 2 * sizeof(uint32_t);
+ if (allocSize > SIZE_MAX) {
+ return ERROR_OUT_OF_RANGE;
+ }
mTimeToSample = new uint32_t[mTimeToSampleCount * 2];
size_t size = sizeof(uint32_t) * mTimeToSampleCount * 2;
@@ -372,6 +376,11 @@ status_t SampleTable::setCompositionTimeToSampleParams(
}
mNumCompositionTimeDeltaEntries = numEntries;
+ uint64_t allocSize = numEntries * 2 * sizeof(uint32_t);
+ if (allocSize > SIZE_MAX) {
+ return ERROR_OUT_OF_RANGE;
+ }
+
mCompositionTimeDeltaEntries = new uint32_t[2 * numEntries];
if (mDataSource->readAt(
@@ -417,6 +426,11 @@ status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size)
ALOGV("Table of sync samples is empty or has only a single entry!");
}
+ uint64_t allocSize = mNumSyncSamples * sizeof(uint32_t);
+ if (allocSize > SIZE_MAX) {
+ return ERROR_OUT_OF_RANGE;
+ }
+
mSyncSamples = new uint32_t[mNumSyncSamples];
size_t size = mNumSyncSamples * sizeof(uint32_t);
if (mDataSource->readAt(mSyncSampleOffset + 8, mSyncSamples, size)
@@ -520,83 +534,72 @@ void SampleTable::buildSampleEntriesTable() {
}
status_t SampleTable::findSampleAtTime(
- uint32_t req_time, uint32_t *sample_index, uint32_t flags) {
+ uint64_t req_time, uint64_t scale_num, uint64_t scale_den,
+ uint32_t *sample_index, uint32_t flags) {
buildSampleEntriesTable();
uint32_t left = 0;
- uint32_t right = mNumSampleSizes;
- while (left < right) {
- uint32_t center = (left + right) / 2;
- uint32_t centerTime = mSampleTimeEntries[center].mCompositionTime;
+ uint32_t right_plus_one = mNumSampleSizes;
+ while (left < right_plus_one) {
+ uint32_t center = left + (right_plus_one - left) / 2;
+ uint64_t centerTime =
+ getSampleTime(center, scale_num, scale_den);
if (req_time < centerTime) {
- right = center;
+ right_plus_one = center;
} else if (req_time > centerTime) {
left = center + 1;
} else {
- left = center;
- break;
+ *sample_index = mSampleTimeEntries[center].mSampleIndex;
+ return OK;
}
}
- if (left == mNumSampleSizes) {
+ uint32_t closestIndex = left;
+
+ if (closestIndex == mNumSampleSizes) {
if (flags == kFlagAfter) {
return ERROR_OUT_OF_RANGE;
}
-
- --left;
+ flags = kFlagBefore;
+ } else if (closestIndex == 0) {
+ if (flags == kFlagBefore) {
+ // normally we should return out of range, but that is
+ // treated as end-of-stream. instead return first sample
+ //
+ // return ERROR_OUT_OF_RANGE;
+ }
+ flags = kFlagAfter;
}
- uint32_t closestIndex = left;
-
switch (flags) {
case kFlagBefore:
{
- while (closestIndex > 0
- && mSampleTimeEntries[closestIndex].mCompositionTime
- > req_time) {
- --closestIndex;
- }
+ --closestIndex;
break;
}
case kFlagAfter:
{
- while (closestIndex + 1 < mNumSampleSizes
- && mSampleTimeEntries[closestIndex].mCompositionTime
- < req_time) {
- ++closestIndex;
- }
+ // nothing to do
break;
}
default:
{
CHECK(flags == kFlagClosest);
-
- if (closestIndex > 0) {
- // Check left neighbour and pick closest.
- uint32_t absdiff1 =
- abs_difference(
- mSampleTimeEntries[closestIndex].mCompositionTime,
- req_time);
-
- uint32_t absdiff2 =
- abs_difference(
- mSampleTimeEntries[closestIndex - 1].mCompositionTime,
- req_time);
-
- if (absdiff1 > absdiff2) {
- closestIndex = closestIndex - 1;
- }
+ // pick closest based on timestamp. use abs_difference for safety
+ if (abs_difference(
+ getSampleTime(closestIndex, scale_num, scale_den), req_time) >
+ abs_difference(
+ req_time, getSampleTime(closestIndex - 1, scale_num, scale_den))) {
+ --closestIndex;
}
-
break;
}
}
*sample_index = mSampleTimeEntries[closestIndex].mSampleIndex;
-
return OK;
}
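findSampleAtTime() now takes the request in the caller's unit together with a scale_num/scale_den pair, so composition times stored in media-timescale ticks can be compared without truncating to 32 bits. The getSampleTime() helper it calls is not shown in this hunk; a plausible shape, stated purely as an assumption, is:

    #include <cstdint>

    // Assumed helper: convert a stored composition time (media-timescale ticks)
    // into the caller's unit, e.g. scale_num = 1000000 and scale_den = timescale
    // for microseconds. The 64-bit multiply happens before the divide.
    static inline uint64_t scaleSampleTime(uint32_t compositionTime,
                                           uint64_t scale_num, uint64_t scale_den) {
        return (compositionTime * scale_num) / scale_den;
    }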
@@ -618,109 +621,85 @@ status_t SampleTable::findSyncSampleNear(
}
uint32_t left = 0;
- uint32_t right = mNumSyncSamples;
- while (left < right) {
- uint32_t center = left + (right - left) / 2;
+ uint32_t right_plus_one = mNumSyncSamples;
+ while (left < right_plus_one) {
+ uint32_t center = left + (right_plus_one - left) / 2;
uint32_t x = mSyncSamples[center];
if (start_sample_index < x) {
- right = center;
+ right_plus_one = center;
} else if (start_sample_index > x) {
left = center + 1;
} else {
- left = center;
- break;
+ *sample_index = x;
+ return OK;
}
}
+
if (left == mNumSyncSamples) {
if (flags == kFlagAfter) {
ALOGE("tried to find a sync frame after the last one: %d", left);
return ERROR_OUT_OF_RANGE;
}
- left = left - 1;
+ flags = kFlagBefore;
}
+ else if (left == 0) {
+ if (flags == kFlagBefore) {
+ ALOGE("tried to find a sync frame before the first one: %d", left);
- // Now ssi[left] is the sync sample index just before (or at)
- // start_sample_index.
- // Also start_sample_index < ssi[left + 1], if left + 1 < mNumSyncSamples.
-
- uint32_t x = mSyncSamples[left];
-
- if (left + 1 < mNumSyncSamples) {
- uint32_t y = mSyncSamples[left + 1];
-
- // our sample lies between sync samples x and y.
-
- status_t err = mSampleIterator->seekTo(start_sample_index);
- if (err != OK) {
- return err;
- }
-
- uint32_t sample_time = mSampleIterator->getSampleTime();
-
- err = mSampleIterator->seekTo(x);
- if (err != OK) {
- return err;
- }
- uint32_t x_time = mSampleIterator->getSampleTime();
-
- err = mSampleIterator->seekTo(y);
- if (err != OK) {
- return err;
- }
-
- uint32_t y_time = mSampleIterator->getSampleTime();
-
- if (abs_difference(x_time, sample_time)
- > abs_difference(y_time, sample_time)) {
- // Pick the sync sample closest (timewise) to the start-sample.
- x = y;
- ++left;
+ // normally we should return out of range, but that is
+ // treated as end-of-stream. instead seek to first sync
+ //
+ // return ERROR_OUT_OF_RANGE;
}
+ flags = kFlagAfter;
}
+ // Now ssi[left - 1] <(=) start_sample_index <= ssi[left]
switch (flags) {
case kFlagBefore:
{
- if (x > start_sample_index) {
- CHECK(left > 0);
-
- x = mSyncSamples[left - 1];
-
- if (x > start_sample_index) {
- // The table of sync sample indices was not sorted
- // properly.
- return ERROR_MALFORMED;
- }
- }
+ --left;
break;
}
-
case kFlagAfter:
{
- if (x < start_sample_index) {
- if (left + 1 >= mNumSyncSamples) {
- return ERROR_OUT_OF_RANGE;
- }
-
- x = mSyncSamples[left + 1];
-
- if (x < start_sample_index) {
- // The table of sync sample indices was not sorted
- // properly.
- return ERROR_MALFORMED;
- }
- }
-
+ // nothing to do
break;
}
-
default:
+ {
+ // this route is not used, but implement it nonetheless
+ CHECK(flags == kFlagClosest);
+
+ status_t err = mSampleIterator->seekTo(start_sample_index);
+ if (err != OK) {
+ return err;
+ }
+ uint32_t sample_time = mSampleIterator->getSampleTime();
+
+ err = mSampleIterator->seekTo(mSyncSamples[left]);
+ if (err != OK) {
+ return err;
+ }
+ uint32_t upper_time = mSampleIterator->getSampleTime();
+
+ err = mSampleIterator->seekTo(mSyncSamples[left - 1]);
+ if (err != OK) {
+ return err;
+ }
+ uint32_t lower_time = mSampleIterator->getSampleTime();
+
+ // use abs_difference for safety
+ if (abs_difference(upper_time, sample_time) >
+ abs_difference(sample_time, lower_time)) {
+ --left;
+ }
break;
+ }
}
- *sample_index = x;
-
+ *sample_index = mSyncSamples[left];
return OK;
}
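The rewritten findSyncSampleNear() leaves the binary search positioned between the neighbouring sync samples (exact hits return early) and then, instead of failing at either boundary, clamps the request by switching the flag: a seek before the first sync frame goes forward, a seek after the last one goes backward. A stand-alone sketch of that selection with kFlagClosest omitted (illustrative only; assumes a non-empty, ascending table):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    enum SeekFlag { kBefore, kAfter };

    static uint32_t pickSyncSample(const std::vector<uint32_t> &syncSamples,
                                   uint32_t start, SeekFlag flag) {
        size_t left = 0;
        size_t right_plus_one = syncSamples.size();
        while (left < right_plus_one) {
            size_t center = left + (right_plus_one - left) / 2;
            if (start < syncSamples[center]) {
                right_plus_one = center;
            } else if (start > syncSamples[center]) {
                left = center + 1;
            } else {
                return syncSamples[center];   // exact hit
            }
        }
        // syncSamples[left - 1] < start < syncSamples[left], where those exist.
        if (left == syncSamples.size()) {
            flag = kBefore;                   // nothing after the request: clamp back
        } else if (left == 0) {
            flag = kAfter;                    // nothing before the request: clamp forward
        }
        return (flag == kBefore) ? syncSamples[left - 1] : syncSamples[left];
    }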
@@ -778,7 +757,8 @@ status_t SampleTable::getMetaDataForSample(
off64_t *offset,
size_t *size,
uint32_t *compositionTime,
- bool *isSyncSample) {
+ bool *isSyncSample,
+ uint32_t *sampleDuration) {
Mutex::Autolock autoLock(mLock);
status_t err;
@@ -820,6 +800,10 @@ status_t SampleTable::getMetaDataForSample(
}
}
+ if (sampleDuration) {
+ *sampleDuration = mSampleIterator->getSampleDuration();
+ }
+
return OK;
}
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index 773854f..e2e6d79 100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
@@ -25,7 +25,7 @@
namespace android {
SkipCutBuffer::SkipCutBuffer(int32_t skip, int32_t cut) {
- mFrontPadding = skip;
+ mFrontPadding = mSkip = skip;
mBackPadding = cut;
mWriteHead = 0;
mReadHead = 0;
@@ -94,6 +94,7 @@ void SkipCutBuffer::submit(const sp<ABuffer>& buffer) {
void SkipCutBuffer::clear() {
mWriteHead = mReadHead = 0;
+ mFrontPadding = mSkip;
}
void SkipCutBuffer::write(const char *src, size_t num) {
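The SkipCutBuffer fix above stashes the configured skip in mSkip because mFrontPadding counts down as the encoder delay is discarded; without re-arming it in clear(), a flush or seek would stop trimming the delay at the start of the next decode. A toy model of that bookkeeping (not the real SkipCutBuffer API):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>

    struct FrontSkipper {
        int32_t mSkip;          // configured front padding, in bytes
        int32_t mFrontPadding;  // bytes still to drop from the current stream

        explicit FrontSkipper(int32_t skip) : mSkip(skip), mFrontPadding(skip) {}

        // Returns how many of 'num' incoming bytes survive the front trim.
        size_t submit(size_t num) {
            int32_t drop = std::min<int32_t>(mFrontPadding, (int32_t)num);
            mFrontPadding -= drop;
            return num - drop;
        }

        // After a flush/seek the next buffers start a fresh decode, so the
        // delay has to be trimmed again -- the line added to clear() above.
        void clear() { mFrontPadding = mSkip; }
    };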
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index af8186c..db33e83 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -24,12 +24,10 @@
#include <media/stagefright/StagefrightMediaScanner.h>
+#include <media/IMediaHTTPService.h>
#include <media/mediametadataretriever.h>
#include <private/media/VideoFrame.h>
-// Sonivox includes
-#include <libsonivox/eas.h>
-
namespace android {
StagefrightMediaScanner::StagefrightMediaScanner() {}
@@ -56,54 +54,6 @@ static bool FileHasAcceptableExtension(const char *extension) {
return false;
}
-static MediaScanResult HandleMIDI(
- const char *filename, MediaScannerClient *client) {
- // get the library configuration and do sanity check
- const S_EAS_LIB_CONFIG* pLibConfig = EAS_Config();
- if ((pLibConfig == NULL) || (LIB_VERSION != pLibConfig->libVersion)) {
- ALOGE("EAS library/header mismatch\n");
- return MEDIA_SCAN_RESULT_ERROR;
- }
- EAS_I32 temp;
-
- // spin up a new EAS engine
- EAS_DATA_HANDLE easData = NULL;
- EAS_HANDLE easHandle = NULL;
- EAS_RESULT result = EAS_Init(&easData);
- if (result == EAS_SUCCESS) {
- EAS_FILE file;
- file.path = filename;
- file.fd = 0;
- file.offset = 0;
- file.length = 0;
- result = EAS_OpenFile(easData, &file, &easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_Prepare(easData, easHandle);
- }
- if (result == EAS_SUCCESS) {
- result = EAS_ParseMetaData(easData, easHandle, &temp);
- }
- if (easHandle) {
- EAS_CloseFile(easData, easHandle);
- }
- if (easData) {
- EAS_Shutdown(easData);
- }
-
- if (result != EAS_SUCCESS) {
- return MEDIA_SCAN_RESULT_SKIPPED;
- }
-
- char buffer[20];
- sprintf(buffer, "%ld", temp);
- status_t status = client->addStringTag("duration", buffer);
- if (status != OK) {
- return MEDIA_SCAN_RESULT_ERROR;
- }
- return MEDIA_SCAN_RESULT_OK;
-}
-
MediaScanResult StagefrightMediaScanner::processFile(
const char *path, const char *mimeType,
MediaScannerClient &client) {
@@ -117,7 +67,7 @@ MediaScanResult StagefrightMediaScanner::processFile(
}
MediaScanResult StagefrightMediaScanner::processFileInternal(
- const char *path, const char *mimeType,
+ const char *path, const char * /* mimeType */,
MediaScannerClient &client) {
const char *extension = strrchr(path, '.');
@@ -129,25 +79,13 @@ MediaScanResult StagefrightMediaScanner::processFileInternal(
return MEDIA_SCAN_RESULT_SKIPPED;
}
- if (!strcasecmp(extension, ".mid")
- || !strcasecmp(extension, ".smf")
- || !strcasecmp(extension, ".imy")
- || !strcasecmp(extension, ".midi")
- || !strcasecmp(extension, ".xmf")
- || !strcasecmp(extension, ".rtttl")
- || !strcasecmp(extension, ".rtx")
- || !strcasecmp(extension, ".ota")
- || !strcasecmp(extension, ".mxmf")) {
- return HandleMIDI(path, &client);
- }
-
sp<MediaMetadataRetriever> mRetriever(new MediaMetadataRetriever);
int fd = open(path, O_RDONLY | O_LARGEFILE);
status_t status;
if (fd < 0) {
// couldn't open it locally, maybe the media server can?
- status = mRetriever->setDataSource(path);
+ status = mRetriever->setDataSource(NULL /* httpService */, path);
} else {
status = mRetriever->setDataSource(fd, 0, 0x7ffffffffffffffL);
close(fd);
@@ -202,7 +140,7 @@ MediaScanResult StagefrightMediaScanner::processFileInternal(
return MEDIA_SCAN_RESULT_OK;
}
-char *StagefrightMediaScanner::extractAlbumArt(int fd) {
+MediaAlbumArt *StagefrightMediaScanner::extractAlbumArt(int fd) {
ALOGV("extractAlbumArt %d", fd);
off64_t size = lseek64(fd, 0, SEEK_END);
@@ -214,15 +152,9 @@ char *StagefrightMediaScanner::extractAlbumArt(int fd) {
sp<MediaMetadataRetriever> mRetriever(new MediaMetadataRetriever);
if (mRetriever->setDataSource(fd, 0, size) == OK) {
sp<IMemory> mem = mRetriever->extractAlbumArt();
-
if (mem != NULL) {
MediaAlbumArt *art = static_cast<MediaAlbumArt *>(mem->pointer());
-
- char *data = (char *)malloc(art->mSize + 4);
- *(int32_t *)data = art->mSize;
- memcpy(&data[4], &art[1], art->mSize);
-
- return data;
+ return art->clone();
}
}
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index fcd9a85..101fc8a 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -16,11 +16,14 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "StagefrightMetadataRetriever"
+
#include <inttypes.h>
+
#include <utils/Log.h>
#include "include/StagefrightMetadataRetriever.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/DataSource.h>
@@ -29,6 +32,7 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
#include <media/stagefright/MediaDefs.h>
+#include <CharacterEncodingDetector.h>
namespace android {
@@ -51,7 +55,9 @@ StagefrightMetadataRetriever::~StagefrightMetadataRetriever() {
}
status_t StagefrightMetadataRetriever::setDataSource(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers) {
ALOGV("setDataSource(%s)", uri);
mParsedMetaData = false;
@@ -59,7 +65,7 @@ status_t StagefrightMetadataRetriever::setDataSource(
delete mAlbumArt;
mAlbumArt = NULL;
- mSource = DataSource::CreateFromURI(uri, headers);
+ mSource = DataSource::CreateFromURI(httpService, uri, headers);
if (mSource == NULL) {
ALOGE("Unable to create data source for '%s'.", uri);
@@ -84,7 +90,7 @@ status_t StagefrightMetadataRetriever::setDataSource(
int fd, int64_t offset, int64_t length) {
fd = dup(fd);
- ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
mParsedMetaData = false;
mMetaData.clear();
@@ -239,7 +245,7 @@ static VideoFrame *extractVideoFrameWithCodecFlags(
const char *mime;
CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
- ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s",
+ ALOGV("thumbNailTime = %" PRId64 " us, timeUs = %" PRId64 " us, mime = %s",
thumbNailTime, timeUs, mime);
}
}
@@ -322,7 +328,7 @@ static VideoFrame *extractVideoFrameWithCodecFlags(
VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
int64_t timeUs, int option) {
- ALOGV("getFrameAtTime: %lld us option: %d", timeUs, option);
+ ALOGV("getFrameAtTime: %" PRId64 " us option: %d", timeUs, option);
if (mExtractor.get() == NULL) {
ALOGV("no extractor.");
@@ -375,10 +381,7 @@ VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(
size_t dataSize;
if (fileMeta->findData(kKeyAlbumArt, &type, &data, &dataSize)
&& mAlbumArt == NULL) {
- mAlbumArt = new MediaAlbumArt;
- mAlbumArt->mSize = dataSize;
- mAlbumArt->mData = new uint8_t[dataSize];
- memcpy(mAlbumArt->mData, data, dataSize);
+ mAlbumArt = MediaAlbumArt::fromData(dataSize, data);
}
VideoFrame *frame =
@@ -411,7 +414,7 @@ MediaAlbumArt *StagefrightMetadataRetriever::extractAlbumArt() {
}
if (mAlbumArt) {
- return new MediaAlbumArt(*mAlbumArt);
+ return mAlbumArt->clone();
}
return NULL;
@@ -448,42 +451,65 @@ void StagefrightMetadataRetriever::parseMetaData() {
struct Map {
int from;
int to;
+ const char *name;
};
static const Map kMap[] = {
- { kKeyMIMEType, METADATA_KEY_MIMETYPE },
- { kKeyCDTrackNumber, METADATA_KEY_CD_TRACK_NUMBER },
- { kKeyDiscNumber, METADATA_KEY_DISC_NUMBER },
- { kKeyAlbum, METADATA_KEY_ALBUM },
- { kKeyArtist, METADATA_KEY_ARTIST },
- { kKeyAlbumArtist, METADATA_KEY_ALBUMARTIST },
- { kKeyAuthor, METADATA_KEY_AUTHOR },
- { kKeyComposer, METADATA_KEY_COMPOSER },
- { kKeyDate, METADATA_KEY_DATE },
- { kKeyGenre, METADATA_KEY_GENRE },
- { kKeyTitle, METADATA_KEY_TITLE },
- { kKeyYear, METADATA_KEY_YEAR },
- { kKeyWriter, METADATA_KEY_WRITER },
- { kKeyCompilation, METADATA_KEY_COMPILATION },
- { kKeyLocation, METADATA_KEY_LOCATION },
+ { kKeyMIMEType, METADATA_KEY_MIMETYPE, NULL },
+ { kKeyCDTrackNumber, METADATA_KEY_CD_TRACK_NUMBER, "tracknumber" },
+ { kKeyDiscNumber, METADATA_KEY_DISC_NUMBER, "discnumber" },
+ { kKeyAlbum, METADATA_KEY_ALBUM, "album" },
+ { kKeyArtist, METADATA_KEY_ARTIST, "artist" },
+ { kKeyAlbumArtist, METADATA_KEY_ALBUMARTIST, "albumartist" },
+ { kKeyAuthor, METADATA_KEY_AUTHOR, NULL },
+ { kKeyComposer, METADATA_KEY_COMPOSER, "composer" },
+ { kKeyDate, METADATA_KEY_DATE, NULL },
+ { kKeyGenre, METADATA_KEY_GENRE, "genre" },
+ { kKeyTitle, METADATA_KEY_TITLE, "title" },
+ { kKeyYear, METADATA_KEY_YEAR, "year" },
+ { kKeyWriter, METADATA_KEY_WRITER, "writer" },
+ { kKeyCompilation, METADATA_KEY_COMPILATION, "compilation" },
+ { kKeyLocation, METADATA_KEY_LOCATION, NULL },
};
+
static const size_t kNumMapEntries = sizeof(kMap) / sizeof(kMap[0]);
+ CharacterEncodingDetector *detector = new CharacterEncodingDetector();
+
for (size_t i = 0; i < kNumMapEntries; ++i) {
const char *value;
if (meta->findCString(kMap[i].from, &value)) {
- mMetaData.add(kMap[i].to, String8(value));
+ if (kMap[i].name) {
+ // add to charset detector
+ detector->addTag(kMap[i].name, value);
+ } else {
+ // directly add to output list
+ mMetaData.add(kMap[i].to, String8(value));
+ }
+ }
+ }
+
+ detector->detectAndConvert();
+ int size = detector->size();
+ if (size) {
+ for (int i = 0; i < size; i++) {
+ const char *name;
+ const char *value;
+ detector->getTag(i, &name, &value);
+ for (size_t j = 0; j < kNumMapEntries; ++j) {
+ if (kMap[j].name && !strcmp(kMap[j].name, name)) {
+ mMetaData.add(kMap[j].to, String8(value));
+ }
+ }
}
}
+ delete detector;
const void *data;
uint32_t type;
size_t dataSize;
if (meta->findData(kKeyAlbumArt, &type, &data, &dataSize)
&& mAlbumArt == NULL) {
- mAlbumArt = new MediaAlbumArt;
- mAlbumArt->mSize = dataSize;
- mAlbumArt->mData = new uint8_t[dataSize];
- memcpy(mAlbumArt->mData, data, dataSize);
+ mAlbumArt = MediaAlbumArt::fromData(dataSize, data);
}
size_t numTracks = mExtractor->countTracks();
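The metadata loop above now routes text tags through CharacterEncodingDetector so values stored in legacy code pages come out as UTF-8, while tags without a detector name (MIME type, date, location) bypass it. A hedged sketch of the detector's usage pattern on its own (the tag names and the wrapper function are assumptions, not code from this patch):

    #include <CharacterEncodingDetector.h>

    static void convertLegacyTags(const char *rawArtist, const char *rawTitle) {
        CharacterEncodingDetector *detector = new CharacterEncodingDetector();
        detector->addTag("artist", rawArtist);   // bytes possibly in a legacy code page
        detector->addTag("title", rawTitle);
        detector->detectAndConvert();
        for (int i = 0; i < detector->size(); i++) {
            const char *name;
            const char *value;
            detector->getTag(i, &name, &value);  // value is valid UTF-8 here
            // ... publish 'value' under whichever key matches 'name' ...
        }
        delete detector;
    }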
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index 6b934d4..e8abf48 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -16,6 +16,8 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SurfaceMediaSource"
+#include <inttypes.h>
+
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/stagefright/MediaDefs.h>
@@ -24,6 +26,7 @@
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
+#include <gui/BufferItem.h>
#include <gui/ISurfaceComposer.h>
#include <gui/IGraphicBufferAlloc.h>
#include <OMX_Component.h>
@@ -54,9 +57,9 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig
ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight);
}
- mBufferQueue = new BufferQueue();
- mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
- mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
GRALLOC_USAGE_HW_TEXTURE);
sp<ISurfaceComposer> composer(ComposerService::getComposerService());
@@ -68,7 +71,7 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig
wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
- status_t err = mBufferQueue->consumerConnect(proxy, false);
+ status_t err = mConsumer->consumerConnect(proxy, false);
if (err != NO_ERROR) {
ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
strerror(-err), err);
@@ -99,13 +102,16 @@ void SurfaceMediaSource::dump(String8& result) const
dump(result, "", buffer, 1024);
}
-void SurfaceMediaSource::dump(String8& result, const char* prefix,
- char* buffer, size_t SIZE) const
+void SurfaceMediaSource::dump(
+ String8& result,
+ const char* /* prefix */,
+ char* buffer,
+ size_t /* SIZE */) const
{
Mutex::Autolock lock(mMutex);
result.append(buffer);
- mBufferQueue->dump(result, "");
+ mConsumer->dump(result, "");
}
status_t SurfaceMediaSource::setFrameRate(int32_t fps)
@@ -163,7 +169,7 @@ status_t SurfaceMediaSource::start(MetaData *params)
CHECK_GT(mMaxAcquiredBufferCount, 1);
status_t err =
- mBufferQueue->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
+ mConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
if (err != OK) {
return err;
@@ -176,7 +182,7 @@ status_t SurfaceMediaSource::start(MetaData *params)
}
status_t SurfaceMediaSource::setMaxAcquiredBufferCount(size_t count) {
- ALOGV("setMaxAcquiredBufferCount(%d)", count);
+ ALOGV("setMaxAcquiredBufferCount(%zu)", count);
Mutex::Autolock lock(mMutex);
CHECK_GT(count, 1);
@@ -202,8 +208,11 @@ status_t SurfaceMediaSource::stop()
return OK;
}
+ mStarted = false;
+ mFrameAvailableCondition.signal();
+
while (mNumPendingBuffers > 0) {
- ALOGI("Still waiting for %d buffers to be returned.",
+ ALOGI("Still waiting for %zu buffers to be returned.",
mNumPendingBuffers);
#if DEBUG_PENDING_BUFFERS
@@ -215,11 +224,9 @@ status_t SurfaceMediaSource::stop()
mMediaBuffersAvailableCondition.wait(mMutex);
}
- mStarted = false;
- mFrameAvailableCondition.signal();
mMediaBuffersAvailableCondition.signal();
- return mBufferQueue->consumerDisconnect();
+ return mConsumer->consumerDisconnect();
}
sp<MetaData> SurfaceMediaSource::getFormat()
@@ -265,13 +272,12 @@ static void passMetadataBuffer(MediaBuffer **buffer,
memcpy(data, &type, 4);
memcpy(data + 4, &bufferHandle, sizeof(buffer_handle_t));
- ALOGV("handle = %p, , offset = %d, length = %d",
+ ALOGV("handle = %p, , offset = %zu, length = %zu",
bufferHandle, (*buffer)->range_length(), (*buffer)->range_offset());
}
-status_t SurfaceMediaSource::read( MediaBuffer **buffer,
- const ReadOptions *options)
-{
+status_t SurfaceMediaSource::read(
+ MediaBuffer **buffer, const ReadOptions * /* options */) {
ALOGV("read");
Mutex::Autolock lock(mMutex);
@@ -285,12 +291,12 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer,
// TODO: mCurrentSlot can be made a bufferstate since there
// can be more than one "current" slots.
- BufferQueue::BufferItem item;
+ BufferItem item;
// If the recording has started and the queue is empty, then just
// wait here till the frames come in from the client side
while (mStarted) {
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// wait for a buffer to be queued
mFrameAvailableCondition.wait(mMutex);
@@ -313,7 +319,7 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer,
if (mStartTimeNs > 0) {
if (item.mTimestamp < mStartTimeNs) {
// This frame predates start of record, discard
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY,
EGL_NO_SYNC_KHR, Fence::NO_FENCE);
continue;
@@ -360,7 +366,7 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer,
(*buffer)->setObserver(this);
(*buffer)->add_ref();
(*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp / 1000);
- ALOGV("Frames encoded = %d, timestamp = %lld, time diff = %lld",
+ ALOGV("Frames encoded = %d, timestamp = %" PRId64 ", time diff = %" PRId64,
mNumFramesEncoded, mCurrentTimestamp / 1000,
mCurrentTimestamp / 1000 - prevTimeStamp / 1000);
@@ -413,7 +419,7 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
ALOGV("Slot %d returned, matches handle = %p", id,
mSlots[id].mGraphicBuffer->handle);
- mBufferQueue->releaseBuffer(id, mSlots[id].mFrameNumber,
+ mConsumer->releaseBuffer(id, mSlots[id].mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
Fence::NO_FENCE);
@@ -443,7 +449,7 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
}
// Part of the BufferQueue::ConsumerListener
-void SurfaceMediaSource::onFrameAvailable() {
+void SurfaceMediaSource::onFrameAvailable(const BufferItem& /* item */) {
ALOGV("onFrameAvailable");
sp<FrameAvailableListener> listener;
@@ -474,4 +480,8 @@ void SurfaceMediaSource::onBuffersReleased() {
}
}
+void SurfaceMediaSource::onSidebandStreamChanged() {
+ ALOG_ASSERT(false, "SurfaceMediaSource can't consume sideband streams");
+}
+
} // end of namespace android
diff --git a/media/libstagefright/TimedEventQueue.cpp b/media/libstagefright/TimedEventQueue.cpp
index 0afac69..7d15220 100644
--- a/media/libstagefright/TimedEventQueue.cpp
+++ b/media/libstagefright/TimedEventQueue.cpp
@@ -17,7 +17,11 @@
#undef __STRICT_ANSI__
#define __STDINT_LIMITS
#define __STDC_LIMIT_MACROS
+
+#include <inttypes.h>
#include <stdint.h>
+#include <sys/prctl.h>
+#include <sys/time.h>
//#define LOG_NDEBUG 0
#define LOG_TAG "TimedEventQueue"
@@ -26,9 +30,6 @@
#include "include/TimedEventQueue.h"
-#include <sys/prctl.h>
-#include <sys/time.h>
-
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <binder/IServiceManager.h>
@@ -51,7 +52,7 @@ TimedEventQueue::TimedEventQueue()
TimedEventQueue::~TimedEventQueue() {
stop();
if (mPowerManager != 0) {
- sp<IBinder> binder = mPowerManager->asBinder();
+ sp<IBinder> binder = IInterface::asBinder(mPowerManager);
binder->unlinkToDeath(mDeathRecipient);
}
}
@@ -258,7 +259,7 @@ void TimedEventQueue::threadEntry() {
static int64_t kMaxTimeoutUs = 10000000ll; // 10 secs
bool timeoutCapped = false;
if (delay_us > kMaxTimeoutUs) {
- ALOGW("delay_us exceeds max timeout: %lld us", delay_us);
+ ALOGW("delay_us exceeds max timeout: %" PRId64 " us", delay_us);
// We'll never block for more than 10 secs, instead
// we will split up the full timeout into chunks of
@@ -337,7 +338,7 @@ void TimedEventQueue::acquireWakeLock_l()
status_t status = mPowerManager->acquireWakeLock(POWERMANAGER_PARTIAL_WAKE_LOCK,
binder,
String16("TimedEventQueue"),
- String16("media"));
+ String16("media")); // not oneway
IPCThreadState::self()->restoreCallingIdentity(token);
if (status == NO_ERROR) {
mWakeLockToken = binder;
@@ -362,7 +363,7 @@ void TimedEventQueue::releaseWakeLock_l(bool force)
CHECK(mWakeLockToken != 0);
if (mPowerManager != 0) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mPowerManager->releaseWakeLock(mWakeLockToken, 0);
+ mPowerManager->releaseWakeLock(mWakeLockToken, 0); // not oneway
IPCThreadState::self()->restoreCallingIdentity(token);
}
mWakeLockToken.clear();
@@ -376,8 +377,8 @@ void TimedEventQueue::clearPowerManager()
mPowerManager.clear();
}
-void TimedEventQueue::PMDeathRecipient::binderDied(const wp<IBinder>& who)
-{
+void TimedEventQueue::PMDeathRecipient::binderDied(
+ const wp<IBinder>& /* who */) {
mQueue->clearPowerManager();
}
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 216a329..b3a79a0 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -17,11 +17,13 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Utils"
#include <utils/Log.h>
+#include <ctype.h>
#include "include/ESDS.h"
#include <arpa/inet.h>
#include <cutils/properties.h>
+#include <media/openmax/OMX_Audio.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -83,6 +85,11 @@ status_t convertMetaDataToMessage(
msg->setInt64("durationUs", durationUs);
}
+ int avgBitRate;
+ if (meta->findInt32(kKeyBitRate, &avgBitRate)) {
+ msg->setInt32("bit-rate", avgBitRate);
+ }
+
int32_t isSync;
if (meta->findInt32(kKeyIsSyncFrame, &isSync) && isSync != 0) {
msg->setInt32("is-sync-frame", 1);
@@ -102,6 +109,25 @@ status_t convertMetaDataToMessage(
msg->setInt32("sar-width", sarWidth);
msg->setInt32("sar-height", sarHeight);
}
+
+ int32_t colorFormat;
+ if (meta->findInt32(kKeyColorFormat, &colorFormat)) {
+ msg->setInt32("color-format", colorFormat);
+ }
+
+ int32_t cropLeft, cropTop, cropRight, cropBottom;
+ if (meta->findRect(kKeyCropRect,
+ &cropLeft,
+ &cropTop,
+ &cropRight,
+ &cropBottom)) {
+ msg->setRect("crop", cropLeft, cropTop, cropRight, cropBottom);
+ }
+
+ int32_t rotationDegrees;
+ if (meta->findInt32(kKeyRotation, &rotationDegrees)) {
+ msg->setInt32("rotation-degrees", rotationDegrees);
+ }
} else if (!strncasecmp("audio/", mime, 6)) {
int32_t numChannels, sampleRate;
CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
@@ -128,6 +154,11 @@ status_t convertMetaDataToMessage(
if (meta->findInt32(kKeyIsADTS, &isADTS)) {
msg->setInt32("is-adts", true);
}
+
+ int32_t aacProfile = -1;
+ if (meta->findInt32(kKeyAACAOT, &aacProfile)) {
+ msg->setInt32("aac-profile", aacProfile);
+ }
}
int32_t maxInputSize;
@@ -135,6 +166,11 @@ status_t convertMetaDataToMessage(
msg->setInt32("max-input-size", maxInputSize);
}
+ int32_t rotationDegrees;
+ if (meta->findInt32(kKeyRotation, &rotationDegrees)) {
+ msg->setInt32("rotation-degrees", rotationDegrees);
+ }
+
uint32_t type;
const void *data;
size_t size;
@@ -145,14 +181,14 @@ status_t convertMetaDataToMessage(
CHECK(size >= 7);
CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
- uint8_t profile = ptr[1];
- uint8_t level = ptr[3];
+ uint8_t profile __unused = ptr[1];
+ uint8_t level __unused = ptr[3];
// There is decodable content out there that fails the following
// assertion, let's be lenient for now...
// CHECK((ptr[4] >> 2) == 0x3f); // reserved
- size_t lengthSize = 1 + (ptr[4] & 3);
+ size_t lengthSize __unused = 1 + (ptr[4] & 3);
// commented out check below as H264_QVGA_500_NO_AUDIO.3gp
// violates it...
@@ -216,6 +252,56 @@ status_t convertMetaDataToMessage(
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-1", buffer);
+ } else if (meta->findData(kKeyHVCC, &type, &data, &size)) {
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ CHECK(size >= 7);
+ CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+ uint8_t profile __unused = ptr[1] & 31;
+ uint8_t level __unused = ptr[12];
+ ptr += 22;
+ size -= 22;
+
+
+ size_t numofArrays = (char)ptr[0];
+ ptr += 1;
+ size -= 1;
+ size_t j = 0, i = 0;
+
+ sp<ABuffer> buffer = new ABuffer(1024);
+ buffer->setRange(0, 0);
+
+ for (i = 0; i < numofArrays; i++) {
+ ptr += 1;
+ size -= 1;
+
+            // number of NAL units in this array
+ size_t numofNals = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ for (j = 0; j < numofNals; j++) {
+ CHECK(size >= 2);
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ CHECK(size >= length);
+
+ memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+ memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+ buffer->setRange(0, buffer->size() + 4 + length);
+
+ ptr += length;
+ size -= length;
+ }
+ }
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
+
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
CHECK_EQ(esds.InitCheck(), (status_t)OK);
@@ -251,6 +337,13 @@ status_t convertMetaDataToMessage(
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-1", buffer);
+ } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) {
+ sp<ABuffer> buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
}
*format = msg;
@@ -277,7 +370,7 @@ static size_t reassembleAVCC(const sp<ABuffer> &csd0, const sp<ABuffer> csd1, ch
// there can't be another param here, so use all the rest
i = csd0->size();
}
- ALOGV("block at %d, last was %d", i, lastparamoffset);
+ ALOGV("block at %zu, last was %d", i, lastparamoffset);
if (lastparamoffset > 0) {
int size = i - lastparamoffset;
avcc[avccidx++] = size >> 8;
@@ -308,7 +401,7 @@ static size_t reassembleAVCC(const sp<ABuffer> &csd0, const sp<ABuffer> csd1, ch
// there can't be another param here, so use all the rest
i = csd1->size();
}
- ALOGV("block at %d, last was %d", i, lastparamoffset);
+ ALOGV("block at %zu, last was %d", i, lastparamoffset);
if (lastparamoffset > 0) {
int size = i - lastparamoffset;
avcc[avccidx++] = size >> 8;
@@ -401,6 +494,25 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
meta->setInt32(kKeySARWidth, sarWidth);
meta->setInt32(kKeySARHeight, sarHeight);
}
+
+ int32_t colorFormat;
+ if (msg->findInt32("color-format", &colorFormat)) {
+ meta->setInt32(kKeyColorFormat, colorFormat);
+ }
+
+ int32_t cropLeft, cropTop, cropRight, cropBottom;
+ if (msg->findRect("crop",
+ &cropLeft,
+ &cropTop,
+ &cropRight,
+ &cropBottom)) {
+ meta->setRect(kKeyCropRect, cropLeft, cropTop, cropRight, cropBottom);
+ }
+
+ int32_t rotationDegrees;
+ if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
+ meta->setInt32(kKeyRotation, rotationDegrees);
+ }
} else if (mime.startsWith("audio/")) {
int32_t numChannels;
if (msg->findInt32("channel-count", &numChannels)) {
@@ -452,6 +564,11 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
}
}
+ int32_t timeScale;
+ if (msg->findInt32("time-scale", &timeScale)) {
+ meta->setInt32(kKeyTimeScale, timeScale);
+ }
+
// XXX TODO add whatever other keys there are
#if 0
@@ -523,6 +640,7 @@ static const struct mime_conv_t mimeLookup[] = {
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, AUDIO_FORMAT_AMR_WB },
{ MEDIA_MIMETYPE_AUDIO_AAC, AUDIO_FORMAT_AAC },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, AUDIO_FORMAT_VORBIS },
+ { MEDIA_MIMETYPE_AUDIO_OPUS, AUDIO_FORMAT_OPUS},
{ 0, AUDIO_FORMAT_INVALID }
};
@@ -540,10 +658,46 @@ const struct mime_conv_t* p = &mimeLookup[0];
return BAD_VALUE;
}
+struct aac_format_conv_t {
+ OMX_AUDIO_AACPROFILETYPE eAacProfileType;
+ audio_format_t format;
+};
+
+static const struct aac_format_conv_t profileLookup[] = {
+ { OMX_AUDIO_AACObjectMain, AUDIO_FORMAT_AAC_MAIN},
+ { OMX_AUDIO_AACObjectLC, AUDIO_FORMAT_AAC_LC},
+ { OMX_AUDIO_AACObjectSSR, AUDIO_FORMAT_AAC_SSR},
+ { OMX_AUDIO_AACObjectLTP, AUDIO_FORMAT_AAC_LTP},
+ { OMX_AUDIO_AACObjectHE, AUDIO_FORMAT_AAC_HE_V1},
+ { OMX_AUDIO_AACObjectScalable, AUDIO_FORMAT_AAC_SCALABLE},
+ { OMX_AUDIO_AACObjectERLC, AUDIO_FORMAT_AAC_ERLC},
+ { OMX_AUDIO_AACObjectLD, AUDIO_FORMAT_AAC_LD},
+ { OMX_AUDIO_AACObjectHE_PS, AUDIO_FORMAT_AAC_HE_V2},
+ { OMX_AUDIO_AACObjectELD, AUDIO_FORMAT_AAC_ELD},
+ { OMX_AUDIO_AACObjectNull, AUDIO_FORMAT_AAC},
+};
+
+void mapAACProfileToAudioFormat( audio_format_t& format, uint64_t eAacProfile)
+{
+const struct aac_format_conv_t* p = &profileLookup[0];
+ while (p->eAacProfileType != OMX_AUDIO_AACObjectNull) {
+ if (eAacProfile == p->eAacProfileType) {
+ format = p->format;
+ return;
+ }
+ ++p;
+ }
+ format = AUDIO_FORMAT_AAC;
+ return;
+}
+
bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo,
bool isStreaming, audio_stream_type_t streamType)
{
const char *mime;
+ if (meta == NULL) {
+ return false;
+ }
CHECK(meta->findCString(kKeyMIMEType, &mime));
audio_offload_info_t info = AUDIO_INFO_INITIALIZER;
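canOffloadStream() now refines the AAC format from the AOT found in the metadata instead of rejecting ELD/LD outright, leaving the accept/reject decision to the audio HAL. A hypothetical call illustrating the mapping helper added above (the wrapper function and include paths are assumptions, and the helper's declaration is assumed to be in scope):

    #include <media/openmax/OMX_Audio.h>
    #include <system/audio.h>

    static audio_format_t refineAacFormat(int32_t aacaot) {
        audio_format_t format = AUDIO_FORMAT_AAC;
        mapAACProfileToAudioFormat(format, (OMX_AUDIO_AACPROFILETYPE)aacaot);
        return format;   // e.g. OMX_AUDIO_AACObjectHE -> AUDIO_FORMAT_AAC_HE_V1
    }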
@@ -562,15 +716,11 @@ bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo,
return false;
}
- // check whether it is ELD/LD content -> no offloading
- // FIXME: this should depend on audio DSP capabilities. mapMimeToAudioFormat() should use the
- // metadata to refine the AAC format and the audio HAL should only list supported profiles.
+ // Redefine aac format according to its profile
+ // Offloading depends on audio DSP capabilities.
int32_t aacaot = -1;
if (meta->findInt32(kKeyAACAOT, &aacaot)) {
- if (aacaot == 23 || aacaot == 39 ) {
- ALOGV("track of type '%s' is ELD/LD content", mime);
- return false;
- }
+ mapAACProfileToAudioFormat(info.format,(OMX_AUDIO_AACPROFILETYPE) aacaot);
}
int32_t srate = -1;
@@ -615,5 +765,40 @@ bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo,
return AudioSystem::isOffloadSupported(info);
}
+AString uriDebugString(const AString &uri, bool incognito) {
+ if (incognito) {
+ return AString("<URI suppressed>");
+ }
+
+ char prop[PROPERTY_VALUE_MAX];
+ if (property_get("media.stagefright.log-uri", prop, "false") &&
+ (!strcmp(prop, "1") || !strcmp(prop, "true"))) {
+ return uri;
+ }
+
+ // find scheme
+ AString scheme;
+ const char *chars = uri.c_str();
+ for (size_t i = 0; i < uri.size(); i++) {
+ const char c = chars[i];
+ if (!isascii(c)) {
+ break;
+ } else if (isalpha(c)) {
+ continue;
+ } else if (i == 0) {
+ // first character must be a letter
+ break;
+ } else if (isdigit(c) || c == '+' || c == '.' || c =='-') {
+ continue;
+ } else if (c != ':') {
+ break;
+ }
+ scheme = AString(uri, 0, i);
+ scheme.append("://<suppressed>");
+ return scheme;
+ }
+ return AString("<no-scheme URI suppressed>");
+}
+
} // namespace android
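The new uriDebugString() keeps URLs out of the log by default: the full URI is returned only when the media.stagefright.log-uri property is set to 1/true; otherwise the result is the scheme plus "://<suppressed>", "<URI suppressed>" for incognito callers, or "<no-scheme URI suppressed>". A hypothetical call site (the incognito flag is whatever privacy state the caller tracks):

    ALOGI("connecting to %s", uriDebugString(uri, incognito).c_str());
    // "http://example.com/clip.mp4", incognito = false -> "http://<suppressed>"
    // any URI,                       incognito = true  -> "<URI suppressed>"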
diff --git a/media/libstagefright/VBRISeeker.cpp b/media/libstagefright/VBRISeeker.cpp
index a245f2c..e988f6d 100644
--- a/media/libstagefright/VBRISeeker.cpp
+++ b/media/libstagefright/VBRISeeker.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "VBRISeeker"
+
+#include <inttypes.h>
+
#include <utils/Log.h>
#include "include/VBRISeeker.h"
@@ -75,7 +78,7 @@ sp<VBRISeeker> VBRISeeker::CreateFromSource(
size_t entrySize = U16_AT(&vbriHeader[22]);
size_t scale = U16_AT(&vbriHeader[20]);
- ALOGV("%d entries, scale=%d, size_per_entry=%d",
+ ALOGV("%zu entries, scale=%zu, size_per_entry=%zu",
numEntries,
scale,
entrySize);
@@ -119,7 +122,7 @@ sp<VBRISeeker> VBRISeeker::CreateFromSource(
seeker->mSegments.push(numBytes);
- ALOGV("entry #%d: %d offset 0x%08lx", i, numBytes, offset);
+ ALOGV("entry #%zu: %u offset 0x%016llx", i, numBytes, offset);
offset += numBytes;
}
@@ -160,7 +163,7 @@ bool VBRISeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
*pos += mSegments.itemAt(segmentIndex++);
}
- ALOGV("getOffsetForTime %lld us => 0x%08lx", *timeUs, *pos);
+ ALOGV("getOffsetForTime %" PRId64 " us => 0x%016llx", *timeUs, *pos);
*timeUs = nowUs;
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index 22af6fb..335ac84 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -127,7 +127,7 @@ sp<MediaSource> WAVExtractor::getTrack(size_t index) {
}
sp<MetaData> WAVExtractor::getTrackMetaData(
- size_t index, uint32_t flags) {
+ size_t index, uint32_t /* flags */) {
if (mInitCheck != OK || index > 0) {
return NULL;
}
@@ -358,7 +358,7 @@ WAVSource::~WAVSource() {
}
}
-status_t WAVSource::start(MetaData *params) {
+status_t WAVSource::start(MetaData * /* params */) {
ALOGV("WAVSource::start");
CHECK(!mStarted);
@@ -414,7 +414,7 @@ status_t WAVSource::read(
} else {
pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3);
}
- if (pos > mSize) {
+ if (pos > (off64_t)mSize) {
pos = mSize;
}
mCurrentPos = pos + mOffset;
@@ -446,6 +446,10 @@ status_t WAVSource::read(
maxBytesToRead = 1024;
}
maxBytesToRead = (maxBytesToRead / 65) * 65;
+ } else {
+ // read only integral amounts of audio unit frames.
+ const size_t inputUnitFrameSize = mNumChannels * mBitsPerSample / 8;
+ maxBytesToRead -= maxBytesToRead % inputUnitFrameSize;
}
ssize_t n = mDataSource->readAt(
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index b822868..5ec3438 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -40,6 +40,25 @@ unsigned parseUE(ABitReader *br) {
return x + (1u << numZeroes) - 1;
}
+signed parseSE(ABitReader *br) {
+ unsigned codeNum = parseUE(br);
+
+ return (codeNum & 1) ? (codeNum + 1) / 2 : -(codeNum / 2);
+}
+
+static void skipScalingList(ABitReader *br, size_t sizeOfScalingList) {
+ size_t lastScale = 8;
+ size_t nextScale = 8;
+ for (size_t j = 0; j < sizeOfScalingList; ++j) {
+ if (nextScale != 0) {
+ signed delta_scale = parseSE(br);
+ nextScale = (lastScale + delta_scale + 256) % 256;
+ }
+
+ lastScale = (nextScale == 0) ? lastScale : nextScale;
+ }
+}
+
// Determine video dimensions from the sequence parameterset.
void FindAVCDimensions(
const sp<ABuffer> &seqParamSet,
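parseSE() above decodes a signed Exp-Golomb value by remapping the unsigned codeNum, which is what skipScalingList() needs for delta_scale. The mapping alternates positive and negative values; a tiny self-check of it (illustrative only, matching H.264 section 9.1.1):

    #include <cassert>
    #include <cstdint>

    // codeNum: 0  1  2  3  4 ...   ->   se(v): 0  1  -1  2  -2 ...
    static int32_t ueToSe(uint32_t codeNum) {
        return (codeNum & 1) ? (int32_t)((codeNum + 1) / 2)
                             : -(int32_t)(codeNum / 2);
    }

    int main() {
        assert(ueToSe(0) == 0);
        assert(ueToSe(1) == 1);
        assert(ueToSe(2) == -1);
        assert(ueToSe(3) == 2);
        assert(ueToSe(4) == -2);
        return 0;
    }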
@@ -63,7 +82,24 @@ void FindAVCDimensions(
parseUE(&br); // bit_depth_luma_minus8
parseUE(&br); // bit_depth_chroma_minus8
br.skipBits(1); // qpprime_y_zero_transform_bypass_flag
- CHECK_EQ(br.getBits(1), 0u); // seq_scaling_matrix_present_flag
+
+ if (br.getBits(1)) { // seq_scaling_matrix_present_flag
+ for (size_t i = 0; i < 8; ++i) {
+ if (br.getBits(1)) { // seq_scaling_list_present_flag[i]
+
+ // WARNING: the code below has not ever been exercised...
+ // need a real-world example.
+
+ if (i < 6) {
+ // ScalingList4x4[i],16,...
+ skipScalingList(&br, 16);
+ } else {
+ // ScalingList8x8[i-6],64,...
+ skipScalingList(&br, 64);
+ }
+ }
+ }
+ }
}
parseUE(&br); // log2_max_frame_num_minus4
@@ -186,28 +222,25 @@ status_t getNextNALUnit(
*nalStart = NULL;
*nalSize = 0;
- if (size == 0) {
+ if (size < 3) {
return -EAGAIN;
}
- // Skip any number of leading 0x00.
-
size_t offset = 0;
- while (offset < size && data[offset] == 0x00) {
- ++offset;
- }
-
- if (offset == size) {
- return -EAGAIN;
- }
// A valid startcode consists of at least two 0x00 bytes followed by 0x01.
-
- if (offset < 2 || data[offset] != 0x01) {
- return ERROR_MALFORMED;
+ for (; offset + 2 < size; ++offset) {
+ if (data[offset + 2] == 0x01 && data[offset] == 0x00
+ && data[offset + 1] == 0x00) {
+ break;
+ }
}
-
- ++offset;
+ if (offset + 2 >= size) {
+ *_data = &data[offset];
+ *_size = 2;
+ return -EAGAIN;
+ }
+ offset += 3;
size_t startOffset = offset;
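getNextNALUnit() no longer insists that the buffer begin with a start code: it scans forward for the 00 00 01 sequence and, when no full start code remains, leaves the last couple of bytes unconsumed and returns -EAGAIN so the caller can stitch them onto the next read. The scan itself reduces to the following (stand-alone restatement, not the shipped function):

    #include <cstddef>
    #include <cstdint>

    // Returns the offset of the first 00 00 01 start code in data[0..size),
    // or 'size' if none is present.
    static size_t findStartCode(const uint8_t *data, size_t size) {
        if (size < 3) {
            return size;
        }
        for (size_t offset = 0; offset + 2 < size; ++offset) {
            if (data[offset] == 0x00 && data[offset + 1] == 0x00
                    && data[offset + 2] == 0x01) {
                return offset;
            }
        }
        return size;
    }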
@@ -251,9 +284,7 @@ status_t getNextNALUnit(
return OK;
}
-static sp<ABuffer> FindNAL(
- const uint8_t *data, size_t size, unsigned nalType,
- size_t *stopOffset) {
+static sp<ABuffer> FindNAL(const uint8_t *data, size_t size, unsigned nalType) {
const uint8_t *nalStart;
size_t nalSize;
while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
@@ -293,7 +324,7 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
const uint8_t *data = accessUnit->data();
size_t size = accessUnit->size();
- sp<ABuffer> seqParamSet = FindNAL(data, size, 7, NULL);
+ sp<ABuffer> seqParamSet = FindNAL(data, size, 7);
if (seqParamSet == NULL) {
return NULL;
}
@@ -303,8 +334,7 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
FindAVCDimensions(
seqParamSet, &width, &height, &sarWidth, &sarHeight);
- size_t stopOffset;
- sp<ABuffer> picParamSet = FindNAL(data, size, 8, &stopOffset);
+ sp<ABuffer> picParamSet = FindNAL(data, size, 8);
CHECK(picParamSet != NULL);
size_t csdSize =
@@ -475,8 +505,8 @@ bool ExtractDimensionsFromVOLHeader(
CHECK_NE(video_object_type_indication,
0x21u /* Fine Granularity Scalable */);
- unsigned video_object_layer_verid;
- unsigned video_object_layer_priority;
+ unsigned video_object_layer_verid __unused;
+ unsigned video_object_layer_priority __unused;
if (br.getBits(1)) {
video_object_layer_verid = br.getBits(4);
video_object_layer_priority = br.getBits(3);
@@ -538,7 +568,7 @@ bool ExtractDimensionsFromVOLHeader(
unsigned video_object_layer_height = br.getBits(13);
CHECK(br.getBits(1)); // marker_bit
- unsigned interlaced = br.getBits(1);
+ unsigned interlaced __unused = br.getBits(1);
*width = video_object_layer_width;
*height = video_object_layer_height;
@@ -584,7 +614,7 @@ bool GetMPEGAudioFrameSize(
return false;
}
- unsigned protection = (header >> 16) & 1;
+ unsigned protection __unused = (header >> 16) & 1;
unsigned bitrate_index = (header >> 12) & 0x0f;
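For orientation, the bit fields this function pulls out of the 32-bit MPEG audio frame header follow the standard layout (reference sketch only, not part of the patch):

// 32-bit MPEG audio frame header, most significant bit first:
//   bits 31..21  sync word (all ones)
//   bits 20..19  MPEG version ID          -> (header >> 19) & 3
//   bits 18..17  layer                    -> (header >> 17) & 3
//   bit  16      protection (CRC absent)  -> (header >> 16) & 1
//   bits 15..12  bitrate index            -> (header >> 12) & 0x0f
//   bits 11..10  sampling rate index      -> (header >> 10) & 3
//   bit  9       padding bit              -> (header >> 9) & 1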
diff --git a/media/libstagefright/chromium_http/Android.mk b/media/libstagefright/chromium_http/Android.mk
deleted file mode 100644
index f26f386..0000000
--- a/media/libstagefright/chromium_http/Android.mk
+++ /dev/null
@@ -1,37 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-ifneq ($(TARGET_BUILD_PDK), true)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- DataUriSource.cpp \
- ChromiumHTTPDataSource.cpp \
- support.cpp \
- chromium_http_stub.cpp
-
-LOCAL_C_INCLUDES:= \
- $(TOP)/frameworks/av/media/libstagefright \
- $(TOP)/frameworks/native/include/media/openmax \
- external/chromium \
- external/chromium/android
-
-LOCAL_CFLAGS += -Wno-multichar
-
-LOCAL_SHARED_LIBRARIES += \
- libstlport \
- libchromium_net \
- libutils \
- libcutils \
- liblog \
- libstagefright_foundation \
- libstagefright \
- libdrmframework
-
-include external/stlport/libstlport.mk
-
-LOCAL_MODULE:= libstagefright_chromium_http
-
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_SHARED_LIBRARY)
-endif
diff --git a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
deleted file mode 100644
index 7e5c280..0000000
--- a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
+++ /dev/null
@@ -1,355 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ChromiumHTTPDataSource"
-#include <media/stagefright/foundation/ADebug.h>
-
-#include "include/ChromiumHTTPDataSource.h"
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include "support.h"
-
-#include <cutils/properties.h> // for property_get
-
-namespace android {
-
-ChromiumHTTPDataSource::ChromiumHTTPDataSource(uint32_t flags)
- : mFlags(flags),
- mState(DISCONNECTED),
- mDelegate(new SfDelegate),
- mCurrentOffset(0),
- mIOResult(OK),
- mContentSize(-1),
- mDecryptHandle(NULL),
- mDrmManagerClient(NULL) {
- mDelegate->setOwner(this);
-}
-
-ChromiumHTTPDataSource::~ChromiumHTTPDataSource() {
- disconnect();
-
- delete mDelegate;
- mDelegate = NULL;
-
- clearDRMState_l();
-
- if (mDrmManagerClient != NULL) {
- delete mDrmManagerClient;
- mDrmManagerClient = NULL;
- }
-}
-
-status_t ChromiumHTTPDataSource::connect(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- Mutex::Autolock autoLock(mLock);
-
- uid_t uid;
- if (getUID(&uid)) {
- mDelegate->setUID(uid);
- }
-
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "connect on behalf of uid %d", uid);
-#endif
-
- return connect_l(uri, headers, offset);
-}
-
-status_t ChromiumHTTPDataSource::connect_l(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- if (mState != DISCONNECTED) {
- disconnect_l();
- }
-
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG,
- "connect to <URL suppressed> @%lld", offset);
-#endif
-
- mURI = uri;
- mContentType = String8("application/octet-stream");
-
- if (headers != NULL) {
- mHeaders = *headers;
- } else {
- mHeaders.clear();
- }
-
- mState = CONNECTING;
- mContentSize = -1;
- mCurrentOffset = offset;
-
- mDelegate->initiateConnection(mURI.c_str(), &mHeaders, offset);
-
- while (mState == CONNECTING || mState == DISCONNECTING) {
- mCondition.wait(mLock);
- }
-
- return mState == CONNECTED ? OK : mIOResult;
-}
-
-void ChromiumHTTPDataSource::onRedirect(const char *url) {
- Mutex::Autolock autoLock(mLock);
- mURI = url;
-}
-
-void ChromiumHTTPDataSource::onConnectionEstablished(
- int64_t contentSize, const char *contentType) {
- Mutex::Autolock autoLock(mLock);
-
- if (mState != CONNECTING) {
- // We may have initiated disconnection.
- CHECK_EQ(mState, DISCONNECTING);
- return;
- }
-
- mState = CONNECTED;
- mContentSize = (contentSize < 0) ? -1 : contentSize + mCurrentOffset;
- mContentType = String8(contentType);
- mCondition.broadcast();
-}
-
-void ChromiumHTTPDataSource::onConnectionFailed(status_t err) {
- Mutex::Autolock autoLock(mLock);
- mState = DISCONNECTED;
- mCondition.broadcast();
-
- // mURI.clear();
-
- mIOResult = err;
-}
-
-void ChromiumHTTPDataSource::disconnect() {
- Mutex::Autolock autoLock(mLock);
- disconnect_l();
-}
-
-void ChromiumHTTPDataSource::disconnect_l() {
- if (mState == DISCONNECTED) {
- return;
- }
-
- mState = DISCONNECTING;
- mIOResult = -EINTR;
-
- mDelegate->initiateDisconnect();
-
- while (mState == DISCONNECTING) {
- mCondition.wait(mLock);
- }
-
- CHECK_EQ((int)mState, (int)DISCONNECTED);
-}
-
-status_t ChromiumHTTPDataSource::initCheck() const {
- Mutex::Autolock autoLock(mLock);
-
- return mState == CONNECTED ? OK : NO_INIT;
-}
-
-ssize_t ChromiumHTTPDataSource::readAt(off64_t offset, void *data, size_t size) {
- Mutex::Autolock autoLock(mLock);
-
- if (mState != CONNECTED) {
- return INVALID_OPERATION;
- }
-
-#if 0
- char value[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.disable-net", value, 0)
- && (!strcasecmp(value, "true") || !strcmp(value, "1"))) {
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Simulating that the network is down.");
- disconnect_l();
- return ERROR_IO;
- }
-#endif
-
- if (offset != mCurrentOffset) {
- AString tmp = mURI;
- KeyedVector<String8, String8> tmpHeaders = mHeaders;
-
- disconnect_l();
-
- status_t err = connect_l(tmp.c_str(), &tmpHeaders, offset);
-
- if (err != OK) {
- return err;
- }
- }
-
- mState = READING;
-
- int64_t startTimeUs = ALooper::GetNowUs();
-
- mDelegate->initiateRead(data, size);
-
- while (mState == READING) {
- mCondition.wait(mLock);
- }
-
- if (mIOResult < OK) {
- return mIOResult;
- }
-
- if (mState == CONNECTED) {
- int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
-
- // The read operation was successful, mIOResult contains
- // the number of bytes read.
- addBandwidthMeasurement(mIOResult, delayUs);
-
- mCurrentOffset += mIOResult;
- return mIOResult;
- }
-
- return ERROR_IO;
-}
-
-void ChromiumHTTPDataSource::onReadCompleted(ssize_t size) {
- Mutex::Autolock autoLock(mLock);
-
- mIOResult = size;
-
- if (mState == READING) {
- mState = CONNECTED;
- mCondition.broadcast();
- }
-}
-
-status_t ChromiumHTTPDataSource::getSize(off64_t *size) {
- Mutex::Autolock autoLock(mLock);
-
- if (mContentSize < 0) {
- return ERROR_UNSUPPORTED;
- }
-
- *size = mContentSize;
-
- return OK;
-}
-
-uint32_t ChromiumHTTPDataSource::flags() {
- return kWantsPrefetching | kIsHTTPBasedSource;
-}
-
-// static
-void ChromiumHTTPDataSource::InitiateRead(
- ChromiumHTTPDataSource *me, void *data, size_t size) {
- me->initiateRead(data, size);
-}
-
-void ChromiumHTTPDataSource::initiateRead(void *data, size_t size) {
- mDelegate->initiateRead(data, size);
-}
-
-void ChromiumHTTPDataSource::onDisconnectComplete() {
- Mutex::Autolock autoLock(mLock);
- CHECK_EQ((int)mState, (int)DISCONNECTING);
-
- mState = DISCONNECTED;
- // mURI.clear();
- mIOResult = -ENOTCONN;
-
- mCondition.broadcast();
-}
-
-sp<DecryptHandle> ChromiumHTTPDataSource::DrmInitialization(const char* mime) {
- Mutex::Autolock autoLock(mLock);
-
- if (mDrmManagerClient == NULL) {
- mDrmManagerClient = new DrmManagerClient();
- }
-
- if (mDrmManagerClient == NULL) {
- return NULL;
- }
-
- if (mDecryptHandle == NULL) {
- /* Note if redirect occurs, mUri is the redirect uri instead of the
- * original one
- */
- mDecryptHandle = mDrmManagerClient->openDecryptSession(
- String8(mURI.c_str()), mime);
- }
-
- if (mDecryptHandle == NULL) {
- delete mDrmManagerClient;
- mDrmManagerClient = NULL;
- }
-
- return mDecryptHandle;
-}
-
-void ChromiumHTTPDataSource::getDrmInfo(
- sp<DecryptHandle> &handle, DrmManagerClient **client) {
- Mutex::Autolock autoLock(mLock);
-
- handle = mDecryptHandle;
- *client = mDrmManagerClient;
-}
-
-String8 ChromiumHTTPDataSource::getUri() {
- Mutex::Autolock autoLock(mLock);
-
- return String8(mURI.c_str());
-}
-
-String8 ChromiumHTTPDataSource::getMIMEType() const {
- Mutex::Autolock autoLock(mLock);
-
- return mContentType;
-}
-
-void ChromiumHTTPDataSource::clearDRMState_l() {
- if (mDecryptHandle != NULL) {
- // To release mDecryptHandle
- CHECK(mDrmManagerClient);
- mDrmManagerClient->closeDecryptSession(mDecryptHandle);
- mDecryptHandle = NULL;
- }
-}
-
-status_t ChromiumHTTPDataSource::reconnectAtOffset(off64_t offset) {
- Mutex::Autolock autoLock(mLock);
-
- if (mURI.empty()) {
- return INVALID_OPERATION;
- }
-
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnecting...");
- status_t err = connect_l(mURI.c_str(), &mHeaders, offset);
- if (err != OK) {
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnect failed w/ err 0x%08x", err);
- }
-
- return err;
-}
-
-// static
-status_t ChromiumHTTPDataSource::UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- return SfDelegate::UpdateProxyConfig(host, port, exclusionList);
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/chromium_http/DataUriSource.cpp b/media/libstagefright/chromium_http/DataUriSource.cpp
deleted file mode 100644
index ecf3fa1..0000000
--- a/media/libstagefright/chromium_http/DataUriSource.cpp
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <include/DataUriSource.h>
-
-#include <net/base/data_url.h>
-#include <googleurl/src/gurl.h>
-
-
-namespace android {
-
-DataUriSource::DataUriSource(const char *uri) :
- mDataUri(uri),
- mInited(NO_INIT) {
-
- // Copy1: const char *uri -> String8 mDataUri.
- std::string mimeTypeStr, unusedCharsetStr, dataStr;
- // Copy2: String8 mDataUri -> std::string
- const bool ret = net::DataURL::Parse(
- GURL(std::string(mDataUri.string())),
- &mimeTypeStr, &unusedCharsetStr, &dataStr);
- // Copy3: std::string dataStr -> AString mData
- mData.setTo(dataStr.data(), dataStr.length());
- mInited = ret ? OK : UNKNOWN_ERROR;
-
- // The chromium data url implementation defaults to using "text/plain"
- // if no mime type is specified. We prefer to leave this unspecified
- // instead, since the mime type is sniffed in most cases.
- if (mimeTypeStr != "text/plain") {
- mMimeType = mimeTypeStr.c_str();
- }
-}
-
-ssize_t DataUriSource::readAt(off64_t offset, void *out, size_t size) {
- if (mInited != OK) {
- return mInited;
- }
-
- const off64_t length = mData.size();
- if (offset >= length) {
- return UNKNOWN_ERROR;
- }
-
- const char *dataBuf = mData.c_str();
- const size_t bytesToCopy =
- offset + size >= length ? (length - offset) : size;
-
- if (bytesToCopy > 0) {
- memcpy(out, dataBuf + offset, bytesToCopy);
- }
-
- return bytesToCopy;
-}
-
-} // namespace android
diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp
deleted file mode 100644
index 3b33212..0000000
--- a/media/libstagefright/chromium_http/support.cpp
+++ /dev/null
@@ -1,559 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ChromiumHTTPDataSourceSupport"
-#include <utils/Log.h>
-
-#include <media/stagefright/foundation/AString.h>
-
-#include "support.h"
-
-#include "android/net/android_network_library_impl.h"
-#include "base/logging.h"
-#include "base/threading/thread.h"
-#include "net/base/cert_verifier.h"
-#include "net/base/cookie_monster.h"
-#include "net/base/host_resolver.h"
-#include "net/base/ssl_config_service.h"
-#include "net/http/http_auth_handler_factory.h"
-#include "net/http/http_cache.h"
-#include "net/proxy/proxy_config_service_android.h"
-
-#include "include/ChromiumHTTPDataSource.h"
-
-#include <cutils/log.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-#include <string>
-
-namespace android {
-
-static Mutex gNetworkThreadLock;
-static base::Thread *gNetworkThread = NULL;
-static scoped_refptr<SfRequestContext> gReqContext;
-static scoped_ptr<net::NetworkChangeNotifier> gNetworkChangeNotifier;
-
-bool logMessageHandler(
- int severity,
- const char* file,
- int line,
- size_t message_start,
- const std::string& str) {
- int androidSeverity = ANDROID_LOG_VERBOSE;
- switch(severity) {
- case logging::LOG_FATAL:
- androidSeverity = ANDROID_LOG_FATAL;
- break;
- case logging::LOG_ERROR_REPORT:
- case logging::LOG_ERROR:
- androidSeverity = ANDROID_LOG_ERROR;
- break;
- case logging::LOG_WARNING:
- androidSeverity = ANDROID_LOG_WARN;
- break;
- default:
- androidSeverity = ANDROID_LOG_VERBOSE;
- break;
- }
- android_printLog(androidSeverity, "chromium-libstagefright",
- "%s:%d: %s", file, line, str.c_str());
- return false;
-}
-
-struct AutoPrioritySaver {
- AutoPrioritySaver()
- : mTID(androidGetTid()),
- mPrevPriority(androidGetThreadPriority(mTID)) {
- androidSetThreadPriority(mTID, ANDROID_PRIORITY_NORMAL);
- }
-
- ~AutoPrioritySaver() {
- androidSetThreadPriority(mTID, mPrevPriority);
- }
-
-private:
- pid_t mTID;
- int mPrevPriority;
-
- DISALLOW_EVIL_CONSTRUCTORS(AutoPrioritySaver);
-};
-
-static void InitializeNetworkThreadIfNecessary() {
- Mutex::Autolock autoLock(gNetworkThreadLock);
-
- if (gNetworkThread == NULL) {
- // Make sure any threads spawned by the chromium framework are
- // running at normal priority instead of inheriting this thread's.
- AutoPrioritySaver saver;
-
- gNetworkThread = new base::Thread("network");
- base::Thread::Options options;
- options.message_loop_type = MessageLoop::TYPE_IO;
- CHECK(gNetworkThread->StartWithOptions(options));
-
- gReqContext = new SfRequestContext;
-
- gNetworkChangeNotifier.reset(net::NetworkChangeNotifier::Create());
-
- net::AndroidNetworkLibrary::RegisterSharedInstance(
- new SfNetworkLibrary);
- logging::SetLogMessageHandler(logMessageHandler);
- }
-}
-
-static void MY_LOGI(const char *s) {
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "%s", s);
-}
-
-static void MY_LOGV(const char *s) {
-#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "%s", s);
-#endif
-}
-
-SfNetLog::SfNetLog()
- : mNextID(1) {
-}
-
-void SfNetLog::AddEntry(
- EventType type,
- const base::TimeTicks &time,
- const Source &source,
- EventPhase phase,
- EventParameters *params) {
-#if 0
- MY_LOGI(StringPrintf(
- "AddEntry time=%s type=%s source=%s phase=%s\n",
- TickCountToString(time).c_str(),
- EventTypeToString(type),
- SourceTypeToString(source.type),
- EventPhaseToString(phase)).c_str());
-#endif
-}
-
-uint32 SfNetLog::NextID() {
- return mNextID++;
-}
-
-net::NetLog::LogLevel SfNetLog::GetLogLevel() const {
- return LOG_BASIC;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfRequestContext::SfRequestContext() {
- mUserAgent = MakeUserAgent().c_str();
-
- set_net_log(new SfNetLog());
-
- set_host_resolver(
- net::CreateSystemHostResolver(
- net::HostResolver::kDefaultParallelism,
- NULL /* resolver_proc */,
- net_log()));
-
- set_ssl_config_service(
- net::SSLConfigService::CreateSystemSSLConfigService());
-
- mProxyConfigService = new net::ProxyConfigServiceAndroid;
-
- set_proxy_service(net::ProxyService::CreateWithoutProxyResolver(
- mProxyConfigService, net_log()));
-
- set_http_transaction_factory(new net::HttpCache(
- host_resolver(),
- new net::CertVerifier(),
- dnsrr_resolver(),
- dns_cert_checker(),
- proxy_service(),
- ssl_config_service(),
- net::HttpAuthHandlerFactory::CreateDefault(host_resolver()),
- network_delegate(),
- net_log(),
- NULL)); // backend_factory
-
- set_cookie_store(new net::CookieMonster(NULL, NULL));
-}
-
-const std::string &SfRequestContext::GetUserAgent(const GURL &url) const {
- return mUserAgent;
-}
-
-status_t SfRequestContext::updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- Mutex::Autolock autoLock(mProxyConfigLock);
-
- if (host == NULL || *host == '\0') {
- MY_LOGV("updateProxyConfig NULL");
-
- std::string proxy;
- std::string exList;
- mProxyConfigService->UpdateProxySettings(proxy, exList);
- } else {
-#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG,
- "updateProxyConfig %s:%d, exclude '%s'",
- host, port, exclusionList);
-#endif
-
- std::string proxy = StringPrintf("%s:%d", host, port).c_str();
- std::string exList = exclusionList;
- mProxyConfigService->UpdateProxySettings(proxy, exList);
- }
-
- return OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfNetworkLibrary::SfNetworkLibrary() {}
-
-SfNetworkLibrary::VerifyResult SfNetworkLibrary::VerifyX509CertChain(
- const std::vector<std::string>& cert_chain,
- const std::string& hostname,
- const std::string& auth_type) {
- return VERIFY_OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfDelegate::SfDelegate()
- : mOwner(NULL),
- mURLRequest(NULL),
- mReadBuffer(new net::IOBufferWithSize(8192)),
- mNumBytesRead(0),
- mNumBytesTotal(0),
- mDataDestination(NULL),
- mAtEOS(false) {
- InitializeNetworkThreadIfNecessary();
-}
-
-SfDelegate::~SfDelegate() {
- CHECK(mURLRequest == NULL);
-}
-
-// static
-status_t SfDelegate::UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- InitializeNetworkThreadIfNecessary();
-
- return gReqContext->updateProxyConfig(host, port, exclusionList);
-}
-
-void SfDelegate::setOwner(ChromiumHTTPDataSource *owner) {
- mOwner = owner;
-}
-
-void SfDelegate::setUID(uid_t uid) {
- gReqContext->setUID(uid);
-}
-
-bool SfDelegate::getUID(uid_t *uid) const {
- return gReqContext->getUID(uid);
-}
-
-void SfDelegate::OnReceivedRedirect(
- net::URLRequest *request, const GURL &new_url, bool *defer_redirect) {
- MY_LOGV("OnReceivedRedirect");
- mOwner->onRedirect(new_url.spec().c_str());
-}
-
-void SfDelegate::OnAuthRequired(
- net::URLRequest *request, net::AuthChallengeInfo *auth_info) {
- MY_LOGV("OnAuthRequired");
-
- inherited::OnAuthRequired(request, auth_info);
-}
-
-void SfDelegate::OnCertificateRequested(
- net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info) {
- MY_LOGV("OnCertificateRequested");
-
- inherited::OnCertificateRequested(request, cert_request_info);
-}
-
-void SfDelegate::OnSSLCertificateError(
- net::URLRequest *request, int cert_error, net::X509Certificate *cert) {
- fprintf(stderr, "OnSSLCertificateError cert_error=%d\n", cert_error);
-
- inherited::OnSSLCertificateError(request, cert_error, cert);
-}
-
-void SfDelegate::OnGetCookies(net::URLRequest *request, bool blocked_by_policy) {
- MY_LOGV("OnGetCookies");
-}
-
-void SfDelegate::OnSetCookie(
- net::URLRequest *request,
- const std::string &cookie_line,
- const net::CookieOptions &options,
- bool blocked_by_policy) {
- MY_LOGV("OnSetCookie");
-}
-
-void SfDelegate::OnResponseStarted(net::URLRequest *request) {
- if (request->status().status() != net::URLRequestStatus::SUCCESS) {
- MY_LOGI(StringPrintf(
- "Request failed with status %d and os_error %d",
- request->status().status(),
- request->status().os_error()).c_str());
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onConnectionFailed(ERROR_IO);
- return;
- } else if (mRangeRequested && request->GetResponseCode() != 206) {
- MY_LOGI(StringPrintf(
- "We requested a content range, but server didn't "
- "support that. (responded with %d)",
- request->GetResponseCode()).c_str());
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onConnectionFailed(-EPIPE);
- return;
- } else if ((request->GetResponseCode() / 100) != 2) {
- MY_LOGI(StringPrintf(
- "Server responded with http status %d",
- request->GetResponseCode()).c_str());
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onConnectionFailed(ERROR_IO);
- return;
- }
-
- MY_LOGV("OnResponseStarted");
-
- std::string headers;
- request->GetAllResponseHeaders(&headers);
-
- MY_LOGV(StringPrintf("response headers: %s", headers.c_str()).c_str());
-
- std::string contentType;
- request->GetResponseHeaderByName("Content-Type", &contentType);
-
- mOwner->onConnectionEstablished(
- request->GetExpectedContentSize(), contentType.c_str());
-}
-
-void SfDelegate::OnReadCompleted(net::URLRequest *request, int bytes_read) {
- if (bytes_read == -1) {
- MY_LOGI(StringPrintf(
- "OnReadCompleted, read failed, status %d",
- request->status().status()).c_str());
-
- mOwner->onReadCompleted(ERROR_IO);
- return;
- }
-
- MY_LOGV(StringPrintf("OnReadCompleted, read %d bytes", bytes_read).c_str());
-
- if (bytes_read < 0) {
- MY_LOGI(StringPrintf(
- "Read failed w/ status %d\n",
- request->status().status()).c_str());
-
- mOwner->onReadCompleted(ERROR_IO);
- return;
- } else if (bytes_read == 0) {
- mAtEOS = true;
- mOwner->onReadCompleted(mNumBytesRead);
- return;
- }
-
- CHECK_GT(bytes_read, 0);
- CHECK_LE(mNumBytesRead + bytes_read, mNumBytesTotal);
-
- memcpy((uint8_t *)mDataDestination + mNumBytesRead,
- mReadBuffer->data(),
- bytes_read);
-
- mNumBytesRead += bytes_read;
-
- readMore(request);
-}
-
-void SfDelegate::readMore(net::URLRequest *request) {
- while (mNumBytesRead < mNumBytesTotal) {
- size_t copy = mNumBytesTotal - mNumBytesRead;
- if (copy > mReadBuffer->size()) {
- copy = mReadBuffer->size();
- }
-
- int n;
- if (request->Read(mReadBuffer, copy, &n)) {
- MY_LOGV(StringPrintf("Read %d bytes directly.", n).c_str());
-
- CHECK_LE((size_t)n, copy);
-
- memcpy((uint8_t *)mDataDestination + mNumBytesRead,
- mReadBuffer->data(),
- n);
-
- mNumBytesRead += n;
-
- if (n == 0) {
- mAtEOS = true;
- break;
- }
- } else {
- MY_LOGV("readMore pending read");
-
- if (request->status().status() != net::URLRequestStatus::IO_PENDING) {
- MY_LOGI(StringPrintf(
- "Direct read failed w/ status %d\n",
- request->status().status()).c_str());
-
- mOwner->onReadCompleted(ERROR_IO);
- return;
- }
-
- return;
- }
- }
-
- mOwner->onReadCompleted(mNumBytesRead);
-}
-
-void SfDelegate::initiateConnection(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- GURL url(uri);
-
- MessageLoop *loop = gNetworkThread->message_loop();
- loop->PostTask(
- FROM_HERE,
- NewRunnableFunction(
- &SfDelegate::OnInitiateConnectionWrapper,
- this,
- url,
- headers,
- offset));
-
-}
-
-// static
-void SfDelegate::OnInitiateConnectionWrapper(
- SfDelegate *me, GURL url,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- me->onInitiateConnection(url, headers, offset);
-}
-
-void SfDelegate::onInitiateConnection(
- const GURL &url,
- const KeyedVector<String8, String8> *extra,
- off64_t offset) {
- CHECK(mURLRequest == NULL);
-
- mURLRequest = new net::URLRequest(url, this);
- mAtEOS = false;
-
- mRangeRequested = false;
-
- if (offset != 0 || extra != NULL) {
- net::HttpRequestHeaders headers =
- mURLRequest->extra_request_headers();
-
- if (offset != 0) {
- headers.AddHeaderFromString(
- StringPrintf("Range: bytes=%lld-", offset).c_str());
-
- mRangeRequested = true;
- }
-
- if (extra != NULL) {
- for (size_t i = 0; i < extra->size(); ++i) {
- AString s;
- s.append(extra->keyAt(i).string());
- s.append(": ");
- s.append(extra->valueAt(i).string());
-
- headers.AddHeaderFromString(s.c_str());
- }
- }
-
- mURLRequest->SetExtraRequestHeaders(headers);
- }
-
- mURLRequest->set_context(gReqContext);
-
- mURLRequest->Start();
-}
-
-void SfDelegate::initiateDisconnect() {
- MessageLoop *loop = gNetworkThread->message_loop();
- loop->PostTask(
- FROM_HERE,
- NewRunnableFunction(
- &SfDelegate::OnInitiateDisconnectWrapper, this));
-}
-
-// static
-void SfDelegate::OnInitiateDisconnectWrapper(SfDelegate *me) {
- me->onInitiateDisconnect();
-}
-
-void SfDelegate::onInitiateDisconnect() {
- if (mURLRequest == NULL) {
- return;
- }
-
- mURLRequest->Cancel();
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onDisconnectComplete();
-}
-
-void SfDelegate::initiateRead(void *data, size_t size) {
- MessageLoop *loop = gNetworkThread->message_loop();
- loop->PostTask(
- FROM_HERE,
- NewRunnableFunction(
- &SfDelegate::OnInitiateReadWrapper, this, data, size));
-}
-
-// static
-void SfDelegate::OnInitiateReadWrapper(
- SfDelegate *me, void *data, size_t size) {
- me->onInitiateRead(data, size);
-}
-
-void SfDelegate::onInitiateRead(void *data, size_t size) {
- CHECK(mURLRequest != NULL);
-
- mNumBytesRead = 0;
- mNumBytesTotal = size;
- mDataDestination = data;
-
- if (mAtEOS) {
- mOwner->onReadCompleted(0);
- return;
- }
-
- readMore(mURLRequest);
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/chromium_http/support.h b/media/libstagefright/chromium_http/support.h
deleted file mode 100644
index 975a1d3..0000000
--- a/media/libstagefright/chromium_http/support.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SUPPORT_H_
-
-#define SUPPORT_H_
-
-#include <assert.h>
-
-#include "net/base/net_log.h"
-#include "net/url_request/url_request.h"
-#include "net/url_request/url_request_context.h"
-#include "net/base/android_network_library.h"
-#include "net/base/io_buffer.h"
-
-#include <utils/KeyedVector.h>
-#include <utils/Mutex.h>
-#include <utils/String8.h>
-
-namespace net {
- struct ProxyConfigServiceAndroid;
-};
-
-namespace android {
-
-struct SfNetLog : public net::NetLog {
- SfNetLog();
-
- virtual void AddEntry(
- EventType type,
- const base::TimeTicks &time,
- const Source &source,
- EventPhase phase,
- EventParameters *params);
-
- virtual uint32 NextID();
- virtual LogLevel GetLogLevel() const;
-
-private:
- uint32 mNextID;
-
- DISALLOW_EVIL_CONSTRUCTORS(SfNetLog);
-};
-
-struct SfRequestContext : public net::URLRequestContext {
- SfRequestContext();
-
- virtual const std::string &GetUserAgent(const GURL &url) const;
-
- status_t updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
-private:
- Mutex mProxyConfigLock;
-
- std::string mUserAgent;
- net::ProxyConfigServiceAndroid *mProxyConfigService;
-
- DISALLOW_EVIL_CONSTRUCTORS(SfRequestContext);
-};
-
-// This is required for https support, we don't really verify certificates,
-// we accept anything...
-struct SfNetworkLibrary : public net::AndroidNetworkLibrary {
- SfNetworkLibrary();
-
- virtual VerifyResult VerifyX509CertChain(
- const std::vector<std::string>& cert_chain,
- const std::string& hostname,
- const std::string& auth_type);
-
-private:
- DISALLOW_EVIL_CONSTRUCTORS(SfNetworkLibrary);
-};
-
-struct ChromiumHTTPDataSource;
-
-struct SfDelegate : public net::URLRequest::Delegate {
- SfDelegate();
- virtual ~SfDelegate();
-
- void initiateConnection(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- void initiateDisconnect();
- void initiateRead(void *data, size_t size);
-
- void setOwner(ChromiumHTTPDataSource *mOwner);
-
- // Gets the UID of the calling process
- bool getUID(uid_t *uid) const;
-
- void setUID(uid_t uid);
-
- virtual void OnReceivedRedirect(
- net::URLRequest *request, const GURL &new_url, bool *defer_redirect);
-
- virtual void OnAuthRequired(
- net::URLRequest *request, net::AuthChallengeInfo *auth_info);
-
- virtual void OnCertificateRequested(
- net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info);
-
- virtual void OnSSLCertificateError(
- net::URLRequest *request, int cert_error, net::X509Certificate *cert);
-
- virtual void OnGetCookies(net::URLRequest *request, bool blocked_by_policy);
-
- virtual void OnSetCookie(
- net::URLRequest *request,
- const std::string &cookie_line,
- const net::CookieOptions &options,
- bool blocked_by_policy);
-
- virtual void OnResponseStarted(net::URLRequest *request);
-
- virtual void OnReadCompleted(net::URLRequest *request, int bytes_read);
-
- static status_t UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
-private:
- typedef Delegate inherited;
-
- ChromiumHTTPDataSource *mOwner;
-
- net::URLRequest *mURLRequest;
- scoped_refptr<net::IOBufferWithSize> mReadBuffer;
-
- size_t mNumBytesRead;
- size_t mNumBytesTotal;
- void *mDataDestination;
-
- bool mRangeRequested;
- bool mAtEOS;
-
- void readMore(net::URLRequest *request);
-
- static void OnInitiateConnectionWrapper(
- SfDelegate *me,
- GURL url,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- static void OnInitiateDisconnectWrapper(SfDelegate *me);
-
- static void OnInitiateReadWrapper(
- SfDelegate *me, void *data, size_t size);
-
- void onInitiateConnection(
- const GURL &url,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- void onInitiateDisconnect();
- void onInitiateRead(void *data, size_t size);
-
- DISALLOW_EVIL_CONSTRUCTORS(SfDelegate);
-};
-
-} // namespace android
-
-#endif // SUPPORT_H_
diff --git a/media/libstagefright/chromium_http_stub.cpp b/media/libstagefright/chromium_http_stub.cpp
deleted file mode 100644
index ed8a878..0000000
--- a/media/libstagefright/chromium_http_stub.cpp
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <dlfcn.h>
-
-#include <media/stagefright/DataSource.h>
-
-#include "include/chromium_http_stub.h"
-#include "include/HTTPBase.h"
-
-namespace android {
-
-static bool gFirst = true;
-static void *gHandle;
-static Mutex gLibMutex;
-
-HTTPBase *(*gLib_createChromiumHTTPDataSource)(uint32_t flags);
-DataSource *(*gLib_createDataUriSource)(const char *uri);
-
-status_t (*gLib_UpdateChromiumHTTPDataSourceProxyConfig)(
- const char *host, int32_t port, const char *exclusionList);
-
-static bool load_libstagefright_chromium_http() {
- Mutex::Autolock autoLock(gLibMutex);
- void *sym;
-
- if (!gFirst) {
- return (gHandle != NULL);
- }
-
- gFirst = false;
-
- gHandle = dlopen("libstagefright_chromium_http.so", RTLD_NOW);
- if (gHandle == NULL) {
- return false;
- }
-
- sym = dlsym(gHandle, "createChromiumHTTPDataSource");
- if (sym == NULL) {
- gHandle = NULL;
- return false;
- }
- gLib_createChromiumHTTPDataSource = (HTTPBase *(*)(uint32_t))sym;
-
- sym = dlsym(gHandle, "createDataUriSource");
- if (sym == NULL) {
- gHandle = NULL;
- return false;
- }
- gLib_createDataUriSource = (DataSource *(*)(const char *))sym;
-
- sym = dlsym(gHandle, "UpdateChromiumHTTPDataSourceProxyConfig");
- if (sym == NULL) {
- gHandle = NULL;
- return false;
- }
- gLib_UpdateChromiumHTTPDataSourceProxyConfig =
- (status_t (*)(const char *, int32_t, const char *))sym;
-
- return true;
-}
-
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
- if (!load_libstagefright_chromium_http()) {
- return NULL;
- }
-
- return gLib_createChromiumHTTPDataSource(flags);
-}
-
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- if (!load_libstagefright_chromium_http()) {
- return INVALID_OPERATION;
- }
-
- return gLib_UpdateChromiumHTTPDataSourceProxyConfig(
- host, port, exclusionList);
-}
-
-DataSource *createDataUriSource(const char *uri) {
- if (!load_libstagefright_chromium_http()) {
- return NULL;
- }
-
- return gLib_createDataUriSource(uri);
-}
-
-}
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index ffa64f9..afb00aa 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -3,7 +3,8 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
- SoftAAC2.cpp
+ SoftAAC2.cpp \
+ DrcPresModeWrap.cpp
LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/include \
@@ -17,6 +18,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS :=
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
new file mode 100644
index 0000000..129ad65
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "DrcPresModeWrap.h"
+
+#include <assert.h>
+
+#define LOG_TAG "SoftAAC2_DrcWrapper"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+//#define DRC_PRES_MODE_WRAP_DEBUG
+
+#define GPM_ENCODER_TARGET_LEVEL 64
+#define MAX_TARGET_LEVEL 64
+
+CDrcPresModeWrapper::CDrcPresModeWrapper()
+{
+ mDataUpdate = true;
+
+ /* Data from streamInfo. */
+ /* Initialized to the same values as in the aac decoder */
+ mStreamPRL = -1;
+ mStreamDRCPresMode = -1;
+ mStreamNrAACChan = 0;
+ mStreamNrOutChan = 0;
+
+ /* Desired values (set by user). */
+ /* Initialized to the same values as in the aac decoder */
+ mDesTarget = -1;
+ mDesAttFactor = 0;
+ mDesBoostFactor = 0;
+ mDesHeavy = 0;
+
+ mEncoderTarget = -1;
+
+ /* Values from last time. */
+ /* Initialized to the same values as the desired values */
+ mLastTarget = -1;
+ mLastAttFactor = 0;
+ mLastBoostFactor = 0;
+ mLastHeavy = 0;
+}
+
+CDrcPresModeWrapper::~CDrcPresModeWrapper()
+{
+}
+
+void
+CDrcPresModeWrapper::setDecoderHandle(const HANDLE_AACDECODER handle)
+{
+ mHandleDecoder = handle;
+}
+
+void
+CDrcPresModeWrapper::submitStreamData(CStreamInfo* pStreamInfo)
+{
+ assert(pStreamInfo);
+
+ if (mStreamPRL != pStreamInfo->drcProgRefLev) {
+ mStreamPRL = pStreamInfo->drcProgRefLev;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcProgRefLev is %d\n", mStreamPRL);
+#endif
+ }
+
+ if (mStreamDRCPresMode != pStreamInfo->drcPresMode) {
+ mStreamDRCPresMode = pStreamInfo->drcPresMode;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcPresMode is %d\n", mStreamDRCPresMode);
+#endif
+ }
+
+ if (mStreamNrAACChan != pStreamInfo->aacNumChannels) {
+ mStreamNrAACChan = pStreamInfo->aacNumChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: aacNumChannels is %d\n", mStreamNrAACChan);
+#endif
+ }
+
+ if (mStreamNrOutChan != pStreamInfo->numChannels) {
+ mStreamNrOutChan = pStreamInfo->numChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: numChannels is %d\n", mStreamNrOutChan);
+#endif
+ }
+
+
+
+ if (mStreamNrOutChan<mStreamNrAACChan) {
+ mIsDownmix = true;
+ } else {
+ mIsDownmix = false;
+ }
+
+ if (mIsDownmix && (mStreamNrOutChan == 1)) {
+ mIsMonoDownmix = true;
+ } else {
+ mIsMonoDownmix = false;
+ }
+
+ if (mIsDownmix && mStreamNrOutChan == 2){
+ mIsStereoDownmix = true;
+ } else {
+ mIsStereoDownmix = false;
+ }
+
+}
+
+void
+CDrcPresModeWrapper::setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value)
+{
+ switch (param) {
+ case DRC_PRES_MODE_WRAP_DESIRED_TARGET:
+ mDesTarget = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR:
+ mDesAttFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR:
+ mDesBoostFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_HEAVY:
+ mDesHeavy = value;
+ break;
+ case DRC_PRES_MODE_WRAP_ENCODER_TARGET:
+ mEncoderTarget = value;
+ break;
+ default:
+ break;
+ }
+ mDataUpdate = true;
+}
+
+void
+CDrcPresModeWrapper::update()
+{
+ // Get Data from Decoder
+ int progRefLevel = mStreamPRL;
+ int drcPresMode = mStreamDRCPresMode;
+
+ // by default, do as desired
+ int newTarget = mDesTarget;
+ int newAttFactor = mDesAttFactor;
+ int newBoostFactor = mDesBoostFactor;
+ int newHeavy = mDesHeavy;
+
+ if (mDataUpdate) {
+ // sanity check
+ if (mDesTarget < MAX_TARGET_LEVEL){
+ mDesTarget = MAX_TARGET_LEVEL; // limit target level to -16 dB or below
+ newTarget = MAX_TARGET_LEVEL;
+ }
+
+ if (mEncoderTarget != -1) {
+ if (mDesTarget<124) { // if target level > -31 dB
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+ // no stereo or mono downmixing, calculated scaling of light DRC
+ /* use as little compression as possible */
+ newAttFactor = 0;
+ newBoostFactor = 0;
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ if (mEncoderTarget < mDesTarget) { // if mEncoderTarget > target level
+ // mEncoderTarget > target level > PRL
+ int calcFactor;
+ float calcFactor_norm;
+ // 0.0f < calcFactor_norm < 1.0f
+ calcFactor_norm = (float)(mDesTarget - progRefLevel) /
+ (float)(mEncoderTarget - progRefLevel);
+ calcFactor = (int)(calcFactor_norm*127.0f); // 0 <= calcFactor < 127
+ // calcFactor is the lower limit
+ newAttFactor = (calcFactor>newAttFactor) ? calcFactor : newAttFactor;
+ // new AttFactor will be always = calcFactor, as it is set to 0 before.
+ newBoostFactor = newAttFactor;
+ } else {
+ /* target level > mEncoderTarget > PRL */
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ newBoostFactor = 127;
+ }
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget<92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else {
+ // we perform a downmix, so, we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB: light DRC only needed if we perform downmixing
+ if (mIsDownmix) { // we do downmixing
+ newAttFactor = 127;
+ }
+ }
+ }
+ else { // encoder target level unknown (-1); fall back to the DRC presentation mode
+
+ // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+ if (mStreamNrAACChan > 6) {
+ drcPresMode = 0;
+ }
+
+ switch (drcPresMode) {
+ case 0:
+ default: // presentation mode not indicated
+ {
+
+ if (mDesTarget<124) { // if target level > -31 dB
+ // no stereo or mono downmixing
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127; // at least, use light compression
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget < 92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else{
+ // we perform a downmix, so, we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ // Presentation mode 1 and 2 according to ETSI TS 101 154:
+ // Digital Video Broadcasting (DVB); Specification for the use of Video and Audio Coding
+ // in Broadcasting Applications based on the MPEG-2 Transport Stream,
+ // section C.5.4., "Decoding", and Table C.33
+ // ISO DRC -> newHeavy = 0 (Use light compression, MPEG-style)
+ // Compression_value -> newHeavy = 1 (Use heavy compression, DVB-style)
+ // scaling restricted -> newAttFactor = 127
+
+ case 1: // presentation mode 1, Light:-31/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ newHeavy = 1;
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ case 2: // presentation mode 2, Light:-23/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ if (mIsMonoDownmix) { // if mono downmix
+ newHeavy = 1;
+ } else {
+ newHeavy = 0;
+ newAttFactor = 127;
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ newHeavy = 0;
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ } // switch()
+ } // if (mEncoderTarget != -1)
+
+ // sanity again
+ if (newHeavy == 1) {
+ newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
+ newAttFactor = 127;
+ }
+
+ // update the decoder
+ if (newTarget != mLastTarget) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_REFERENCE_LEVEL, newTarget);
+ mLastTarget = newTarget;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newTarget != mDesTarget)
+ ALOGV("DRC presentation mode wrapper: forced target level to %d (from %d)\n", newTarget, mDesTarget);
+ else
+ ALOGV("DRC presentation mode wrapper: set target level to %d\n", newTarget);
+#endif
+ }
+
+ if (newAttFactor != mLastAttFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_ATTENUATION_FACTOR, newAttFactor);
+ mLastAttFactor = newAttFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newAttFactor != mDesAttFactor)
+ ALOGV("DRC presentation mode wrapper: forced attenuation factor to %d (from %d)\n", newAttFactor, mDesAttFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set attenuation factor to %d\n", newAttFactor);
+#endif
+ }
+
+ if (newBoostFactor != mLastBoostFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_BOOST_FACTOR, newBoostFactor);
+ mLastBoostFactor = newBoostFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newBoostFactor != mDesBoostFactor)
+ ALOGV("DRC presentation mode wrapper: forced boost factor to %d (from %d)\n",
+ newBoostFactor, mDesBoostFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set boost factor to %d\n", newBoostFactor);
+#endif
+ }
+
+ if (newHeavy != mLastHeavy) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_HEAVY_COMPRESSION, newHeavy);
+ mLastHeavy = newHeavy;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newHeavy != mDesHeavy)
+ ALOGV("DRC presentation mode wrapper: forced heavy compression to %d (from %d)\n",
+ newHeavy, mDesHeavy);
+ else
+ ALOGV("DRC presentation mode wrapper: set heavy compression to %d\n", newHeavy);
+#endif
+ }
+
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC config: tgt_lev: %3d, cut: %3d, boost: %3d, heavy: %d\n", newTarget,
+ newAttFactor, newBoostFactor, newHeavy);
+#endif
+ mDataUpdate = false;
+
+ } // if (mDataUpdate)
+}
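As a numeric check of the light-DRC scaling in update() above (levels are in -0.25 dB steps, so smaller values mean louder targets): with a desired target of 72 (-18 dB), a program reference level of 96 (-24 dB) and an encoder target of 64 (-16 dB), calcFactor_norm = (72 - 96) / (64 - 96) = 0.75, so newAttFactor and newBoostFactor are raised to at least (int)(0.75 * 127) = 95.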
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
new file mode 100644
index 0000000..f0b6cf2
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+#include "aacdecoder_lib.h"
+
+typedef enum
+{
+ DRC_PRES_MODE_WRAP_DESIRED_TARGET = 0x0000,
+ DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR = 0x0001,
+ DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR = 0x0002,
+ DRC_PRES_MODE_WRAP_DESIRED_HEAVY = 0x0003,
+ DRC_PRES_MODE_WRAP_ENCODER_TARGET = 0x0004
+} DRC_PRES_MODE_WRAP_PARAM;
+
+
+class CDrcPresModeWrapper {
+public:
+ CDrcPresModeWrapper();
+ ~CDrcPresModeWrapper();
+ void setDecoderHandle(const HANDLE_AACDECODER handle);
+ void setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value);
+ void submitStreamData(CStreamInfo*);
+ void update();
+
+protected:
+ HANDLE_AACDECODER mHandleDecoder;
+ int mDesTarget;
+ int mDesAttFactor;
+ int mDesBoostFactor;
+ int mDesHeavy;
+
+ int mEncoderTarget;
+
+ int mLastTarget;
+ int mLastAttFactor;
+ int mLastBoostFactor;
+ int mLastHeavy;
+
+ SCHAR mStreamPRL;
+ SCHAR mStreamDRCPresMode;
+ INT mStreamNrAACChan;
+ INT mStreamNrOutChan;
+
+ bool mIsDownmix;
+ bool mIsMonoDownmix;
+ bool mIsStereoDownmix;
+
+ bool mDataUpdate;
+};
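Taken together, the intended call pattern for the wrapper is: attach the decoder handle once, push the desired settings, then resubmit the stream info and call update() around each decode so that changed DRC parameters reach the decoder. A usage sketch (the FDK calls aacDecoder_Open() and aacDecoder_GetStreamInfo() are assumed available, as in SoftAAC2.cpp below):

CDrcPresModeWrapper drcWrap;
HANDLE_AACDECODER dec = aacDecoder_Open(TT_MP4_ADIF, /* nrOfLayers */ 1);
drcWrap.setDecoderHandle(dec);
drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, 64);   // -16 dB target level
drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, 1);     // allow heavy (DVB-style) compression
// ... around each decoded frame:
drcWrap.submitStreamData(aacDecoder_GetStreamInfo(dec));    // feed current stream metadata
drcWrap.update();                                           // pushes any changed DRC params via aacDecoder_SetParam()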
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index 1b20cbb..495bad0 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -14,27 +14,35 @@
* limitations under the License.
*/
-#define LOG_TAG "SoftAAC2"
//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAAC2"
#include <utils/Log.h>
#include "SoftAAC2.h"
+#include <OMX_AudioExt.h>
+#include <OMX_IndexExt.h>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaErrors.h>
+#include <math.h>
+
#define FILEREAD_MAX_LAYERS 2
#define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
-#define MAX_CHANNEL_COUNT 6 /* maximum number of audio channels that can be decoded */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1 /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL -1 /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
// names of properties that can be used to override the default DRC settings
#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut"
#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
namespace android {
@@ -57,9 +65,9 @@ SoftAAC2::SoftAAC2(
mStreamInfo(NULL),
mIsADTS(false),
mInputBufferCount(0),
+ mOutputBufferCount(0),
mSignalledError(false),
- mAnchorTimeUs(0),
- mNumSamplesOutput(0),
+ mLastInHeader(NULL),
mOutputPortSettingsChange(NONE) {
initPorts();
CHECK_EQ(initDecoder(), (status_t)OK);
@@ -67,6 +75,7 @@ SoftAAC2::SoftAAC2(
SoftAAC2::~SoftAAC2() {
aacDecoder_Close(mAACDecoder);
+ delete[] mOutputDelayRingBuffer; // allocated with new short[] in initDecoder()
}
void SoftAAC2::initPorts() {
@@ -111,6 +120,7 @@ void SoftAAC2::initPorts() {
}
status_t SoftAAC2::initDecoder() {
+ ALOGV("initDecoder()");
status_t status = UNKNOWN_ERROR;
mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1);
if (mAACDecoder != NULL) {
@@ -119,36 +129,73 @@ status_t SoftAAC2::initDecoder() {
status = OK;
}
}
- mDecoderHasData = false;
- // for streams that contain metadata, use the mobile profile DRC settings unless overridden
- // by platform properties:
+ mEndOfInput = false;
+ mEndOfOutput = false;
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
+ mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize];
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mOutputDelayRingBufferFilled = 0;
+
+ if (mAACDecoder == NULL) {
+ ALOGE("AAC decoder is null. TODO: Can not call aacDecoder_SetParam in the following code");
+ }
+
+ //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
+
+ //init DRC wrapper
+ mDrcWrap.setDecoderHandle(mAACDecoder);
+ mDrcWrap.submitStreamData(mStreamInfo);
+
+ // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
+ // TODO: change the DRC settings depending on audio output device type (HDMI, loudspeaker, headphone)
char value[PROPERTY_VALUE_MAX];
- // * AAC_DRC_REFERENCE_LEVEL
+ // DRC_PRES_MODE_WRAP_DESIRED_TARGET
if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL)) {
unsigned refLevel = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_REFERENCE_LEVEL of %d instead of %d",
- refLevel, DRC_DEFAULT_MOBILE_REF_LEVEL);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_REFERENCE_LEVEL, refLevel);
+ ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d", refLevel,
+ DRC_DEFAULT_MOBILE_REF_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, refLevel);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_REFERENCE_LEVEL, DRC_DEFAULT_MOBILE_REF_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, DRC_DEFAULT_MOBILE_REF_LEVEL);
}
- // * AAC_DRC_ATTENUATION_FACTOR
+ // DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL)) {
unsigned cut = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_ATTENUATION_FACTOR of %d instead of %d",
- cut, DRC_DEFAULT_MOBILE_DRC_CUT);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_ATTENUATION_FACTOR, cut);
+ ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", cut,
+ DRC_DEFAULT_MOBILE_DRC_CUT);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, cut);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_ATTENUATION_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
}
- // * AAC_DRC_BOOST_FACTOR (note: no default, using cut)
+ // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL)) {
unsigned boost = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_BOOST_FACTOR of %d", boost);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, boost);
+ ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", boost,
+ DRC_DEFAULT_MOBILE_DRC_BOOST);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, boost);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+ }
+ // DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+ if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL)) {
+ unsigned heavy = atoi(value);
+ ALOGV("AAC decoder using desried DRC heavy compression switch of %d instead of %d", heavy,
+ DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, heavy);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ }
+ // DRC_PRES_MODE_WRAP_ENCODER_TARGET
+ if (property_get(PROP_DRC_OVERRIDE_ENC_LEVEL, value, NULL)) {
+ unsigned encoderRefLevel = atoi(value);
+ ALOGV("AAC decoder using encoder-side DRC reference level of %d instead of %d",
+ encoderRefLevel, DRC_DEFAULT_MOBILE_ENC_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, encoderRefLevel);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, DRC_DEFAULT_MOBILE_ENC_LEVEL);
}
return status;
@@ -231,7 +278,7 @@ OMX_ERRORTYPE SoftAAC2::internalGetParameter(
OMX_ERRORTYPE SoftAAC2::internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR params) {
- switch (index) {
+ switch ((int)index) {
case OMX_IndexParamStandardComponentRole:
{
const OMX_PARAM_COMPONENTROLETYPE *roleParams =
@@ -267,6 +314,71 @@ OMX_ERRORTYPE SoftAAC2::internalSetParameter(
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioAndroidAacPresentation:
+ {
+ const OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE *aacPresParams =
+ (const OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE *)params;
+ // for the following parameters of the OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE structure,
+ // a value of -1 implies the parameter is not set by the application:
+ // nMaxOutputChannels uses default platform properties, see configureDownmix()
+ // nDrcCut uses default platform properties, see initDecoder()
+ // nDrcBoost idem
+ // nHeavyCompression idem
+ // nTargetReferenceLevel idem
+ // nEncodedTargetLevel idem
+ if (aacPresParams->nMaxOutputChannels >= 0) {
+ int max;
+ if (aacPresParams->nMaxOutputChannels >= 8) { max = 8; }
+ else if (aacPresParams->nMaxOutputChannels >= 6) { max = 6; }
+ else if (aacPresParams->nMaxOutputChannels >= 2) { max = 2; }
+ else {
+ // -1 or 0: disable downmix, 1: mono
+ max = aacPresParams->nMaxOutputChannels;
+ }
+ ALOGV("set nMaxOutputChannels=%d", max);
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, max);
+ }
+ bool updateDrcWrapper = false;
+ if (aacPresParams->nDrcBoost >= 0) {
+ ALOGV("set nDrcBoost=%d", aacPresParams->nDrcBoost);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR,
+ aacPresParams->nDrcBoost);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nDrcCut >= 0) {
+ ALOGV("set nDrcCut=%d", aacPresParams->nDrcCut);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, aacPresParams->nDrcCut);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nHeavyCompression >= 0) {
+ ALOGV("set nHeavyCompression=%d", aacPresParams->nHeavyCompression);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY,
+ aacPresParams->nHeavyCompression);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nTargetReferenceLevel >= 0) {
+ ALOGV("set nTargetReferenceLevel=%d", aacPresParams->nTargetReferenceLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET,
+ aacPresParams->nTargetReferenceLevel);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nEncodedTargetLevel >= 0) {
+ ALOGV("set nEncodedTargetLevel=%d", aacPresParams->nEncodedTargetLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET,
+ aacPresParams->nEncodedTargetLevel);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nPCMLimiterEnable >= 0) {
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE,
+ (aacPresParams->nPCMLimiterEnable != 0));
+ }
+ if (updateDrcWrapper) {
+ mDrcWrap.update();
+ }
+
+ return OMX_ErrorNone;
+ }
+
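On the client side, any field of OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE left at -1 keeps the decoder defaults, per the handling above. A sketch of how a caller might fill the structure (InitOMXParams and omxHandle stand in for whatever helper and component handle the caller has; both are assumptions, not shown here):

OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE pres;
InitOMXParams(&pres);                       // assumed helper filling nSize/nVersion
pres.nMaxOutputChannels = 2;                // downmix to stereo
pres.nDrcCut = -1;                          // keep platform default
pres.nDrcBoost = -1;
pres.nHeavyCompression = -1;
pres.nTargetReferenceLevel = 64;            // -16 dB in -0.25 dB steps
pres.nEncodedTargetLevel = -1;
pres.nPCMLimiterEnable = -1;                // leave limiter choice to the component
OMX_SetParameter(omxHandle,
        (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, &pres);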
case OMX_IndexParamAudioPcm:
{
const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
@@ -288,290 +400,567 @@ bool SoftAAC2::isConfigured() const {
return mInputBufferCount > 0;
}
-void SoftAAC2::maybeConfigureDownmix() const {
- if (mStreamInfo->numChannels > 2) {
- char value[PROPERTY_VALUE_MAX];
- if (!(property_get("media.aac_51_output_enabled", value, NULL) &&
- (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
- ALOGI("Downmixing multichannel AAC to stereo");
- aacDecoder_SetParam(mAACDecoder, AAC_PCM_OUTPUT_CHANNELS, 2);
- mStreamInfo->numChannels = 2;
- }
+void SoftAAC2::configureDownmix() const {
+ char value[PROPERTY_VALUE_MAX];
+ if (!(property_get("media.aac_51_output_enabled", value, NULL)
+ && (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
+ ALOGI("limiting to stereo output");
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, 2);
+ // By default, the decoder creates a 5.1 channel downmix signal
+ // for seven and eight channel input streams. To enable 6.1 and 7.1 channel output
+ // use aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1)
}
}
-void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
- if (mSignalledError || mOutputPortSettingsChange != NONE) {
- return;
+bool SoftAAC2::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
+ if (numSamples == 0) {
+ return true;
+ }
+ if (outputDelayRingBufferSpaceLeft() < numSamples) {
+ ALOGE("RING BUFFER WOULD OVERFLOW");
+ return false;
}
+ if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
+ || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
+ // faster copy loop without checks, if the preconditions allow this
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
+ }
- UCHAR* inBuffer[FILEREAD_MAX_LAYERS];
- UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0};
- UINT bytesValid[FILEREAD_MAX_LAYERS] = {0};
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow SoftAAC2::outputDelayRingBufferPutSamples()");
- List<BufferInfo *> &inQueue = getPortQueue(0);
- List<BufferInfo *> &outQueue = getPortQueue(1);
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
+ mOutputDelayRingBufferWritePos++;
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ }
+ }
+ mOutputDelayRingBufferFilled += numSamples;
+ return true;
+}
- if (portIndex == 0 && mInputBufferCount == 0) {
- ++mInputBufferCount;
- BufferInfo *info = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *header = info->mHeader;
+int32_t SoftAAC2::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
- inBuffer[0] = header->pBuffer + header->nOffset;
- inBufferLength[0] = header->nFilledLen;
+ if (numSamples > mOutputDelayRingBufferFilled) {
+ ALOGE("RING BUFFER WOULD UNDERRUN");
+ return -1;
+ }
- AAC_DECODER_ERROR decoderErr =
- aacDecoder_ConfigRaw(mAACDecoder,
- inBuffer,
- inBufferLength);
+ if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
+ || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
+ // faster copy loop without checks, if the preconditions allow this
+ if (samples != 0) {
+ for (int32_t i = 0; i < numSamples; i++) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
+ }
+ } else {
+ mOutputDelayRingBufferReadPos += numSamples;
+ }
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow SoftAAC2::outputDelayRingBufferGetSamples()");
- if (decoderErr != AAC_DEC_OK) {
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
- return;
+ for (int32_t i = 0; i < numSamples; i++) {
+ if (samples != 0) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
+ }
+ mOutputDelayRingBufferReadPos++;
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
}
+ }
+ mOutputDelayRingBufferFilled -= numSamples;
+ return numSamples;
+}
- inQueue.erase(inQueue.begin());
- info->mOwnedByUs = false;
- notifyEmptyBufferDone(header);
+int32_t SoftAAC2::outputDelayRingBufferSamplesAvailable() {
+ return mOutputDelayRingBufferFilled;
+}
- // Only send out port settings changed event if both sample rate
- // and numChannels are valid.
- if (mStreamInfo->sampleRate && mStreamInfo->numChannels) {
- maybeConfigureDownmix();
- ALOGI("Initially configuring decoder: %d Hz, %d channels",
- mStreamInfo->sampleRate,
- mStreamInfo->numChannels);
+int32_t SoftAAC2::outputDelayRingBufferSpaceLeft() {
+ return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
+}
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- }
+void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {
+ if (mSignalledError || mOutputPortSettingsChange != NONE) {
return;
}
- while (!inQueue.empty() && !outQueue.empty()) {
- BufferInfo *inInfo = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ UCHAR* inBuffer[FILEREAD_MAX_LAYERS];
+ UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0};
+ UINT bytesValid[FILEREAD_MAX_LAYERS] = {0};
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ while ((!inQueue.empty() || mEndOfInput) && !outQueue.empty()) {
+ if (!inQueue.empty()) {
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
- BufferInfo *outInfo = *outQueue.begin();
- OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+ mEndOfInput = (inHeader->nFlags & OMX_BUFFERFLAG_EOS) != 0;
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
+ if (mInputBufferCount == 0 && !(inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ ALOGE("first buffer should have OMX_BUFFERFLAG_CODECCONFIG set");
+ inHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
+ }
+ if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
- if (mDecoderHasData) {
- // flush out the decoder's delayed data by calling DecodeFrame
- // one more time, with the AACDEC_FLUSH flag set
- INT_PCM *outBuffer =
- reinterpret_cast<INT_PCM *>(
- outHeader->pBuffer + outHeader->nOffset);
+ inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
+ inBufferLength[0] = inHeader->nFilledLen;
AAC_DECODER_ERROR decoderErr =
- aacDecoder_DecodeFrame(mAACDecoder,
- outBuffer,
- outHeader->nAllocLen,
- AACDEC_FLUSH);
- mDecoderHasData = false;
+ aacDecoder_ConfigRaw(mAACDecoder,
+ inBuffer,
+ inBufferLength);
if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr);
mSignalledError = true;
-
- notify(OMX_EventError, OMX_ErrorUndefined, decoderErr,
- NULL);
-
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
return;
}
- outHeader->nFilledLen =
- mStreamInfo->frameSize
- * sizeof(int16_t)
- * mStreamInfo->numChannels;
- } else {
- // we never submitted any data to the decoder, so there's nothing to flush out
- outHeader->nFilledLen = 0;
+ mInputBufferCount++;
+ mOutputBufferCount++; // fake increase of outputBufferCount to keep the counters aligned
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ configureDownmix();
+ // Only send out port settings changed event if both sample rate
+ // and numChannels are valid.
+ if (mStreamInfo->sampleRate && mStreamInfo->numChannels) {
+ ALOGI("Initially configuring decoder: %d Hz, %d channels",
+ mStreamInfo->sampleRate,
+ mStreamInfo->numChannels);
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+ return;
}
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
- return;
- }
-
- if (inHeader->nOffset == 0) {
- mAnchorTimeUs = inHeader->nTimeStamp;
- mNumSamplesOutput = 0;
- }
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ continue;
+ }
- size_t adtsHeaderSize = 0;
- if (mIsADTS) {
- // skip 30 bits, aac_frame_length follows.
- // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
+ if (mIsADTS) {
+ size_t adtsHeaderSize = 0;
+ // skip 30 bits, aac_frame_length follows.
+ // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
- const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
+ const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
- bool signalError = false;
- if (inHeader->nFilledLen < 7) {
- ALOGE("Audio data too short to contain even the ADTS header. "
- "Got %ld bytes.", inHeader->nFilledLen);
- hexdump(adtsHeader, inHeader->nFilledLen);
- signalError = true;
- } else {
- bool protectionAbsent = (adtsHeader[1] & 1);
-
- unsigned aac_frame_length =
- ((adtsHeader[3] & 3) << 11)
- | (adtsHeader[4] << 3)
- | (adtsHeader[5] >> 5);
-
- if (inHeader->nFilledLen < aac_frame_length) {
- ALOGE("Not enough audio data for the complete frame. "
- "Got %ld bytes, frame size according to the ADTS "
- "header is %u bytes.",
- inHeader->nFilledLen, aac_frame_length);
+ bool signalError = false;
+ if (inHeader->nFilledLen < 7) {
+ ALOGE("Audio data too short to contain even the ADTS header. "
+ "Got %d bytes.", inHeader->nFilledLen);
hexdump(adtsHeader, inHeader->nFilledLen);
signalError = true;
} else {
- adtsHeaderSize = (protectionAbsent ? 7 : 9);
-
- inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
- inBufferLength[0] = aac_frame_length - adtsHeaderSize;
-
- inHeader->nOffset += adtsHeaderSize;
- inHeader->nFilledLen -= adtsHeaderSize;
+ bool protectionAbsent = (adtsHeader[1] & 1);
+
+ unsigned aac_frame_length =
+ ((adtsHeader[3] & 3) << 11)
+ | (adtsHeader[4] << 3)
+ | (adtsHeader[5] >> 5);
+
+ if (inHeader->nFilledLen < aac_frame_length) {
+ ALOGE("Not enough audio data for the complete frame. "
+ "Got %d bytes, frame size according to the ADTS "
+ "header is %u bytes.",
+ inHeader->nFilledLen, aac_frame_length);
+ hexdump(adtsHeader, inHeader->nFilledLen);
+ signalError = true;
+ } else {
+ adtsHeaderSize = (protectionAbsent ? 7 : 9);
+
+ inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
+ inBufferLength[0] = aac_frame_length - adtsHeaderSize;
+
+ inHeader->nOffset += adtsHeaderSize;
+ inHeader->nFilledLen -= adtsHeaderSize;
+ }
}
- }
-
- if (signalError) {
- mSignalledError = true;
- notify(OMX_EventError,
- OMX_ErrorStreamCorrupt,
- ERROR_MALFORMED,
- NULL);
+ if (signalError) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorStreamCorrupt, ERROR_MALFORMED, NULL);
+ return;
+ }
- return;
+ // insert buffer size and time stamp
+ mBufferSizes.add(inBufferLength[0]);
+ if (mLastInHeader != inHeader) {
+ mBufferTimestamps.add(inHeader->nTimeStamp);
+ mLastInHeader = inHeader;
+ } else {
+ int64_t currentTime = mBufferTimestamps.top();
+ currentTime += mStreamInfo->aacSamplesPerFrame *
+ 1000000ll / mStreamInfo->sampleRate;
+ mBufferTimestamps.add(currentTime);
+ }
+ } else {
+ inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
+ inBufferLength[0] = inHeader->nFilledLen;
+ mLastInHeader = inHeader;
+ mBufferTimestamps.add(inHeader->nTimeStamp);
+ mBufferSizes.add(inHeader->nFilledLen);
}
- } else {
- inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
- inBufferLength[0] = inHeader->nFilledLen;
- }
-
- // Fill and decode
- INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(
- outHeader->pBuffer + outHeader->nOffset);
- bytesValid[0] = inBufferLength[0];
+ // Fill and decode
+ bytesValid[0] = inBufferLength[0];
- int prevSampleRate = mStreamInfo->sampleRate;
- int prevNumChannels = mStreamInfo->numChannels;
+ INT prevSampleRate = mStreamInfo->sampleRate;
+ INT prevNumChannels = mStreamInfo->numChannels;
- AAC_DECODER_ERROR decoderErr = AAC_DEC_NOT_ENOUGH_BITS;
- while (bytesValid[0] > 0 && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
aacDecoder_Fill(mAACDecoder,
inBuffer,
inBufferLength,
bytesValid);
- mDecoderHasData = true;
-
- decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
- outBuffer,
- outHeader->nAllocLen,
- 0 /* flags */);
-
- if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
- ALOGW("Not enough bits, bytesValid %d", bytesValid[0]);
- }
- }
- size_t numOutBytes =
- mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
- if (decoderErr == AAC_DEC_OK) {
UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
inHeader->nFilledLen -= inBufferUsedLength;
inHeader->nOffset += inBufferUsedLength;
- } else {
- ALOGW("AAC decoder returned error %d, substituting silence",
- decoderErr);
- memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
+ AAC_DECODER_ERROR decoderErr;
+ int numLoops = 0;
+ do {
+ if (outputDelayRingBufferSpaceLeft() <
+ (mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ ALOGV("skipping decode: not enough space left in ringbuffer");
+ break;
+ }
+
+ int numConsumed = mStreamInfo->numTotalBytes;
+ decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ 0 /* flags */);
+
+ numConsumed = mStreamInfo->numTotalBytes - numConsumed;
+ numLoops++;
- // Discard input buffer.
- inHeader->nFilledLen = 0;
+ if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
+ break;
+ }
+ mDecodedSizes.add(numConsumed);
+
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+ if (bytesValid[0] != 0) {
+ ALOGE("bytesValid[0] != 0 should never happen");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ size_t numOutBytes =
+ mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+
+ if (decoderErr == AAC_DEC_OK) {
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
+ } else {
+ ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr);
+
+ memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow
+
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
+
+ // Discard input buffer.
+ if (inHeader) {
+ inHeader->nFilledLen = 0;
+ }
+
+ aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+
+ // After an error, replace the last entry in mBufferSizes with the sum of the
+ // last <numLoops> entries from mDecodedSizes to resynchronize the in/out lists.
+ mBufferSizes.pop();
+ int n = 0;
+ for (int i = 0; i < numLoops; i++) {
+ n += mDecodedSizes.itemAt(mDecodedSizes.size() - numLoops + i);
+ }
+ mBufferSizes.add(n);
+
+ // fall through
+ }
- // fall through
+ /*
+ * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
+ * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
+ * rate system and the sampling rate in the final output is actually
+ * doubled compared with the core AAC decoder sampling rate.
+ *
+ * Explicit signalling is done by explicitly defining SBR audio object
+ * type in the bitstream. Implicit signalling is done by embedding
+ * SBR content in AAC extension payload specific to SBR, and hence
+ * requires an AAC decoder to perform pre-checks on actual audio frames.
+ *
+ * Thus, we could not say for sure whether a stream is
+ * AAC+/eAAC+ until the first data frame is decoded.
+ */
+ if (mInputBufferCount <= 2 || mOutputBufferCount > 1) { // TODO: <= 1
+ if (mStreamInfo->sampleRate != prevSampleRate ||
+ mStreamInfo->numChannels != prevNumChannels) {
+ ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+ prevSampleRate, mStreamInfo->sampleRate,
+ prevNumChannels, mStreamInfo->numChannels);
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+
+ if (inHeader && inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ mInputBufferCount++;
+ inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ return;
+ }
+ } else if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
+ ALOGW("Invalid AAC stream");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
+ if (inHeader && inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ mInputBufferCount++;
+ inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ } else {
+ ALOGV("inHeader->nFilledLen = %d", inHeader ? inHeader->nFilledLen : 0);
+ }
+ } while (decoderErr == AAC_DEC_OK);
}
- if (inHeader->nFilledLen == 0) {
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
+ int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+ if (!mEndOfInput && mOutputDelayCompensated < outputDelay) {
+ // discard outputDelay at the beginning
+ int32_t toCompensate = outputDelay - mOutputDelayCompensated;
+ int32_t discard = outputDelayRingBufferSamplesAvailable();
+ if (discard > toCompensate) {
+ discard = toCompensate;
+ }
+ int32_t discarded = outputDelayRingBufferGetSamples(0, discard);
+ mOutputDelayCompensated += discarded;
+ continue;
}
- /*
- * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
- * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
- * rate system and the sampling rate in the final output is actually
- * doubled compared with the core AAC decoder sampling rate.
- *
- * Explicit signalling is done by explicitly defining SBR audio object
- * type in the bitstream. Implicit signalling is done by embedding
- * SBR content in AAC extension payload specific to SBR, and hence
- * requires an AAC decoder to perform pre-checks on actual audio frames.
- *
- * Thus, we could not say for sure whether a stream is
- * AAC+/eAAC+ until the first data frame is decoded.
- */
- if (mInputBufferCount <= 2) {
- if (mStreamInfo->sampleRate != prevSampleRate ||
- mStreamInfo->numChannels != prevNumChannels) {
- maybeConfigureDownmix();
- ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
- prevSampleRate, mStreamInfo->sampleRate,
- prevNumChannels, mStreamInfo->numChannels);
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- return;
+ if (mEndOfInput) {
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+ mOutputDelayCompensated -= tmpOutBufferSamples;
}
- } else if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
- ALOGW("Invalid AAC stream");
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
- return;
}
- if (decoderErr == AAC_DEC_OK || mNumSamplesOutput > 0) {
- // We'll only output data if we successfully decoded it or
- // we've previously decoded valid data, in the latter case
- // (decode failed) we'll output a silent frame.
- outHeader->nFilledLen = numOutBytes;
- outHeader->nFlags = 0;
+ while (!outQueue.empty()
+ && outputDelayRingBufferSamplesAvailable()
+ >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (outHeader->nOffset != 0) {
+ ALOGE("outHeader->nOffset != 0 is not handled");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ INT_PCM *outBuffer =
+ reinterpret_cast<INT_PCM *>(outHeader->pBuffer + outHeader->nOffset);
+ int samplesize = mStreamInfo->numChannels * sizeof(int16_t);
+ if (outHeader->nOffset
+ + mStreamInfo->frameSize * samplesize
+ > outHeader->nAllocLen) {
+ ALOGE("buffer overflow");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+
+ }
+
+ int available = outputDelayRingBufferSamplesAvailable();
+ int numSamples = outHeader->nAllocLen / sizeof(int16_t);
+ if (numSamples > available) {
+ numSamples = available;
+ }
+ int64_t currentTime = 0;
+ if (available) {
+
+ int numFrames = numSamples / (mStreamInfo->frameSize * mStreamInfo->numChannels);
+ numSamples = numFrames * (mStreamInfo->frameSize * mStreamInfo->numChannels);
+
+ ALOGV("%d samples available (%d), or %d frames",
+ numSamples, available, numFrames);
+ int64_t *nextTimeStamp = &mBufferTimestamps.editItemAt(0);
+ currentTime = *nextTimeStamp;
+ int32_t *currentBufLeft = &mBufferSizes.editItemAt(0);
+ for (int i = 0; i < numFrames; i++) {
+ int32_t decodedSize = mDecodedSizes.itemAt(0);
+ mDecodedSizes.removeAt(0);
+ ALOGV("decoded %d of %d", decodedSize, *currentBufLeft);
+ if (*currentBufLeft > decodedSize) {
+ // adjust/interpolate next time stamp
+ *currentBufLeft -= decodedSize;
+ *nextTimeStamp += mStreamInfo->aacSamplesPerFrame *
+ 1000000ll / mStreamInfo->sampleRate;
+ ALOGV("adjusted nextTimeStamp/size to %lld/%d",
+ (long long) *nextTimeStamp, *currentBufLeft);
+ } else {
+ // move to next timestamp in list
+ if (mBufferTimestamps.size() > 0) {
+ mBufferTimestamps.removeAt(0);
+ nextTimeStamp = &mBufferTimestamps.editItemAt(0);
+ mBufferSizes.removeAt(0);
+ currentBufLeft = &mBufferSizes.editItemAt(0);
+ ALOGV("moved to next time/size: %lld/%d",
+ (long long) *nextTimeStamp, *currentBufLeft);
+ }
+ // try to limit output buffer size to match input buffers
+ // (e.g when an input buffer contained 4 "sub" frames, output
+ // at most 4 decoded units in the corresponding output buffer)
+ // This is optional. Remove the next three lines to fill the output
+ // buffer with as many units as available.
+ numFrames = i + 1;
+ numSamples = numFrames * mStreamInfo->frameSize * mStreamInfo->numChannels;
+ break;
+ }
+ }
+
+ ALOGV("getting %d from ringbuffer", numSamples);
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer, numSamples);
+ if (ns != numSamples) {
+ ALOGE("not a complete frame of samples available");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ }
+
+ outHeader->nFilledLen = numSamples * sizeof(int16_t);
- outHeader->nTimeStamp =
- mAnchorTimeUs
- + (mNumSamplesOutput * 1000000ll) / mStreamInfo->sampleRate;
+ if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mEndOfOutput = true;
+ } else {
+ outHeader->nFlags = 0;
+ }
- mNumSamplesOutput += mStreamInfo->frameSize;
+ outHeader->nTimeStamp = currentTime;
+ mOutputBufferCount++;
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
outInfo = NULL;
+ ALOGV("out timestamp %lld / %d", outHeader->nTimeStamp, outHeader->nFilledLen);
notifyFillBufferDone(outHeader);
outHeader = NULL;
}
- if (decoderErr == AAC_DEC_OK) {
- ++mInputBufferCount;
+ if (mEndOfInput) {
+ int ringBufAvail = outputDelayRingBufferSamplesAvailable();
+ if (!outQueue.empty()
+ && ringBufAvail < mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ if (!mEndOfOutput) {
+ // send partial or empty block signaling EOS
+ mEndOfOutput = true;
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(outHeader->pBuffer
+ + outHeader->nOffset);
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer, ringBufAvail);
+ if (ns < 0) {
+ ns = 0;
+ }
+ outHeader->nFilledLen = ns;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outHeader->nTimeStamp = mBufferTimestamps.itemAt(0);
+ mBufferTimestamps.clear();
+ mBufferSizes.clear();
+ mDecodedSizes.clear();
+
+ mOutputBufferCount++;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+ break; // if outQueue not empty but no more output
+ }
}
}
}
@@ -582,28 +971,75 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
// depend on fragments from the last one decoded.
// drain all existing data
drainDecoder();
+ mBufferTimestamps.clear();
+ mBufferSizes.clear();
+ mDecodedSizes.clear();
+ mLastInHeader = NULL;
+ } else {
+ int avail;
+ while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) {
+ if (avail > mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ avail = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ }
+ int32_t ns = outputDelayRingBufferGetSamples(0, avail);
+ if (ns != avail) {
+ ALOGW("not a complete frame of samples available");
+ break;
+ }
+ mOutputBufferCount++;
+ }
+ mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
}
}
void SoftAAC2::drainDecoder() {
- // a buffer big enough for 6 channels of decoded HE-AAC
- short buf [2048*6];
- aacDecoder_DecodeFrame(mAACDecoder,
- buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
- aacDecoder_DecodeFrame(mAACDecoder,
- buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
- mDecoderHasData = false;
+ // flush decoder until outputDelay is compensated
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+
+ mOutputDelayCompensated -= tmpOutBufferSamples;
+ }
}
void SoftAAC2::onReset() {
drainDecoder();
// reset the "configured" state
mInputBufferCount = 0;
- mNumSamplesOutput = 0;
+ mOutputBufferCount = 0;
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mOutputDelayRingBufferFilled = 0;
+ mEndOfInput = false;
+ mEndOfOutput = false;
+ mBufferTimestamps.clear();
+ mBufferSizes.clear();
+ mDecodedSizes.clear();
+ mLastInHeader = NULL;
+
// To make the codec behave the same before and after a reset, we need to invalidate the
// streaminfo struct. This does that:
- mStreamInfo->sampleRate = 0;
+ mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
mSignalledError = false;
mOutputPortSettingsChange = NONE;
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index 2d960ab..c3e4459 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -20,6 +20,7 @@
#include "SimpleSoftOMXComponent.h"
#include "aacdecoder_lib.h"
+#include "DrcPresModeWrap.h"
namespace android {
@@ -47,16 +48,22 @@ private:
enum {
kNumInputBuffers = 4,
kNumOutputBuffers = 4,
+ kNumDelayBlocksMax = 8,
};
HANDLE_AACDECODER mAACDecoder;
CStreamInfo *mStreamInfo;
bool mIsADTS;
- bool mDecoderHasData;
+ bool mIsFirst;
size_t mInputBufferCount;
+ size_t mOutputBufferCount;
bool mSignalledError;
- int64_t mAnchorTimeUs;
- int64_t mNumSamplesOutput;
+ OMX_BUFFERHEADERTYPE *mLastInHeader;
+ Vector<int32_t> mBufferSizes;
+ Vector<int32_t> mDecodedSizes;
+ Vector<int64_t> mBufferTimestamps;
+
+ CDrcPresModeWrapper mDrcWrap;
enum {
NONE,
@@ -67,9 +74,23 @@ private:
void initPorts();
status_t initDecoder();
bool isConfigured() const;
- void maybeConfigureDownmix() const;
+ void configureDownmix() const;
void drainDecoder();
+// delay compensation
+ bool mEndOfInput;
+ bool mEndOfOutput;
+ int32_t mOutputDelayCompensated;
+ int32_t mOutputDelayRingBufferSize;
+ short *mOutputDelayRingBuffer;
+ int32_t mOutputDelayRingBufferWritePos;
+ int32_t mOutputDelayRingBufferReadPos;
+ int32_t mOutputDelayRingBufferFilled;
+ bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferSamplesAvailable();
+ int32_t outputDelayRingBufferSpaceLeft();
+
DISALLOW_EVIL_CONSTRUCTORS(SoftAAC2);
};
diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
index 8b5007e..bebb9dc 100644
--- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp
+++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
@@ -214,8 +214,6 @@ sp<MetaData> AACEncoder::getFormat() {
status_t AACEncoder::read(
MediaBuffer **out, const ReadOptions *options) {
- status_t err;
-
*out = NULL;
int64_t seekTimeUs;
diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk
index 057c69b..58ec3ba 100644
--- a/media/libstagefright/codecs/aacenc/Android.mk
+++ b/media/libstagefright/codecs/aacenc/Android.mk
@@ -82,6 +82,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7
endif
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -106,6 +108,8 @@ ifeq ($(AAC_LIBRARY), fraunhofer)
LOCAL_CFLAGS :=
+ LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
@@ -128,6 +132,8 @@ else # visualon
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+ LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_aacenc
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
index ff2b503..35aa883 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
@@ -19,6 +19,7 @@
#include <utils/Log.h>
#include "SoftAACEncoder2.h"
+#include <OMX_AudioExt.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/hexdump.h>
@@ -44,6 +45,8 @@ SoftAACEncoder2::SoftAACEncoder2(
mNumChannels(1),
mSampleRate(44100),
mBitRate(0),
+ mSBRMode(-1),
+ mSBRRatio(0),
mAACProfile(OMX_AUDIO_AACObjectLC),
mSentCodecSpecificData(false),
mInputSize(0),
@@ -156,6 +159,41 @@ OMX_ERRORTYPE SoftAACEncoder2::internalGetParameter(
aacParams->nSampleRate = mSampleRate;
aacParams->nFrameLength = 0;
+ switch (mSBRMode) {
+ case 1: // sbr on
+ switch (mSBRRatio) {
+ case 0:
+ // set both OMX AAC tool flags
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 1:
+ // set single-rate SBR active
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 2:
+ // set dual-rate SBR active
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ default:
+ ALOGE("invalid SBR ratio %d", mSBRRatio);
+ TRESPASS();
+ }
+ break;
+ case 0: // sbr off
+ case -1: // sbr undefined
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ default:
+ ALOGE("invalid SBR mode %d", mSBRMode);
+ TRESPASS();
+ }
+
+
+
return OMX_ErrorNone;
}
@@ -243,6 +281,23 @@ OMX_ERRORTYPE SoftAACEncoder2::internalSetParameter(
mAACProfile = aacParams->eAACProfile;
}
+ if (!(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
+ && !(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
+ mSBRMode = 0;
+ mSBRRatio = 0;
+ } else if ((aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
+ && !(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
+ mSBRMode = 1;
+ mSBRRatio = 1;
+ } else if (!(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
+ && (aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
+ mSBRMode = 1;
+ mSBRRatio = 2;
+ } else {
+ mSBRMode = -1; // codec default sbr mode
+ mSBRRatio = 0;
+ }
+
if (setAudioParams() != OK) {
return OMX_ErrorUndefined;
}
@@ -305,11 +360,11 @@ static AUDIO_OBJECT_TYPE getAOTFromProfile(OMX_U32 profile) {
}
status_t SoftAACEncoder2::setAudioParams() {
- // We call this whenever sample rate, number of channels or bitrate change
+ // We call this whenever sample rate, number of channels, bitrate or SBR mode change
// in response to setParameter calls.
- ALOGV("setAudioParams: %lu Hz, %lu channels, %lu bps",
- mSampleRate, mNumChannels, mBitRate);
+ ALOGV("setAudioParams: %u Hz, %u channels, %u bps, %i sbr mode, %i sbr ratio",
+ mSampleRate, mNumChannels, mBitRate, mSBRMode, mSBRRatio);
if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_AOT,
getAOTFromProfile(mAACProfile))) {
@@ -335,10 +390,28 @@ status_t SoftAACEncoder2::setAudioParams() {
return UNKNOWN_ERROR;
}
+ if (mSBRMode != -1 && mAACProfile == OMX_AUDIO_AACObjectELD) {
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, mSBRMode)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* SBR ratio parameter configurations:
+ 0: Default configuration wherein SBR ratio is configured depending on audio object type by
+ the FDK.
+ 1: Downsampled SBR (default for ELD)
+ 2: Dualrate SBR (default for HE-AAC)
+ */
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_RATIO, mSBRRatio)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+
return OK;
}
-void SoftAACEncoder2::onQueueFilled(OMX_U32 portIndex) {
+void SoftAACEncoder2::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError) {
return;
}
@@ -364,7 +437,7 @@ void SoftAACEncoder2::onQueueFilled(OMX_U32 portIndex) {
OMX_U32 actualBitRate = aacEncoder_GetParam(mAACEncoder, AACENC_BITRATE);
if (mBitRate != actualBitRate) {
- ALOGW("Requested bitrate %lu unsupported, using %lu", mBitRate, actualBitRate);
+ ALOGW("Requested bitrate %u unsupported, using %u", mBitRate, actualBitRate);
}
AACENC_InfoStruct encInfo;
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
index 2603f4f..bce9c24 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
@@ -53,6 +53,8 @@ private:
OMX_U32 mNumChannels;
OMX_U32 mSampleRate;
OMX_U32 mBitRate;
+ OMX_S32 mSBRMode;
+ OMX_S32 mSBRRatio;
OMX_U32 mAACProfile;
bool mSentCodecSpecificData;
diff --git a/media/libstagefright/codecs/aacenc/basic_op/basic_op.h b/media/libstagefright/codecs/aacenc/basic_op/basic_op.h
index 5cd7e5f..bbc753b 100644
--- a/media/libstagefright/codecs/aacenc/basic_op/basic_op.h
+++ b/media/libstagefright/codecs/aacenc/basic_op/basic_op.h
@@ -518,8 +518,6 @@ __inline Word32 L_shl(Word32 L_var1, Word16 var2)
return ASM_L_shr( L_var1, -var2);
}
#else
- Word32 L_var_out = 0L;
-
if (var2 <= 0)
{
L_var1 = L_shr(L_var1, (Word16)-var2);
@@ -540,7 +538,6 @@ __inline Word32 L_shl(Word32 L_var1, Word16 var2)
}
}
L_var1 <<= 1;
- L_var_out = L_var1;
}
}
return (L_var1);
diff --git a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
index cc01927..4fd16a1 100644
--- a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
+++ b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
@@ -24,6 +24,8 @@
#include "basic_op.h"
#include "oper_32b.h"
+#define UNUSED(x) (void)(x)
+
/*****************************************************************************
* *
* Function L_Extract() *
@@ -245,6 +247,7 @@ Word32 rsqrt(Word32 value, /*!< Operand to square root (0.0 ... 1) */
{
Word32 root = 0;
Word32 scale;
+ UNUSED(accuracy);
if(value < 0)
return 0;
@@ -347,12 +350,11 @@ Word32 pow2_xy(Word32 x, Word32 y)
UWord32 iPart;
UWord32 fPart;
Word32 res;
- Word32 tmp, tmp2;
- Word32 shift, shift2;
+ Word32 tmp;
- tmp2 = -x;
- iPart = tmp2 / y;
- fPart = tmp2 - iPart*y;
+ tmp = -x;
+ iPart = tmp / y;
+ fPart = tmp - iPart*y;
iPart = min(iPart,INT_BITS-1);
res = pow2Table[(POW2_TABLE_SIZE*fPart)/y] >> iPart;
diff --git a/media/libstagefright/codecs/aacenc/src/aacenc.c b/media/libstagefright/codecs/aacenc/src/aacenc.c
index d1c8621..df17787 100644
--- a/media/libstagefright/codecs/aacenc/src/aacenc.c
+++ b/media/libstagefright/codecs/aacenc/src/aacenc.c
@@ -27,6 +27,8 @@
#include "cmnMemory.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
+
/**
* Init the audio codec module and return codec handle
* \param phCodec [OUT] Return the video codec handle
@@ -37,16 +39,20 @@
VO_U32 VO_API voAACEncInit(VO_HANDLE * phCodec,VO_AUDIO_CODINGTYPE vType, VO_CODEC_INIT_USERDATA *pUserData)
{
AAC_ENCODER*hAacEnc;
- AACENC_CONFIG config;
int error;
#ifdef USE_DEAULT_MEM
VO_MEM_OPERATOR voMemoprator;
#endif
VO_MEM_OPERATOR *pMemOP;
+
+#ifdef USE_DEAULT_MEM
int interMem;
+ interMem = 0;
+#endif
+
+ UNUSED(vType);
- interMem = 0;
error = 0;
/* init the memory operator */
@@ -210,7 +216,7 @@ VO_U32 VO_API voAACEncGetOutputData(VO_HANDLE hCodec, VO_CODECBUFFER * pOutput,
AAC_ENCODER* hAacEnc = (AAC_ENCODER*)hCodec;
Word16 numAncDataBytes=0;
Word32 inbuflen;
- int ret, length;
+ int length;
if(NULL == hAacEnc)
return VO_ERR_INVALID_ARG;
@@ -471,6 +477,10 @@ VO_U32 VO_API voAACEncSetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData)
*/
VO_U32 VO_API voAACEncGetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData)
{
+ UNUSED(hCodec);
+ UNUSED(uParamID);
+ UNUSED(pData);
+
return VO_ERR_NONE;
}
diff --git a/media/libstagefright/codecs/aacenc/src/aacenc_core.c b/media/libstagefright/codecs/aacenc/src/aacenc_core.c
index cecbc8f..de452d4 100644
--- a/media/libstagefright/codecs/aacenc/src/aacenc_core.c
+++ b/media/libstagefright/codecs/aacenc/src/aacenc_core.c
@@ -58,7 +58,6 @@ Word16 AacEncOpen( AAC_ENCODER* hAacEnc, /* pointer to an encoder
const AACENC_CONFIG config /* pre-initialized config struct */
)
{
- Word32 i;
Word32 error = 0;
Word16 profile = 1;
diff --git a/media/libstagefright/codecs/aacenc/src/adj_thr.c b/media/libstagefright/codecs/aacenc/src/adj_thr.c
index ccfe883..8b8be0e 100644
--- a/media/libstagefright/codecs/aacenc/src/adj_thr.c
+++ b/media/libstagefright/codecs/aacenc/src/adj_thr.c
@@ -72,7 +72,7 @@ static void calcThreshExp(Word32 thrExp[MAX_CHANNELS][MAX_GROUPED_SFB],
const Word16 nChannels)
{
Word16 ch, sfb, sfbGrp;
- Word32 *pthrExp, *psfbThre;
+ Word32 *pthrExp = NULL, *psfbThre;
for (ch=0; ch<nChannels; ch++) {
PSY_OUT_CHANNEL *psyOutChan = &psyOutChannel[ch];
for(sfbGrp = 0; sfbGrp < psyOutChan->sfbCnt; sfbGrp+= psyOutChan->sfbPerGroup)
@@ -96,7 +96,7 @@ static void adaptMinSnr(PSY_OUT_CHANNEL psyOutChannel[MAX_CHANNELS],
MINSNR_ADAPT_PARAM *msaParam,
const Word16 nChannels)
{
- Word16 ch, sfb, sfbOffs, shift;
+ Word16 ch, sfb, sfbOffs;
Word32 nSfb, avgEn;
Word16 log_avgEn = 0;
Word32 startRatio_x_avgEn = 0;
diff --git a/media/libstagefright/codecs/aacenc/src/bitbuffer.c b/media/libstagefright/codecs/aacenc/src/bitbuffer.c
index 0ce93d3..15eebd0 100644
--- a/media/libstagefright/codecs/aacenc/src/bitbuffer.c
+++ b/media/libstagefright/codecs/aacenc/src/bitbuffer.c
@@ -24,29 +24,6 @@
/*****************************************************************************
*
-* function name: updateBitBufWordPtr
-* description: update Bit Buffer pointer
-*
-*****************************************************************************/
-static void updateBitBufWordPtr(HANDLE_BIT_BUF hBitBuf,
- UWord8 **pBitBufWord,
- Word16 cnt)
-{
- *pBitBufWord += cnt;
-
-
- if(*pBitBufWord > hBitBuf->pBitBufEnd) {
- *pBitBufWord -= (hBitBuf->pBitBufEnd - hBitBuf->pBitBufBase + 1);
- }
-
- if(*pBitBufWord < hBitBuf->pBitBufBase) {
- *pBitBufWord += (hBitBuf->pBitBufEnd - hBitBuf->pBitBufBase + 1);
- }
-}
-
-
-/*****************************************************************************
-*
* function name: CreateBitBuffer
* description: create and init Bit Buffer Management
*
diff --git a/media/libstagefright/codecs/aacenc/src/bitenc.c b/media/libstagefright/codecs/aacenc/src/bitenc.c
index fcc12dd..9c81204 100644
--- a/media/libstagefright/codecs/aacenc/src/bitenc.c
+++ b/media/libstagefright/codecs/aacenc/src/bitenc.c
@@ -26,6 +26,7 @@
#include "qc_data.h"
#include "interface.h"
+#define UNUSED(x) (void)(x)
static const Word16 globalGainOffset = 100;
static const Word16 icsReservedBit = 0;
@@ -546,7 +547,7 @@ static void writeFillElement( const UWord8 *ancBytes,
totFillBits = totFillBits - (3+4);
- if ((cnt == (1<<4)-1)) {
+ if (cnt == (1<<4)-1) {
esc_count = min( ((totFillBits >> 3) - ((1<<4)-1)), (1<<8)-1);
WriteBits(hBitStream,esc_count,8);
@@ -585,6 +586,8 @@ Word16 WriteBitstream (HANDLE_BIT_BUF hBitStream,
Word16 elementUsedBits;
Word16 frameBits=0;
+ UNUSED(ancBytes);
+
/* struct bitbuffer bsWriteCopy; */
bitMarkUp = GetBitsAvail(hBitStream);
if(qcOut->qcElement.adtsUsed) /* write adts header*/
diff --git a/media/libstagefright/codecs/aacenc/src/block_switch.c b/media/libstagefright/codecs/aacenc/src/block_switch.c
index c80538f..11bc7e7 100644
--- a/media/libstagefright/codecs/aacenc/src/block_switch.c
+++ b/media/libstagefright/codecs/aacenc/src/block_switch.c
@@ -30,9 +30,6 @@
#define ENERGY_SHIFT (8 - 1)
/**************** internal function prototypes ***********/
-static Word16
-IIRFilter(const Word16 in, const Word32 coeff[], Word32 states[]);
-
static Word32
SrchMaxWithIndex(const Word32 *in, Word16 *index, Word16 n);
@@ -280,7 +277,7 @@ Word32 CalcWindowEnergy(BLOCK_SWITCHING_CONTROL *blockSwitchingControl,
Word16 chIncrement,
Word16 windowLen)
{
- Word32 w, i, wOffset, tidx, ch;
+ Word32 w, i, tidx;
Word32 accuUE, accuFE;
Word32 tempUnfiltered;
Word32 tempFiltered;
@@ -329,30 +326,6 @@ Word32 CalcWindowEnergy(BLOCK_SWITCHING_CONTROL *blockSwitchingControl,
}
#endif
-/*****************************************************************************
-*
-* function name: IIRFilter
-* description: calculate the iir-filter for an array
-* returns: the result after iir-filter
-*
-**********************************************************************************/
-static Word16 IIRFilter(const Word16 in, const Word32 coeff[], Word32 states[])
-{
- Word32 accu1, accu2, accu3;
- Word32 out;
-
- accu1 = L_mpy_ls(coeff[1], in);
- accu3 = accu1 - states[0];
- accu2 = fixmul( coeff[0], states[1] );
- out = accu3 - accu2;
-
- states[0] = accu1;
- states[1] = out;
-
- return round16(out);
-}
-
-
static Word16 synchronizedBlockTypeTable[4][4] = {
/* LONG_WINDOW START_WINDOW SHORT_WINDOW STOP_WINDOW */
/* LONG_WINDOW */{LONG_WINDOW, START_WINDOW, SHORT_WINDOW, STOP_WINDOW},
diff --git a/media/libstagefright/codecs/aacenc/src/dyn_bits.c b/media/libstagefright/codecs/aacenc/src/dyn_bits.c
index 7769188..4d763d0 100644
--- a/media/libstagefright/codecs/aacenc/src/dyn_bits.c
+++ b/media/libstagefright/codecs/aacenc/src/dyn_bits.c
@@ -25,7 +25,6 @@
#include "bit_cnt.h"
#include "psy_const.h"
-
/*****************************************************************************
*
* function name: buildBitLookUp
@@ -226,7 +225,7 @@ gmStage2(SECTION_INFO *sectionInfo,
}
while (TRUE) {
- Word16 maxMergeGain, maxNdx, maxNdxNext, maxNdxLast;
+ Word16 maxMergeGain, maxNdx = 0, maxNdxNext, maxNdxLast;
maxMergeGain = findMaxMerge(mergeGainLookUp, sectionInfo, maxSfb, &maxNdx);
diff --git a/media/libstagefright/codecs/aacenc/src/ms_stereo.c b/media/libstagefright/codecs/aacenc/src/ms_stereo.c
index 2e34f14..1e4b227 100644
--- a/media/libstagefright/codecs/aacenc/src/ms_stereo.c
+++ b/media/libstagefright/codecs/aacenc/src/ms_stereo.c
@@ -50,7 +50,6 @@ void MsStereoProcessing(Word32 *sfbEnergyLeft,
const Word16 sfbPerGroup,
const Word16 maxSfbPerGroup,
const Word16 *sfbOffset) {
- Word32 temp;
Word32 sfb,sfboffs, j;
Word32 msMaskTrueSomewhere = 0;
Word32 msMaskFalseSomewhere = 0;
diff --git a/media/libstagefright/codecs/aacenc/src/psy_main.c b/media/libstagefright/codecs/aacenc/src/psy_main.c
index 4e9218c..6f0679c 100644
--- a/media/libstagefright/codecs/aacenc/src/psy_main.c
+++ b/media/libstagefright/codecs/aacenc/src/psy_main.c
@@ -38,6 +38,8 @@
#include "tns_func.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
+
/* long start short stop */
static Word16 blockType2windowShape[] = {KBD_WINDOW,SINE_WINDOW,SINE_WINDOW,KBD_WINDOW};
@@ -170,7 +172,9 @@ Word16 PsyOutNew(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP)
*****************************************************************************/
Word16 PsyOutDelete(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP)
{
- hPsyOut=NULL;
+ UNUSED(hPsyOut);
+ UNUSED(pMemOP);
+
return 0;
}
diff --git a/media/libstagefright/codecs/aacenc/src/qc_main.c b/media/libstagefright/codecs/aacenc/src/qc_main.c
index 48ff300..e5d78aa 100644
--- a/media/libstagefright/codecs/aacenc/src/qc_main.c
+++ b/media/libstagefright/codecs/aacenc/src/qc_main.c
@@ -33,6 +33,7 @@
#include "channel_map.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
typedef enum{
FRAME_LEN_BYTES_MODULO = 1,
@@ -204,11 +205,8 @@ Word16 QCNew(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP)
**********************************************************************************/
void QCDelete(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP)
{
-
- /*
- nothing to do
- */
- hQC=NULL;
+ UNUSED(hQC);
+ UNUSED(pMemOP);
}
/*********************************************************************************
diff --git a/media/libstagefright/codecs/aacenc/src/sf_estim.c b/media/libstagefright/codecs/aacenc/src/sf_estim.c
index bc320ec..78947e1 100644
--- a/media/libstagefright/codecs/aacenc/src/sf_estim.c
+++ b/media/libstagefright/codecs/aacenc/src/sf_estim.c
@@ -99,7 +99,7 @@ CalcFormFactorChannel(Word16 *logSfbFormFactor,
{
Word32 sfbw, sfbw1;
Word32 i, j;
- Word32 sfbOffs, sfb, shift;
+ Word32 sfbOffs, sfb;
sfbw = sfbw1 = 0;
for (sfbOffs=0; sfbOffs<psyOutChan->sfbCnt; sfbOffs+=psyOutChan->sfbPerGroup){
diff --git a/media/libstagefright/codecs/aacenc/src/tns.c b/media/libstagefright/codecs/aacenc/src/tns.c
index 455a864..27c3971 100644
--- a/media/libstagefright/codecs/aacenc/src/tns.c
+++ b/media/libstagefright/codecs/aacenc/src/tns.c
@@ -30,6 +30,8 @@
#include "psy_configuration.h"
#include "tns_func.h"
+#define UNUSED(x) (void)(x)
+
#define TNS_MODIFY_BEGIN 2600 /* Hz */
#define RATIO_PATCH_LOWER_BORDER 380 /* Hz */
#define TNS_GAIN_THRESH 141 /* 1.41*100 */
@@ -138,7 +140,7 @@ Word16 InitTnsConfigurationLong(Word32 bitRate, /*!< bitrate */
Word16 active) /*!< tns active flag */
{
- Word32 bitratePerChannel;
+ Word32 bitratePerChannel __unused;
tC->maxOrder = TNS_MAX_ORDER;
tC->tnsStartFreq = 1275;
tC->coefRes = 4;
@@ -204,7 +206,7 @@ Word16 InitTnsConfigurationShort(Word32 bitRate, /*!< bitrate */
PSY_CONFIGURATION_SHORT *pC, /*!< psy config struct */
Word16 active) /*!< tns active flag */
{
- Word32 bitratePerChannel;
+ Word32 bitratePerChannel __unused;
tC->maxOrder = TNS_MAX_ORDER_SHORT;
tC->tnsStartFreq = 2750;
tC->coefRes = 3;
@@ -495,36 +497,6 @@ Word16 TnsEncode(TNS_INFO* tnsInfo, /*!< tns info structure (modified) */
/*****************************************************************************
*
-* function name: m_pow2_cordic
-* description: Iterative power function
-*
-* Calculates pow(2.0,x-1.0*(scale+1)) with INT_BITS bit precision
-* using modified cordic algorithm
-* returns: the result of pow2
-*
-*****************************************************************************/
-static Word32 m_pow2_cordic(Word32 x, Word16 scale)
-{
- Word32 k;
-
- Word32 accu_y = 0x40000000;
- accu_y = L_shr(accu_y,scale);
-
- for(k=1; k<INT_BITS; k++) {
- const Word32 z = m_log2_table[k];
-
- while(L_sub(x,z) >= 0) {
-
- x = L_sub(x, z);
- accu_y = L_add(accu_y, (accu_y >> k));
- }
- }
- return(accu_y);
-}
-
-
-/*****************************************************************************
-*
* function name: CalcWeightedSpectrum
* description: Calculate weighted spectrum for LPC calculation
*
@@ -643,6 +615,8 @@ static Word16 CalcTnsFilter(const Word16 *signal,
Word32 i;
Word32 tnsOrderPlus1 = tnsOrder + 1;
+ UNUSED(window);
+
assert(tnsOrder <= TNS_MAX_ORDER); /* remove asserts later? (btg) */
for(i=0;i<tnsOrder;i++) {
diff --git a/media/libstagefright/codecs/aacenc/src/transform.c b/media/libstagefright/codecs/aacenc/src/transform.c
index a02336f..0080810 100644
--- a/media/libstagefright/codecs/aacenc/src/transform.c
+++ b/media/libstagefright/codecs/aacenc/src/transform.c
@@ -475,7 +475,6 @@ void Transform_Real(Word16 *mdctDelayBuffer,
Word32 *winPtr;
Word32 delayBufferSf,timeSignalSf,minSf;
- Word32 headRoom=0;
switch(blockType){
diff --git a/media/libstagefright/codecs/amrnb/common/Android.mk b/media/libstagefright/codecs/amrnb/common/Android.mk
index 30ce29c..5e632a6 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.mk
+++ b/media/libstagefright/codecs/amrnb/common/Android.mk
@@ -67,7 +67,9 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
LOCAL_CFLAGS := \
- -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF=
+ -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF=
+
+LOCAL_CFLAGS += -Werror
LOCAL_MODULE := libstagefright_amrnb_common
diff --git a/media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h b/media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h
index 35638e3..c4e4d4f 100644
--- a/media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h
+++ b/media/libstagefright/codecs/amrnb/common/include/basic_op_c_equivalent.h
@@ -115,7 +115,7 @@ extern "C"
Returns:
L_sum = 32-bit sum of L_var1 and L_var2 (Word32)
*/
- static inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
+ static inline Word32 L_add(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
{
Word32 L_sum;
@@ -154,8 +154,8 @@ extern "C"
Returns:
L_diff = 32-bit difference of L_var1 and L_var2 (Word32)
*/
- static inline Word32 L_sub(register Word32 L_var1, register Word32 L_var2,
- register Flag *pOverflow)
+ static inline Word32 L_sub(Word32 L_var1, Word32 L_var2,
+ Flag *pOverflow)
{
Word32 L_diff;
@@ -246,7 +246,7 @@ extern "C"
*/
static inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
{
- register Word32 L_product;
+ Word32 L_product;
L_product = (Word32) var1 * var2;
@@ -452,7 +452,7 @@ extern "C"
*/
static inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
{
- register Word32 product;
+ Word32 product;
product = ((Word32) var1 * var2) >> 15;
diff --git a/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp b/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
index 4135f30..976b1a6 100644
--- a/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/az_lsp.cpp
@@ -564,10 +564,10 @@ void Az_lsp(
Flag *pOverflow /* (i/o): overflow flag */
)
{
- register Word16 i;
- register Word16 j;
- register Word16 nf;
- register Word16 ip;
+ Word16 i;
+ Word16 j;
+ Word16 nf;
+ Word16 ip;
Word16 xlow;
Word16 ylow;
Word16 xhigh;
diff --git a/media/libstagefright/codecs/amrnb/common/src/div_s.cpp b/media/libstagefright/codecs/amrnb/common/src/div_s.cpp
index f3bed7e..14d30c5 100644
--- a/media/libstagefright/codecs/amrnb/common/src/div_s.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/div_s.cpp
@@ -207,13 +207,13 @@ Word16 div_s (Word16 var1, Word16 var2)
/*----------------------------------------------------------------------------
; FUNCTION CODE
----------------------------------------------------------------------------*/
-Word16 div_s(register Word16 var1, register Word16 var2)
+Word16 div_s(Word16 var1, Word16 var2)
{
/*----------------------------------------------------------------------------
; Define all local variables
----------------------------------------------------------------------------*/
Word16 var_out = 0;
- register Word16 iteration;
+ Word16 iteration;
Word32 L_num;
Word32 L_denom;
Word32 L_denom_by_2;
diff --git a/media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp b/media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp
index 3650f3c..1c8a700 100644
--- a/media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/gc_pred.cpp
@@ -477,9 +477,9 @@ void gc_pred(
Flag *pOverflow
)
{
- register Word16 i;
- register Word32 L_temp1, L_temp2;
- register Word32 L_tmp;
+ Word16 i;
+ Word32 L_temp1, L_temp2;
+ Word32 L_tmp;
Word32 ener_code;
Word32 ener;
Word16 exp, frac;
@@ -993,7 +993,7 @@ void gc_pred_average_limited(
)
{
Word16 av_pred_en;
- register Word16 i;
+ Word16 i;
/* do average in MR122 mode (log2() domain) */
av_pred_en = 0;
diff --git a/media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp b/media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp
index be76241..2d3b9e4 100644
--- a/media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/gmed_n.cpp
@@ -185,9 +185,9 @@ Word16 gmed_n( /* o : the median value */
Word16 n /* i : number of inputs */
)
{
- register Word16 i, j, ix = 0;
- register Word16 max;
- register Word16 medianIndex;
+ Word16 i, j, ix = 0;
+ Word16 max;
+ Word16 medianIndex;
Word16 tmp[NMAX];
Word16 tmp2[NMAX];
diff --git a/media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp b/media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp
index 6b7b471..495359f 100644
--- a/media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/lsp_az.cpp
@@ -254,8 +254,8 @@ static void Get_lsp_pol(
Word32 *f,
Flag *pOverflow)
{
- register Word16 i;
- register Word16 j;
+ Word16 i;
+ Word16 j;
Word16 hi;
Word16 lo;
@@ -511,8 +511,8 @@ void Lsp_Az(
Flag *pOverflow /* (o) : overflow flag */
)
{
- register Word16 i;
- register Word16 j;
+ Word16 i;
+ Word16 j;
Word32 f1[6];
Word32 f2[6];
diff --git a/media/libstagefright/codecs/amrnb/common/src/mult_r.cpp b/media/libstagefright/codecs/amrnb/common/src/mult_r.cpp
index 0777e68..7112b3d 100644
--- a/media/libstagefright/codecs/amrnb/common/src/mult_r.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/mult_r.cpp
@@ -190,7 +190,7 @@ Word16 mult_r (Word16 var1, Word16 var2)
Word16 mult_r(Word16 var1, Word16 var2, Flag *pOverflow)
{
- register Word32 L_product_arr;
+ Word32 L_product_arr;
L_product_arr = ((Word32) var1) * var2; /* product */
L_product_arr += (Word32) 0x00004000L; /* round */
diff --git a/media/libstagefright/codecs/amrnb/common/src/norm_l.cpp b/media/libstagefright/codecs/amrnb/common/src/norm_l.cpp
index 132fed6..d8d1259 100644
--- a/media/libstagefright/codecs/amrnb/common/src/norm_l.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/norm_l.cpp
@@ -197,12 +197,12 @@ Word16 norm_l (Word32 L_var1)
; FUNCTION CODE
----------------------------------------------------------------------------*/
#if !( defined(PV_ARM_V5) || defined(PV_ARM_GCC_V5) )
-Word16 norm_l(register Word32 L_var1)
+Word16 norm_l(Word32 L_var1)
{
/*----------------------------------------------------------------------------
; Define all local variables
----------------------------------------------------------------------------*/
- register Word16 var_out = 0;
+ Word16 var_out = 0;
/*----------------------------------------------------------------------------
; Function body here
diff --git a/media/libstagefright/codecs/amrnb/common/src/norm_s.cpp b/media/libstagefright/codecs/amrnb/common/src/norm_s.cpp
index 8cdcdb8..6468b67 100644
--- a/media/libstagefright/codecs/amrnb/common/src/norm_s.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/norm_s.cpp
@@ -194,13 +194,13 @@ Word16 norm_s (Word16 var1)
----------------------------------------------------------------------------*/
#if !( defined(PV_ARM_V5) || defined(PV_ARM_GCC_V5) )
-Word16 norm_s(register Word16 var1)
+Word16 norm_s(Word16 var1)
{
/*----------------------------------------------------------------------------
; Define all local variables
----------------------------------------------------------------------------*/
- register Word16 var_out = 0;
+ Word16 var_out = 0;
/*----------------------------------------------------------------------------
; Function body here
diff --git a/media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp b/media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp
index 9163623..8a1aa9e 100644
--- a/media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/pred_lt.cpp
@@ -260,9 +260,9 @@ void Pred_lt_3or6(
Flag *pOverflow /* output: if set, overflow occurred in this function */
)
{
- register Word16 i;
- register Word16 j;
- register Word16 k;
+ Word16 i;
+ Word16 j;
+ Word16 k;
Word16 *pX0;
Word16 *pX2;
diff --git a/media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp b/media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp
index 2b30bf4..c70847e 100644
--- a/media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/q_plsf_3.cpp
@@ -281,7 +281,7 @@ static Word16 Vq_subvec4( /* o: quantization index, Q0 */
Flag *pOverflow /* o : Flag set when overflow occurs */
)
{
- register Word16 i;
+ Word16 i;
Word16 temp;
const Word16 *p_dico;
Word16 index = 0;
@@ -607,7 +607,7 @@ static Word16 Vq_subvec3( /* o: quantization index, Q0 */
Flag use_half, /* i: use every second entry in codebook */
Flag *pOverflow) /* o : Flag set when overflow occurs */
{
- register Word16 i;
+ Word16 i;
Word16 temp;
const Word16 *p_dico;
@@ -1013,7 +1013,7 @@ void Q_plsf_3(
Flag *pOverflow /* o : Flag set when overflow occurs */
)
{
- register Word16 i, j;
+ Word16 i, j;
Word16 lsf1[M];
Word16 wf1[M];
Word16 lsf_p[M];
diff --git a/media/libstagefright/codecs/amrnb/common/src/residu.cpp b/media/libstagefright/codecs/amrnb/common/src/residu.cpp
index b25d3be..2ad132f 100644
--- a/media/libstagefright/codecs/amrnb/common/src/residu.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/residu.cpp
@@ -202,7 +202,7 @@ void Residu(
{
- register Word16 i, j;
+ Word16 i, j;
Word32 s1;
Word32 s2;
Word32 s3;
diff --git a/media/libstagefright/codecs/amrnb/common/src/shr.cpp b/media/libstagefright/codecs/amrnb/common/src/shr.cpp
index 775dc69..1018d9c 100644
--- a/media/libstagefright/codecs/amrnb/common/src/shr.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/shr.cpp
@@ -202,10 +202,10 @@ Word16 shr_std (Word16 var1, Word16 var2)
/*----------------------------------------------------------------------------
; FUNCTION CODE
----------------------------------------------------------------------------*/
-Word16 shr(register Word16 var1, register Word16 var2, Flag *pOverflow)
+Word16 shr(Word16 var1, Word16 var2, Flag *pOverflow)
{
- register Word16 result;
- register Word32 temp_res;
+ Word16 result;
+ Word32 temp_res;
if (var2 != 0)
{
diff --git a/media/libstagefright/codecs/amrnb/common/src/weight_a.cpp b/media/libstagefright/codecs/amrnb/common/src/weight_a.cpp
index 2e2efc4..ee821ef 100644
--- a/media/libstagefright/codecs/amrnb/common/src/weight_a.cpp
+++ b/media/libstagefright/codecs/amrnb/common/src/weight_a.cpp
@@ -178,7 +178,7 @@ void Weight_Ai(
Word16 a_exp[] /* (o) : Spectral expanded LPC coefficients */
)
{
- register Word16 i;
+ Word16 i;
*(a_exp) = *(a);
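
Note: the hunks above (and the ones that follow) all drop the 'register' storage class. The keyword is only a hint that modern compilers ignore; it was deprecated in C++11 and removed in C++17, and clang warns about it (-Wdeprecated-register), which becomes fatal once these makefiles add -Werror below. A minimal sketch of the pattern, not taken from any one of these files:

    /* Before/after shape of the change applied throughout the AMR sources. */
    static int sum16(const short *x, int n)
    {
        int acc = 0;                    /* was: register int acc = 0; */
        for (int i = 0; i < n; i++)     /* was: register Word16 i;    */
        {
            acc += x[i];
        }
        return acc;
    }
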
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk
index 8d6c6f8..3750e2e 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.mk
+++ b/media/libstagefright/codecs/amrnb/dec/Android.mk
@@ -45,7 +45,9 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../common/include
LOCAL_CFLAGS := \
- -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
+ -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_IMPORT_REF=
+
+LOCAL_CFLAGS += -Werror
LOCAL_MODULE := libstagefright_amrnbdec
@@ -68,6 +70,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbdec libstagefright_amrwbdec
@@ -79,3 +83,24 @@ LOCAL_MODULE := libstagefright_soft_amrdec
LOCAL_MODULE_TAGS := optional
include $(BUILD_SHARED_LIBRARY)
+
+################################################################################
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES := \
+ test/amrnbdec_test.cpp
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH)/src \
+ $(LOCAL_PATH)/../common/include \
+ $(call include-path-for, audio-utils)
+
+LOCAL_STATIC_LIBRARIES := \
+ libstagefright_amrnbdec libsndfile
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright_amrnb_common libaudioutils
+
+LOCAL_MODULE := libstagefright_amrnbdec_test
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_EXECUTABLE)
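
Note: the OSCL_UNUSED_ARG change above swaps an empty object-like macro for a function-like one that casts its argument to void. A small self-contained sketch of the effect at a call site (the function and parameter names here are made up for illustration):

    #define OSCL_UNUSED_ARG(x) (void)(x)        /* new form from the makefile      */

    static void decode_stub(int mode, int reserved)
    {
        OSCL_UNUSED_ARG(reserved);              /* expands to (void)(reserved);    */
        (void) mode;                            /* the same idiom written directly */
    }

With the old -DOSCL_UNUSED_ARG= definition the call expanded to a bare (reserved); statement; the cast-to-void form is the conventional way to mark an argument as deliberately unused and stays quiet under the -Werror enabled in the same change.
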
diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
index 3320688..d1b0f76 100644
--- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp
@@ -274,7 +274,7 @@ static size_t getFrameSize(unsigned FT) {
return frameSize;
}
-void SoftAMR::onQueueFilled(OMX_U32 portIndex) {
+void SoftAMR::onQueueFilled(OMX_U32 /* portIndex */) {
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
@@ -428,7 +428,7 @@ void SoftAMR::onQueueFilled(OMX_U32 portIndex) {
}
}
-void SoftAMR::onPortFlushCompleted(OMX_U32 portIndex) {
+void SoftAMR::onPortFlushCompleted(OMX_U32 /* portIndex */) {
}
void SoftAMR::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp b/media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp
index 899daba..861b3e6 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/d1035pf.cpp
@@ -209,7 +209,7 @@ void dec_10i40_35bits(
Word16 cod[] /* (o) : algebraic (fixed) codebook excitation */
)
{
- register Word16 i, j, pos1, pos2;
+ Word16 i, j, pos1, pos2;
Word16 sign, tmp;
for (i = 0; i < L_CODE; i++)
diff --git a/media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp b/media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp
index 08b690d..7068c0a 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/d_plsf_5.cpp
@@ -308,7 +308,7 @@ void D_plsf_5(
Flag *pOverflow /* o : Flag set when overflow occurs */
)
{
- register Word16 i;
+ Word16 i;
Word16 temp;
Word16 sign;
diff --git a/media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp b/media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp
index c5aefe4..2ca30de 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/int_lsf.cpp
@@ -218,9 +218,9 @@ void Int_lsf(
Flag *pOverflow /* o : flag set if overflow occurs */
)
{
- register Word16 i;
- register Word16 temp1;
- register Word16 temp2;
+ Word16 i;
+ Word16 temp1;
+ Word16 temp2;
if (i_subfr == 0)
{
diff --git a/media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp b/media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp
index da5445b..285465f 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/ph_disp.cpp
@@ -207,7 +207,7 @@ int ph_disp_reset (ph_dispState *state)
Word16 ph_disp_reset(ph_dispState *state)
{
- register Word16 i;
+ Word16 i;
if (state == (ph_dispState *) NULL)
{
@@ -667,15 +667,15 @@ void ph_disp(
Flag *pOverflow /* i/o : oveflow indicator */
)
{
- register Word16 i, i1;
- register Word16 tmp1;
+ Word16 i, i1;
+ Word16 tmp1;
Word32 L_temp;
Word32 L_temp2;
Word16 impNr; /* indicator for amount of disp./filter used */
Word16 inno_sav[L_SUBFR];
Word16 ps_poss[L_SUBFR];
- register Word16 nze, nPulse;
+ Word16 nze, nPulse;
Word16 ppos;
const Word16 *ph_imp; /* Pointer to phase dispersion filter */
diff --git a/media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp b/media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp
index 0336990..39e01a2 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/pstfilt.cpp
@@ -445,13 +445,13 @@ void Post_Filter(
)
{
Word16 Ap3[MP1];
- Word16 Ap4[MP1]; /* bandwidth expanded LP parameters */
- Word16 *Az; /* pointer to Az_4: */
+ Word16 Ap4[MP1]; /* bandwidth expanded LP parameters */
+ Word16 *Az; /* pointer to Az_4: */
/* LPC parameters in each subframe */
- register Word16 i_subfr; /* index for beginning of subframe */
+ Word16 i_subfr; /* index for beginning of subframe */
Word16 h[L_H];
- register Word16 i;
+ Word16 i;
Word16 temp1;
Word16 temp2;
Word32 L_tmp;
diff --git a/media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp b/media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp
new file mode 100644
index 0000000..41a9e98
--- /dev/null
+++ b/media/libstagefright/codecs/amrnb/dec/test/amrnbdec_test.cpp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <malloc.h>
+#include <stdio.h>
+#include <stdint.h>
+#include <string.h>
+#include <assert.h>
+
+#include "gsmamr_dec.h"
+#include <audio_utils/sndfile.h>
+
+// Constants for AMR-NB
+enum {
+ kInputBufferSize = 64,
+ kSamplesPerFrame = 160,
+ kBitsPerSample = 16,
+ kOutputBufferSize = kSamplesPerFrame * kBitsPerSample/8,
+ kSampleRate = 8000,
+ kChannels = 1,
+ kFileHeaderSize = 6
+};
+const uint32_t kFrameSizes[] = {12, 13, 15, 17, 19, 20, 26, 31};
+
+
+int main(int argc, char *argv[]) {
+
+ if(argc != 3) {
+ fprintf(stderr, "Usage %s <input file> <output file>\n", argv[0]);
+ return 1;
+ }
+
+ // Open the input file
+ FILE* fpInput = fopen(argv[1], "rb");
+ if (!fpInput) {
+ fprintf(stderr, "Could not open %s\n", argv[1]);
+ return 1;
+ }
+
+ // Validate the input AMR file
+ char header[kFileHeaderSize];
+ int bytesRead = fread(header, 1, kFileHeaderSize, fpInput);
+ if (bytesRead != kFileHeaderSize || memcmp(header, "#!AMR\n", kFileHeaderSize)) {
+ fprintf(stderr, "Invalid AMR-NB file\n");
+ return 1;
+ }
+
+ // Open the output file
+ SF_INFO sfInfo;
+ memset(&sfInfo, 0, sizeof(SF_INFO));
+ sfInfo.channels = kChannels;
+ sfInfo.format = SF_FORMAT_WAV | SF_FORMAT_PCM_16;
+ sfInfo.samplerate = kSampleRate;
+ SNDFILE *handle = sf_open(argv[2], SFM_WRITE, &sfInfo);
+ if(!handle){
+ fprintf(stderr, "Could not create %s\n", argv[2]);
+ return 1;
+ }
+
+ // Create AMR-NB decoder instance
+ void* amrHandle;
+ int err = GSMInitDecode(&amrHandle, (Word8*)"AMRNBDecoder");
+ if(err != 0){
+ fprintf(stderr, "Error creating AMR-NB decoder instance\n");
+ return 1;
+ }
+
+ //Allocate input buffer
+ void *inputBuf = malloc(kInputBufferSize);
+ assert(inputBuf != NULL);
+
+ //Allocate output buffer
+ void *outputBuf = malloc(kOutputBufferSize);
+ assert(outputBuf != NULL);
+
+
+ // Decode loop
+ uint32_t retVal = 0;
+ while (1) {
+ // Read mode
+ uint8_t mode;
+ bytesRead = fread(&mode, 1, 1, fpInput);
+ if (bytesRead != 1) break;
+
+ // Find frame type
+ Frame_Type_3GPP frameType = (Frame_Type_3GPP)((mode >> 3) & 0x0f);
+ if (frameType >= AMR_SID){
+ fprintf(stderr, "Frame type %d not supported\n",frameType);
+ retVal = 1;
+ break;
+ }
+
+ // Find frame size and read the frame data
+ int32_t frameSize = kFrameSizes[frameType];
+ bytesRead = fread(inputBuf, 1, frameSize, fpInput);
+ if (bytesRead != frameSize) break;
+
+ //Decode frame
+ int32_t decodeStatus;
+ decodeStatus = AMRDecode(amrHandle, frameType, (uint8_t*)inputBuf,
+ (int16_t*)outputBuf, MIME_IETF);
+ if(decodeStatus == -1) {
+ fprintf(stderr, "Decoder encountered error\n");
+ retVal = 1;
+ break;
+ }
+
+ //Write output to wav
+ sf_writef_short(handle, (int16_t*)outputBuf, kSamplesPerFrame);
+
+ }
+
+ // Close input and output file
+ fclose(fpInput);
+ sf_close(handle);
+
+ //Free allocated memory
+ free(inputBuf);
+ free(outputBuf);
+
+ // Close decoder instance
+ GSMDecodeFrameExit(&amrHandle);
+
+ return retVal;
+}
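
Note: for context on what the new test parses, AMR-NB storage-format files begin with the "#!AMR\n" magic checked above, and each frame is one table-of-contents octet followed by the packed speech bits; kFrameSizes holds the payload sizes for the eight speech modes. A sketch of the per-frame step, using the same bit layout the test assumes:

    static const unsigned kSpeechBytes[8] = {12, 13, 15, 17, 19, 20, 26, 31};

    /* toc is the first octet of a frame; bits 3..6 carry the frame type. */
    static int amr_frame_payload(unsigned char toc)
    {
        unsigned ft = (toc >> 3) & 0x0F;                 /* frame type            */
        return (ft < 8) ? (int) kSpeechBytes[ft] : -1;   /* SID/NO_DATA not here  */
    }
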
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk
index f4e467a..bdba8a9 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.mk
+++ b/media/libstagefright/codecs/amrnb/enc/Android.mk
@@ -67,7 +67,9 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../common/include
LOCAL_CFLAGS := \
- -DOSCL_UNUSED_ARG=
+ -D"OSCL_UNUSED_ARG(x)=(void)(x)"
+
+LOCAL_CFLAGS += -Werror
LOCAL_MODULE := libstagefright_amrnbenc
@@ -88,6 +90,8 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../common/include \
$(LOCAL_PATH)/../common
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbenc
diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
index 50b739c..9489457 100644
--- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
@@ -270,7 +270,7 @@ OMX_ERRORTYPE SoftAMRNBEncoder::internalSetParameter(
}
}
-void SoftAMRNBEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftAMRNBEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError) {
return;
}
diff --git a/media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp b/media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp
index 0d3acac..c71811d 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/autocorr.cpp
@@ -306,9 +306,9 @@ Word16 Autocorr(
Flag *pOverflow /* (o) : indicates overflow */
)
{
- register Word16 i;
- register Word16 j;
- register Word16 norm;
+ Word16 i;
+ Word16 j;
+ Word16 norm;
Word16 y[L_WINDOW];
Word32 sum;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp b/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp
index a33cdf74..b211032 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/c2_9pf.cpp
@@ -318,7 +318,7 @@ extern "C"
Word16 dn_sign[L_CODE];
Word16 rr[L_CODE][L_CODE];
- register Word16 i;
+ Word16 i;
Word16 index;
Word16 sharp;
@@ -592,10 +592,10 @@ extern "C"
Flag * pOverflow /* o : Flag set when overflow occurs */
)
{
- register Word16 i0;
- register Word16 i1;
+ Word16 i0;
+ Word16 i1;
Word16 ix = 0; /* initialization only needed to keep gcc silent */
- register Word16 track1;
+ Word16 track1;
Word16 ipos[NB_PULSE];
Word16 psk;
Word16 ps0;
@@ -608,7 +608,7 @@ extern "C"
Word32 s;
Word32 alp0;
Word32 alp1;
- register Word16 i;
+ Word16 i;
Word32 L_temp;
Word16 *p_codvec = &codvec[0];
@@ -993,13 +993,13 @@ extern "C"
Flag *pOverflow /* o : Flag set when overflow occurs */
)
{
- register Word16 i;
- register Word16 j;
- register Word16 k;
- register Word16 track;
- register Word16 first;
- register Word16 index;
- register Word16 rsign;
+ Word16 i;
+ Word16 j;
+ Word16 k;
+ Word16 track;
+ Word16 first;
+ Word16 index;
+ Word16 rsign;
Word16 indx;
Word16 _sign[NB_PULSE];
Word16 *p0;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp b/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp
index 4a05327..525e57d 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/cl_ltp.cpp
@@ -638,7 +638,7 @@ void cl_ltp(
Flag *pOverflow /* o : overflow indicator */
)
{
- register Word16 i;
+ Word16 i;
Word16 index;
Word32 L_temp; /* temporarily variable */
Word16 resu3; /* flag for upsample resolution */
diff --git a/media/libstagefright/codecs/amrnb/enc/src/convolve.cpp b/media/libstagefright/codecs/amrnb/enc/src/convolve.cpp
index e9ce7ba..5015a4a 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/convolve.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/convolve.cpp
@@ -212,7 +212,7 @@ void Convolve(
Word16 L /* (i) : vector size */
)
{
- register Word16 i, n;
+ Word16 i, n;
Word32 s1, s2;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp b/media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp
index e46d99f..20583c4 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/cor_h.cpp
@@ -272,8 +272,8 @@ void cor_h(
Flag *pOverflow
)
{
- register Word16 i;
- register Word16 dec;
+ Word16 i;
+ Word16 dec;
Word16 h2[L_CODE];
Word32 s;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp b/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp
index beb2aec..c25c026 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/cor_h_x.cpp
@@ -249,9 +249,9 @@ void cor_h_x(
Flag *pOverflow /* (o): pointer to overflow flag */
)
{
- register Word16 i;
- register Word16 j;
- register Word16 k;
+ Word16 i;
+ Word16 j;
+ Word16 k;
Word32 s;
Word32 y32[L_CODE];
diff --git a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp b/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp
index da60640..b4fd867 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/cor_h_x2.cpp
@@ -236,9 +236,9 @@ void cor_h_x2(
Flag *pOverflow
)
{
- register Word16 i;
- register Word16 j;
- register Word16 k;
+ Word16 i;
+ Word16 j;
+ Word16 k;
Word32 s;
Word32 y32[L_CODE];
Word32 max;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp b/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp
index 276e590..2ccb777 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/dtx_enc.cpp
@@ -130,7 +130,7 @@ terms listed above has been obtained from the copyright holder.
; MACROS
; Define module specific macros here
----------------------------------------------------------------------------*/
-extern Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow);
+extern Word32 L_add(Word32 L_var1, Word32 L_var2, Flag *pOverflow);
/*----------------------------------------------------------------------------
; DEFINES
@@ -671,7 +671,7 @@ void dtx_enc(dtx_encState *st, /* i/o : State struct */
Flag *pOverflow /* i/o : overflow indicator */
)
{
- register Word16 i, j;
+ Word16 i, j;
Word16 temp;
Word16 log_en;
Word16 lsf[M];
@@ -943,7 +943,7 @@ void dtx_buffer(dtx_encState *st, /* i/o : State struct */
)
{
- register Word16 i;
+ Word16 i;
Word32 L_frame_en;
Word32 L_temp;
Word16 log_en_e;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/levinson.cpp b/media/libstagefright/codecs/amrnb/enc/src/levinson.cpp
index 001897b..29cdac6 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/levinson.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/levinson.cpp
@@ -638,8 +638,8 @@ Word16 Levinson(
Flag *pOverflow
)
{
- register Word16 i;
- register Word16 j;
+ Word16 i;
+ Word16 j;
Word16 hi;
Word16 lo;
Word16 Kh; /* reflexion coefficient; hi and lo */
@@ -651,9 +651,9 @@ Word16 Levinson(
Word16 Al[M + 1];
Word16 Anh[M + 1]; /* LPC coef.for next iteration in */
Word16 Anl[M + 1]; /* double prec. */
- register Word32 t0; /* temporary variable */
- register Word32 t1; /* temporary variable */
- register Word32 t2; /* temporary variable */
+ Word32 t0; /* temporary variable */
+ Word32 t1; /* temporary variable */
+ Word32 t2; /* temporary variable */
Word16 *p_Rh;
Word16 *p_Rl;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp b/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp
index d3a2ec0..c039bb0 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/pitch_ol.cpp
@@ -320,7 +320,7 @@ static Word16 Lag_max( /* o : lag found */
)
#endif
{
- register Word16 i;
+ Word16 i;
Word16 *p;
Word32 max;
Word32 t0;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp b/media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp
index fdc2440..042920e 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/pre_proc.cpp
@@ -542,7 +542,7 @@ void Pre_Process(
Word16 signal[], /* input/output signal */
Word16 lg) /* length of signal */
{
- register Word16 i;
+ Word16 i;
Word16 x_n_2;
Word16 x_n_1;
Word32 L_tmp;
diff --git a/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp b/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
index d626de3..fa43f78 100644
--- a/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/src/set_sign.cpp
@@ -248,7 +248,7 @@ void set_sign(Word16 dn[], /* i/o : correlation between target and h[] */
Word16 n /* i : # of maximum correlations in dn2[] */
)
{
- register Word16 i, j, k;
+ Word16 i, j, k;
Word16 val, min;
Word16 pos = 0; /* initialization only needed to keep gcc silent */
diff --git a/media/libstagefright/codecs/amrwb/Android.mk b/media/libstagefright/codecs/amrwb/Android.mk
index 677107f..686f7a3 100644
--- a/media/libstagefright/codecs/amrwb/Android.mk
+++ b/media/libstagefright/codecs/amrwb/Android.mk
@@ -48,7 +48,9 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
LOCAL_CFLAGS := \
- -DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
+ -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_IMPORT_REF=
+
+LOCAL_CFLAGS += -Werror
LOCAL_MODULE := libstagefright_amrwbdec
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp b/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp
index d1ec790..5872512 100644
--- a/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp
+++ b/media/libstagefright/codecs/amrwb/src/pvamrwb_math_op.cpp
@@ -205,7 +205,7 @@ int16 div_16by16(int16 var1, int16 var2)
{
int16 var_out = 0;
- register int16 iteration;
+ int16 iteration;
int32 L_num;
int32 L_denom;
int32 L_denom_by_2;
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index c5b8e0c..024a292 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -86,6 +86,9 @@ LOCAL_SRC_FILES += \
endif
+# ARMV5E/Filt_6k_7k_opt.s does not compile with Clang.
+LOCAL_CLANG_ASFLAGS_arm += -no-integrated-as
+
LOCAL_MODULE := libstagefright_amrwbenc
LOCAL_ARM_MODE := arm
@@ -112,6 +115,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7
endif
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -126,6 +131,8 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/codecs/common/include \
frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrwbenc
diff --git a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
index 9ccb49c..91a512d 100644
--- a/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
+++ b/media/libstagefright/codecs/amrwbenc/SoftAMRWBEncoder.cpp
@@ -317,7 +317,7 @@ OMX_ERRORTYPE SoftAMRWBEncoder::internalSetParameter(
}
}
-void SoftAMRWBEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftAMRWBEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError) {
return;
}
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
index 8451195..f23b5a0 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/Filt_6k_7k_opt.s
@@ -29,6 +29,7 @@
.global Filt_6k_7k_asm
.extern voAWB_Copy
.extern fir_6k_7k
+ .hidden fir_6k_7k
Filt_6k_7k_asm:
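
Note: the .hidden directive added above (and in the three assembly files that follow) gives the referenced data symbol hidden ELF visibility, so the reference can be resolved locally when the objects are linked into the library instead of requiring a dynamic relocation. A rough C-level equivalent, assuming fir_6k_7k is a coefficient table defined elsewhere in the same library (the element type here is illustrative only):

    __attribute__((visibility("hidden"))) extern const short fir_6k_7k[];
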
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
index ac2dd13..deb7efc 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV5E/pred_lt4_1_opt.s
@@ -32,6 +32,7 @@
.section .text
.global pred_lt4_asm
.extern inter4_2
+ .hidden inter4_2
pred_lt4_asm:
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
index fc42a03..8df0caa 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/Filt_6k_7k_neon.s
@@ -28,6 +28,7 @@
.section .text
.global Filt_6k_7k_asm
.extern fir_6k_7k
+ .hidden fir_6k_7k
Filt_6k_7k_asm:
diff --git a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
index 8d2aaf2..67be1ed 100644
--- a/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
+++ b/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/pred_lt4_1_neon.s
@@ -29,6 +29,7 @@
.section .text
.global pred_lt4_asm
.extern inter4_2
+ .hidden inter4_2
pred_lt4_asm:
diff --git a/media/libstagefright/codecs/amrwbenc/src/autocorr.c b/media/libstagefright/codecs/amrwbenc/src/autocorr.c
index 8c477ca..0b2ea89 100644
--- a/media/libstagefright/codecs/amrwbenc/src/autocorr.c
+++ b/media/libstagefright/codecs/amrwbenc/src/autocorr.c
@@ -28,6 +28,8 @@
#include "acelp.h"
#include "ham_wind.tab"
+#define UNUSED(x) (void)(x)
+
void Autocorr(
Word16 x[], /* (i) : Input signal */
Word16 m, /* (i) : LPC order */
@@ -40,6 +42,8 @@ void Autocorr(
Word32 L_sum, L_sum1, L_tmp, F_LEN;
Word16 *p1,*p2,*p3;
const Word16 *p4;
+ UNUSED(m);
+
/* Windowing of signal */
p1 = x;
p4 = vo_window;
diff --git a/media/libstagefright/codecs/amrwbenc/src/convolve.c b/media/libstagefright/codecs/amrwbenc/src/convolve.c
index acba532..4c1f7d4 100644
--- a/media/libstagefright/codecs/amrwbenc/src/convolve.c
+++ b/media/libstagefright/codecs/amrwbenc/src/convolve.c
@@ -25,6 +25,8 @@
#include "typedef.h"
#include "basic_op.h"
+#define UNUSED(x) (void)(x)
+
void Convolve (
Word16 x[], /* (i) : input vector */
Word16 h[], /* (i) : impulse response */
@@ -35,6 +37,8 @@ void Convolve (
Word32 i, n;
Word16 *tmpH,*tmpX;
Word32 s;
+ UNUSED(L);
+
for (n = 0; n < 64;)
{
tmpH = h+n;
diff --git a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
index 0d66c31..b66b55e 100644
--- a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
+++ b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
@@ -31,6 +31,8 @@
#define UP_SAMP 4
#define L_INTERPOL1 4
+#define UNUSED(x) (void)(x)
+
/* Local functions */
#ifdef ASM_OPT
@@ -171,6 +173,7 @@ static void Norm_Corr(
Word32 corr, exp_corr, norm, exp, scale;
Word16 exp_norm, excf[L_SUBFR], tmp;
Word32 L_tmp, L_tmp1, L_tmp2;
+ UNUSED(L_subfr);
/* compute the filtered excitation for the first delay t_min */
k = -t_min;
diff --git a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c b/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
index 80a0b73..d658602 100644
--- a/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
+++ b/media/libstagefright/codecs/amrwbenc/src/q_pulse.c
@@ -188,7 +188,7 @@ Word32 quant_4p_4N( /* (o) return 4*N bits */
Word16 pos[], /* (i) position of the pulse 1..4 */
Word16 N) /* (i) number of bits for position */
{
- Word16 nb_pos, mask, n_1, tmp;
+ Word16 nb_pos, mask __unused, n_1, tmp;
Word16 posA[4], posB[4];
Word32 i, j, k, index;
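
Note: the __unused annotation above keeps the mask variable declared while telling the compiler that the missing use is intentional, so the new -Werror builds still pass. A minimal self-contained sketch, assuming the usual GCC/Clang attribute (on Android the macro normally comes from <sys/cdefs.h>):

    #ifndef __unused
    #define __unused __attribute__((__unused__))
    #endif

    static int demo(int kept)
    {
        int scratch __unused;   /* declared, deliberately never read */
        return kept + 1;
    }
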
diff --git a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
index 1bda05a..961aadc 100644
--- a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
+++ b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
@@ -26,6 +26,8 @@
#include "math_op.h"
#include "cnst.h"
+#define UNUSED(x) (void)(x)
+
void Syn_filt(
Word16 a[], /* (i) Q12 : a[m+1] prediction coefficients */
Word16 x[], /* (i) : input signal */
@@ -95,6 +97,8 @@ void Syn_filt_32(
Word32 i,a0;
Word32 L_tmp, L_tmp1;
Word16 *p1, *p2, *p3;
+ UNUSED(m);
+
a0 = a[0] >> (4 + Qnew); /* input / 16 and >>Qnew */
/* Do the filtering. */
for (i = 0; i < lg; i++)
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
index ea9da52..df7b9b3 100644
--- a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
+++ b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
@@ -39,6 +39,8 @@
#include "mem_align.h"
#include "cmnMemory.h"
+#define UNUSED(x) (void)(x)
+
#ifdef __cplusplus
extern "C" {
#endif
@@ -1602,6 +1604,8 @@ VO_U32 VO_API voAMRWB_Init(VO_HANDLE * phCodec, /* o: the audi
VO_MEM_OPERATOR voMemoprator;
#endif
VO_MEM_OPERATOR *pMemOP;
+ UNUSED(vType);
+
int interMem = 0;
if(pUserData == NULL || pUserData->memflag != VO_IMF_USERMEMOPERATOR || pUserData->memData == NULL )
diff --git a/media/libstagefright/codecs/amrwbenc/src/wb_vad.c b/media/libstagefright/codecs/amrwbenc/src/wb_vad.c
index 13dd2aa..2beaefd 100644
--- a/media/libstagefright/codecs/amrwbenc/src/wb_vad.c
+++ b/media/libstagefright/codecs/amrwbenc/src/wb_vad.c
@@ -404,7 +404,7 @@ static void noise_estimate_update(
alpha_down = ALPHA_DOWN1;
} else
{
- if ((st->stat_count == 0))
+ if (st->stat_count == 0)
{
alpha_up = ALPHA_UP2;
alpha_down = ALPHA_DOWN2;
diff --git a/media/libstagefright/codecs/avc/common/Android.mk b/media/libstagefright/codecs/avc/common/Android.mk
index 22dee15..844ef0a 100644
--- a/media/libstagefright/codecs/avc/common/Android.mk
+++ b/media/libstagefright/codecs/avc/common/Android.mk
@@ -16,4 +16,6 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/common/src/deblock.cpp b/media/libstagefright/codecs/avc/common/src/deblock.cpp
index de2d2b6..5f8b693 100644
--- a/media/libstagefright/codecs/avc/common/src/deblock.cpp
+++ b/media/libstagefright/codecs/avc/common/src/deblock.cpp
@@ -1279,7 +1279,7 @@ void EdgeLoop_Luma_vertical(uint8* SrcPtr, uint8 *Strength, int Alpha, int Beta,
int C0, c0, dif, AbsDelta, Strng, tmp, tmp1;
int L2 = 0, L1, L0, R0, R1, R2 = 0;
uint8 *ptr, *ptr1;
- register uint R_in, L_in;
+ uint R_in, L_in;
uint R_out, L_out;
diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk
index 7d17c2a..2ceebc8 100644
--- a/media/libstagefright/codecs/avc/enc/Android.mk
+++ b/media/libstagefright/codecs/avc/enc/Android.mk
@@ -28,7 +28,9 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/native/include/media/openmax
LOCAL_CFLAGS := \
- -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
+ -DOSCL_IMPORT_REF= -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_EXPORT_REF=
+
+LOCAL_CFLAGS += -Werror
include $(BUILD_STATIC_LIBRARY)
@@ -49,7 +51,7 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../common
LOCAL_CFLAGS := \
- -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
+ -DOSCL_IMPORT_REF= -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_EXPORT_REF=
LOCAL_STATIC_LIBRARIES := \
@@ -69,4 +71,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_h264enc
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index 1d398fb..928a74f 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVCEncoder"
#include <utils/Log.h>
+#include <utils/misc.h>
#include "avcenc_api.h"
#include "avcenc_int.h"
@@ -25,6 +26,7 @@
#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -34,6 +36,12 @@
#include "SoftAVCEncoder.h"
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
namespace android {
template<class T>
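
Note: the UNUSED_UNLESS_VERBOSE helper added above exists because ALOGV compiles to nothing when LOG_NDEBUG is set, which would otherwise leave the logged argument unused and trip -Werror; when verbose logging is on, the argument really is used and no cast is wanted. A standalone sketch of the same idea (LOG_NDEBUG and the VLOG macro are stand-ins here, not the real Android log macros):

    #include <stdio.h>

    #define LOG_NDEBUG 1                          /* stand-in: verbose logging off */
    #if LOG_NDEBUG
    #define VLOG(...)                             /* verbose log compiled out      */
    #define UNUSED_UNLESS_VERBOSE(x) (void)(x)    /* so mark the argument as used  */
    #else
    #define VLOG(...) printf(__VA_ARGS__)
    #define UNUSED_UNLESS_VERBOSE(x)
    #endif

    static void signal_returned(void *buffer)
    {
        UNUSED_UNLESS_VERBOSE(buffer);
        VLOG("buffer returned: %p\n", buffer);
    }
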
@@ -45,31 +53,36 @@ static void InitOMXParams(T *params) {
params->nVersion.s.nStep = 0;
}
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 },
+};
+
typedef struct LevelConversion {
OMX_U32 omxLevel;
AVCLevel avcLevel;
+ uint32_t maxMacroBlocks;
} LevelConcersion;
static LevelConversion ConversionTable[] = {
- { OMX_VIDEO_AVCLevel1, AVC_LEVEL1_B },
- { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1 },
- { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1 },
- { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2 },
- { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3 },
- { OMX_VIDEO_AVCLevel2, AVC_LEVEL2 },
+ { OMX_VIDEO_AVCLevel1, AVC_LEVEL1_B, 99 },
+ { OMX_VIDEO_AVCLevel1b, AVC_LEVEL1, 99 },
+ { OMX_VIDEO_AVCLevel11, AVC_LEVEL1_1, 396 },
+ { OMX_VIDEO_AVCLevel12, AVC_LEVEL1_2, 396 },
+ { OMX_VIDEO_AVCLevel13, AVC_LEVEL1_3, 396 },
+ { OMX_VIDEO_AVCLevel2, AVC_LEVEL2, 396 },
#if 0
- // encoding speed is very poor if video
- // resolution is higher than CIF
- { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1 },
- { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2 },
- { OMX_VIDEO_AVCLevel3, AVC_LEVEL3 },
- { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1 },
- { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2 },
- { OMX_VIDEO_AVCLevel4, AVC_LEVEL4 },
- { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1 },
- { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2 },
- { OMX_VIDEO_AVCLevel5, AVC_LEVEL5 },
- { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1 },
+ // encoding speed is very poor if video resolution
+ // is higher than CIF or if level is higher than 2
+ { OMX_VIDEO_AVCLevel21, AVC_LEVEL2_1, 792 },
+ { OMX_VIDEO_AVCLevel22, AVC_LEVEL2_2, 1620 },
+ { OMX_VIDEO_AVCLevel3, AVC_LEVEL3, 1620 },
+ { OMX_VIDEO_AVCLevel31, AVC_LEVEL3_1, 3600 },
+ { OMX_VIDEO_AVCLevel32, AVC_LEVEL3_2, 5120 },
+ { OMX_VIDEO_AVCLevel4, AVC_LEVEL4, 8192 },
+ { OMX_VIDEO_AVCLevel41, AVC_LEVEL4_1, 8192 },
+ { OMX_VIDEO_AVCLevel42, AVC_LEVEL4_2, 8704 },
+ { OMX_VIDEO_AVCLevel5, AVC_LEVEL5, 22080 },
+ { OMX_VIDEO_AVCLevel51, AVC_LEVEL5_1, 36864 },
#endif
};
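
Note: the new maxMacroBlocks column above feeds the output-buffer sizing added further down in the constructor. The highest level left enabled is OMX_VIDEO_AVCLevel2 (the larger levels stay under #if 0), and the buffer is sized at 320 bytes per macroblock, so the arithmetic comes out to:

    // 320 bytes/MB * 396 MBs (the Level 2 entry above) = 126720 bytes,
    // replacing the removed initPorts() sizing, which took the larger of the
    // raw input frame size and the 31584-byte magic value.
    const size_t kOutputBufferSize = 320 * 396;   // = 126720
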
@@ -105,45 +118,15 @@ static status_t ConvertAvcSpecLevelToOmxAvcLevel(
return BAD_VALUE;
}
-inline static void ConvertYUV420SemiPlanarToYUV420Planar(
- uint8_t *inyuv, uint8_t* outyuv,
- int32_t width, int32_t height) {
-
- int32_t outYsize = width * height;
- uint32_t *outy = (uint32_t *) outyuv;
- uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
- uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
- /* Y copying */
- memcpy(outy, inyuv, outYsize);
-
- /* U & V copying */
- uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
- for (int32_t i = height >> 1; i > 0; --i) {
- for (int32_t j = width >> 2; j > 0; --j) {
- uint32_t temp = *inyuv_4++;
- uint32_t tempU = temp & 0xFF;
- tempU = tempU | ((temp >> 8) & 0xFF00);
-
- uint32_t tempV = (temp >> 8) & 0xFF;
- tempV = tempV | ((temp >> 16) & 0xFF00);
-
- // Flip U and V
- *outcb++ = tempV;
- *outcr++ = tempU;
- }
- }
-}
-
static void* MallocWrapper(
- void *userData, int32_t size, int32_t attrs) {
+ void * /* userData */, int32_t size, int32_t /* attrs */) {
void *ptr = malloc(size);
if (ptr)
memset(ptr, 0, size);
return ptr;
}
-static void FreeWrapper(void *userData, void* ptr) {
+static void FreeWrapper(void * /* userData */, void* ptr) {
free(ptr);
}
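
Note: the semiplanar-to-planar helper deleted above is not gone from the flow; the encode path further down still calls ConvertYUV420SemiPlanarToYUV420Planar, which presumably now comes from the shared SoftVideoEncoderOMXComponent base class. A compact byte-at-a-time sketch of the conversion it performs, keeping the removed code's swap of the interleaved chroma bytes:

    #include <stdint.h>
    #include <string.h>

    static void semi_to_planar(const uint8_t *in, uint8_t *out, int w, int h)
    {
        int ySize = w * h;
        memcpy(out, in, ySize);                     /* Y plane is unchanged          */
        const uint8_t *uv = in + ySize;             /* interleaved chroma            */
        uint8_t *cb = out + ySize;
        uint8_t *cr = cb + (ySize >> 2);
        for (int i = 0; i < (ySize >> 1); i += 2) {
            *cr++ = uv[i];                          /* even byte -> Cr ("flip U and  */
            *cb++ = uv[i + 1];                      /*  V", as the removed code did) */
        }
    }
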
@@ -172,13 +155,11 @@ SoftAVCEncoder::SoftAVCEncoder(
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
- mVideoWidth(176),
- mVideoHeight(144),
- mVideoFrameRate(30),
- mVideoBitRate(192000),
- mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
- mStoreMetaDataInBuffers(false),
+ : SoftVideoEncoderOMXComponent(
+ name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
+ kProfileLevels, NELEM(kProfileLevels),
+ 176 /* width */, 144 /* height */,
+ callbacks, appData, component),
mIDRFrameRefreshIntervalInSec(1),
mAVCEncProfile(AVC_BASELINE),
mAVCEncLevel(AVC_LEVEL2),
@@ -192,7 +173,13 @@ SoftAVCEncoder::SoftAVCEncoder(
mInputFrameData(NULL),
mSliceGroup(NULL) {
- initPorts();
+ const size_t kOutputBufferSize =
+ 320 * ConversionTable[NELEM(ConversionTable) - 1].maxMacroBlocks;
+
+ initPorts(
+ kNumBuffers, kNumBuffers, kOutputBufferSize,
+ MEDIA_MIMETYPE_VIDEO_AVC, 2 /* minCompressionRatio */);
+
ALOGI("Construct SoftAVCEncoder");
}
@@ -217,7 +204,7 @@ OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
mHandle->CBAVC_Free = FreeWrapper;
CHECK(mEncParams != NULL);
- memset(mEncParams, 0, sizeof(mEncParams));
+ memset(mEncParams, 0, sizeof(*mEncParams));
mEncParams->rate_control = AVC_ON;
mEncParams->initQP = 0;
mEncParams->init_CBP_removal_delay = 1600;
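
Note: the memset fix above is the classic sizeof-of-a-pointer bug. sizeof(mEncParams) is the size of the pointer (4 or 8 bytes), so only the first few fields were being zeroed; sizeof(*mEncParams) clears the whole structure. In miniature, with an illustrative struct:

    #include <stdlib.h>
    #include <string.h>

    struct Params { int rate_control; int initQP; int width; int height; };

    static struct Params *make_params(void)
    {
        struct Params *p = (struct Params *) malloc(sizeof(*p));
        if (p != NULL)
            memset(p, 0, sizeof(*p));   /* sizeof(p) would zero only 4 or 8 bytes */
        return p;
    }
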
@@ -254,29 +241,28 @@ OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
mEncParams->use_overrun_buffer = AVC_OFF;
- if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
// Color conversion is needed.
- CHECK(mInputFrameData == NULL);
+ free(mInputFrameData);
mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ (uint8_t *) malloc((mWidth * mHeight * 3 ) >> 1);
CHECK(mInputFrameData != NULL);
}
// PV's AVC encoder requires the video dimension of multiple
- if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+ if (mWidth % 16 != 0 || mHeight % 16 != 0) {
ALOGE("Video frame size %dx%d must be a multiple of 16",
- mVideoWidth, mVideoHeight);
+ mWidth, mHeight);
return OMX_ErrorBadParameter;
}
- mEncParams->width = mVideoWidth;
- mEncParams->height = mVideoHeight;
- mEncParams->bitrate = mVideoBitRate;
- mEncParams->frame_rate = 1000 * mVideoFrameRate; // In frames/ms!
- mEncParams->CPB_size = (uint32_t) (mVideoBitRate >> 1);
+ mEncParams->width = mWidth;
+ mEncParams->height = mHeight;
+ mEncParams->bitrate = mBitrate;
+ mEncParams->frame_rate = (1000 * mFramerate) >> 16; // In frames/ms!, mFramerate is in Q16
+ mEncParams->CPB_size = (uint32_t) (mBitrate >> 1);
- int32_t nMacroBlocks = ((((mVideoWidth + 15) >> 4) << 4) *
- (((mVideoHeight + 15) >> 4) << 4)) >> 8;
+ int32_t nMacroBlocks = divUp(mWidth, 16) * divUp(mHeight, 16);
CHECK(mSliceGroup == NULL);
mSliceGroup = (uint32_t *) malloc(sizeof(uint32_t) * nMacroBlocks);
CHECK(mSliceGroup != NULL);
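
Note: two conversions in this hunk are worth spelling out. mFramerate is now Q16 fixed point (fps * 65536), so (1000 * mFramerate) >> 16 equals fps * 1000, the same value the old 1000 * mVideoFrameRate produced, and the idr_period shift later in the file works the same way. divUp and align presumably come from the newly included AUtils.h and replace the open-coded ((x + 15) >> 4) << 4 style rounding. A sketch under those assumptions:

    #include <stdint.h>

    static int32_t  div_up(int32_t v, int32_t d)   { return (v + d - 1) / d; }  /* divUp */
    static int32_t  align_to(int32_t v, int32_t d) { return div_up(v, d) * d; } /* align */

    /* Q16 framerate: (1000 * fr) >> 16 == 1000 * fps. */
    static uint32_t frames_per_ms_unit(uint32_t framerateQ16) { return (1000 * framerateQ16) >> 16; }

    /* e.g. 176x144 -> div_up(176,16) * div_up(144,16) = 11 * 9 = 99 macroblocks,
       matching the Level 1/1b rows in the table above. */
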
@@ -295,7 +281,7 @@ OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
mEncParams->idr_period = 1; // All I frames
} else {
mEncParams->idr_period =
- (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+ (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16; // mFramerate is in Q16
}
// Set profile and level
@@ -342,10 +328,10 @@ OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
PVAVCCleanUpEncoder(mHandle);
releaseOutputBuffers();
- delete mInputFrameData;
+ free(mInputFrameData);
mInputFrameData = NULL;
- delete mSliceGroup;
+ free(mSliceGroup);
mSliceGroup = NULL;
delete mEncParams;
@@ -368,71 +354,9 @@ void SoftAVCEncoder::releaseOutputBuffers() {
mOutputBuffers.clear();
}
-void SoftAVCEncoder::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
-
- const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
-
- // 31584 is PV's magic number. Not sure why.
- const size_t kOutputBufferSize =
- (kInputBufferSize > 31584) ? kInputBufferSize: 31584;
-
- def.nPortIndex = 0;
- def.eDir = OMX_DirInput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kInputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 1;
-
- def.format.video.cMIMEType = const_cast<char *>("video/raw");
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
- def.format.video.xFramerate = (mVideoFrameRate << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-
- def.nPortIndex = 1;
- def.eDir = OMX_DirOutput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kOutputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
-
- def.format.video.cMIMEType = const_cast<char *>("video/avc");
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
- def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- def.format.video.xFramerate = (0 << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-}
-
OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
OMX_INDEXTYPE index, OMX_PTR params) {
switch (index) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -443,37 +367,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
}
bitRate->eControlRate = OMX_Video_ControlRateVariable;
- bitRate->nTargetBitrate = mVideoBitRate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- if (formatParams->nIndex == 0) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- } else if (formatParams->nIndex == 1) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- } else {
- formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
- } else {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- }
-
+ bitRate->nTargetBitrate = mBitrate;
return OMX_ErrorNone;
}
@@ -510,30 +404,8 @@ OMX_ERRORTYPE SoftAVCEncoder::internalGetParameter(
return OMX_ErrorNone;
}
- case OMX_IndexParamVideoProfileLevelQuerySupported:
- {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;
-
- if (profileLevel->nPortIndex != 1) {
- return OMX_ErrorUndefined;
- }
-
- const size_t size =
- sizeof(ConversionTable) / sizeof(ConversionTable[0]);
-
- if (profileLevel->nProfileIndex >= size) {
- return OMX_ErrorNoMore;
- }
-
- profileLevel->eProfile = OMX_VIDEO_AVCProfileBaseline;
- profileLevel->eLevel = ConversionTable[profileLevel->nProfileIndex].omxLevel;
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
}
}
@@ -542,11 +414,6 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -557,105 +424,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
return OMX_ErrorUndefined;
}
- mVideoBitRate = bitRate->nTargetBitrate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamPortDefinition:
- {
- OMX_PARAM_PORTDEFINITIONTYPE *def =
- (OMX_PARAM_PORTDEFINITIONTYPE *)params;
- if (def->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (def->nPortIndex == 0) {
- if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
- return OMX_ErrorUndefined;
- }
- } else {
- if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingAVC ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
- return OMX_ErrorUndefined;
- }
- }
-
- OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
- if (OMX_ErrorNone != err) {
- return err;
- }
-
- if (def->nPortIndex == 0) {
- mVideoWidth = def->format.video.nFrameWidth;
- mVideoHeight = def->format.video.nFrameHeight;
- mVideoFrameRate = def->format.video.xFramerate >> 16;
- mVideoColorFormat = def->format.video.eColorFormat;
-
- OMX_PARAM_PORTDEFINITIONTYPE *portDef =
- &editPortInfo(0)->mDef;
- portDef->format.video.nFrameWidth = mVideoWidth;
- portDef->format.video.nFrameHeight = mVideoHeight;
- portDef->format.video.xFramerate = def->format.video.xFramerate;
- portDef->format.video.eColorFormat =
- (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
- portDef = &editPortInfo(1)->mDef;
- portDef->format.video.nFrameWidth = mVideoWidth;
- portDef->format.video.nFrameHeight = mVideoHeight;
- } else {
- mVideoBitRate = def->format.video.nBitrate;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamStandardComponentRole:
- {
- const OMX_PARAM_COMPONENTROLETYPE *roleParams =
- (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
- if (strncmp((const char *)roleParams->cRole,
- "video_encoder.avc",
- OMX_MAX_STRINGNAME_SIZE - 1)) {
- return OMX_ErrorUndefined;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
- ((formatParams->nIndex == 0 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
- (formatParams->nIndex == 1 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
- (formatParams->nIndex == 2 &&
- formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
- return OMX_ErrorUndefined;
- }
- mVideoColorFormat = formatParams->eColorFormat;
- } else {
- if (formatParams->eCompressionFormat != OMX_VIDEO_CodingAVC ||
- formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
- return OMX_ErrorUndefined;
- }
- }
-
+ mBitrate = bitRate->nTargetBitrate;
return OMX_ErrorNone;
}
@@ -692,37 +461,12 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
return OMX_ErrorNone;
}
- case kStoreMetaDataExtensionIndex:
- {
- StoreMetaDataInBuffersParams *storeParams =
- (StoreMetaDataInBuffersParams*)params;
- if (storeParams->nPortIndex != 0) {
- ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
- __FUNCTION__);
- return OMX_ErrorUndefined;
- }
-
- mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
- ALOGV("StoreMetaDataInBuffers set to: %s",
- mStoreMetaDataInBuffers ? " true" : "false");
-
- if (mStoreMetaDataInBuffers) {
- mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
- if (mInputFrameData == NULL) {
- mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
- }
- }
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
}
}
-void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mSawInputEOS) {
return;
}
@@ -795,8 +539,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
- buffer_handle_t srcBuffer; // for MetaDataMode only
-
// Get next input video frame
if (mReadyForNextFrame) {
// Save the input buffer info so that it can be
@@ -814,21 +556,16 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
if (inHeader->nFilledLen > 0) {
AVCFrameIO videoInput;
memset(&videoInput, 0, sizeof(videoInput));
- videoInput.height = ((mVideoHeight + 15) >> 4) << 4;
- videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
+ videoInput.height = align(mHeight, 16);
+ videoInput.pitch = align(mWidth, 16);
videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000; // in ms
- uint8_t *inputData = NULL;
- if (mStoreMetaDataInBuffers) {
- if (inHeader->nFilledLen != 8) {
- ALOGE("MetaData buffer is wrong size! "
- "(got %lu bytes, expected 8)", inHeader->nFilledLen);
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
- return;
- }
+ const uint8_t *inputData = NULL;
+ if (mInputDataIsMeta) {
inputData =
- extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
- &srcBuffer);
+ extractGraphicBuffer(
+ mInputFrameData, (mWidth * mHeight * 3) >> 1,
+ inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
+ mWidth, mHeight);
if (inputData == NULL) {
ALOGE("Unable to extract gralloc buffer in metadata mode");
mSignalledError = true;
@@ -837,16 +574,16 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
}
// TODO: Verify/convert pixel format enum
} else {
- inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ ConvertYUV420SemiPlanarToYUV420Planar(
+ inputData, mInputFrameData, mWidth, mHeight);
+ inputData = mInputFrameData;
+ }
}
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
- ConvertYUV420SemiPlanarToYUV420Planar(
- inputData, mInputFrameData, mVideoWidth, mVideoHeight);
- inputData = mInputFrameData;
- }
CHECK(inputData != NULL);
- videoInput.YCbCr[0] = inputData;
+ videoInput.YCbCr[0] = (uint8_t *)inputData;
videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
videoInput.YCbCr[2] = videoInput.YCbCr[1] +
((videoInput.height * videoInput.pitch) >> 2);
@@ -863,14 +600,12 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
if (encoderStatus < AVCENC_SUCCESS) {
ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
mSignalledError = true;
- releaseGrallocData(srcBuffer);
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
return;
} else {
ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
- releaseGrallocData(srcBuffer);
notifyEmptyBufferDone(inHeader);
return;
}
@@ -910,7 +645,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
if (encoderStatus < AVCENC_SUCCESS) {
ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
mSignalledError = true;
- releaseGrallocData(srcBuffer);
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
return;
}
@@ -920,7 +654,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
- releaseGrallocData(srcBuffer);
notifyEmptyBufferDone(inHeader);
outQueue.erase(outQueue.begin());
@@ -964,50 +697,10 @@ int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
}
void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
+ UNUSED_UNLESS_VERBOSE(buffer);
ALOGV("signalBufferReturned: %p", buffer);
}
-OMX_ERRORTYPE SoftAVCEncoder::getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index) {
- if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *(int32_t*)index = kStoreMetaDataExtensionIndex;
- return OMX_ErrorNone;
- }
- return OMX_ErrorUndefined;
-}
-
-uint8_t *SoftAVCEncoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
- OMX_U32 type = *(OMX_U32*)data;
- status_t res;
- if (type != kMetadataBufferTypeGrallocSource) {
- ALOGE("Data passed in with metadata mode does not have type "
- "kMetadataBufferTypeGrallocSource (%d), has type %ld instead",
- kMetadataBufferTypeGrallocSource, type);
- return NULL;
- }
- buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
-
- const Rect rect(mVideoWidth, mVideoHeight);
- uint8_t *img;
- res = GraphicBufferMapper::get().lock(imgBuffer,
- GRALLOC_USAGE_HW_VIDEO_ENCODER,
- rect, (void**)&img);
- if (res != OK) {
- ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
- imgBuffer);
- return NULL;
- }
-
- *buffer = imgBuffer;
- return img;
-}
-
-void SoftAVCEncoder::releaseGrallocData(buffer_handle_t buffer) {
- if (mStoreMetaDataInBuffers) {
- GraphicBufferMapper::get().unlock(buffer);
- }
-}
-
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
index 23d5ff1..81de109 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
@@ -22,14 +22,12 @@
#include <utils/Vector.h>
#include "avcenc_api.h"
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
namespace android {
-struct MediaBuffer;
-
struct SoftAVCEncoder : public MediaBufferObserver,
- public SimpleSoftOMXComponent {
+ public SoftVideoEncoderOMXComponent {
SoftAVCEncoder(
const char *name,
const OMX_CALLBACKTYPE *callbacks,
@@ -45,11 +43,6 @@ struct SoftAVCEncoder : public MediaBufferObserver,
virtual void onQueueFilled(OMX_U32 portIndex);
- // Override SoftOMXComponent methods
-
- virtual OMX_ERRORTYPE getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index);
-
// Implement MediaBufferObserver
virtual void signalBufferReturned(MediaBuffer *buffer);
@@ -67,22 +60,12 @@ private:
kNumBuffers = 2,
};
- enum {
- kStoreMetaDataExtensionIndex = OMX_IndexVendorStartUnused + 1
- };
-
// OMX input buffer's timestamp and flags
typedef struct {
int64_t mTimeUs;
int32_t mFlags;
} InputBufferInfo;
- int32_t mVideoWidth;
- int32_t mVideoHeight;
- int32_t mVideoFrameRate;
- int32_t mVideoBitRate;
- int32_t mVideoColorFormat;
- bool mStoreMetaDataInBuffers;
int32_t mIDRFrameRefreshIntervalInSec;
AVCProfile mAVCEncProfile;
AVCLevel mAVCEncLevel;
@@ -103,15 +86,11 @@ private:
Vector<MediaBuffer *> mOutputBuffers;
Vector<InputBufferInfo> mInputBufferInfoVec;
- void initPorts();
OMX_ERRORTYPE initEncParams();
OMX_ERRORTYPE initEncoder();
OMX_ERRORTYPE releaseEncoder();
void releaseOutputBuffers();
- uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
- void releaseGrallocData(buffer_handle_t buffer);
-
DISALLOW_EVIL_CONSTRUCTORS(SoftAVCEncoder);
};
diff --git a/media/libstagefright/codecs/avc/enc/src/findhalfpel.cpp b/media/libstagefright/codecs/avc/enc/src/findhalfpel.cpp
index 38a2a15..0b8d9e2 100644
--- a/media/libstagefright/codecs/avc/enc/src/findhalfpel.cpp
+++ b/media/libstagefright/codecs/avc/enc/src/findhalfpel.cpp
@@ -151,8 +151,7 @@ void GenerateHalfPelPred(uint8* subpel_pred, uint8 *ncand, int lx)
uint8 tmp8;
int32 tmp32;
int16 tmp_horz[18*22], *dst_16, *src_16;
- register int a = 0, b = 0, c = 0, d = 0, e = 0, f = 0; // temp register
- int msk;
+ int a = 0, b = 0, c = 0, d = 0, e = 0, f = 0; // temp
int i, j;
/* first copy full-pel to the first array */
@@ -379,7 +378,6 @@ void GenerateHalfPelPred(uint8* subpel_pred, uint8 *ncand, int lx)
// one can just use the above code and change the for(i=2 to for(i=18
for (i = 16; i > 0; i -= 4)
{
- msk = 0;
for (j = 17; j > 0; j--)
{
a = *((uint32*)ref); /* load 4 bytes */
diff --git a/media/libstagefright/codecs/avc/enc/src/init.cpp b/media/libstagefright/codecs/avc/enc/src/init.cpp
index c258b57..6e1413a 100644
--- a/media/libstagefright/codecs/avc/enc/src/init.cpp
+++ b/media/libstagefright/codecs/avc/enc/src/init.cpp
@@ -177,10 +177,6 @@ AVCEnc_Status SetEncodeParam(AVCHandle* avcHandle, AVCEncParams* encParam,
seqParam->offset_for_non_ref_pic = extS->offset_for_non_ref_pic;
seqParam->offset_for_top_to_bottom_field = extS->offset_for_top_to_bottom_field;
seqParam->num_ref_frames_in_pic_order_cnt_cycle = extS->num_ref_frames_in_pic_order_cnt_cycle;
- if (extS->offset_for_ref_frame == NULL)
- {
- return AVCENC_ENCPARAM_MEM_FAIL;
- }
for (ii = 0; ii < (int) extS->num_ref_frames; ii++)
{
seqParam->offset_for_ref_frame[ii] = extS->offset_for_ref_frame[ii];
diff --git a/media/libstagefright/codecs/avc/enc/src/rate_control.cpp b/media/libstagefright/codecs/avc/enc/src/rate_control.cpp
index aa13873..09dcc28 100644
--- a/media/libstagefright/codecs/avc/enc/src/rate_control.cpp
+++ b/media/libstagefright/codecs/avc/enc/src/rate_control.cpp
@@ -171,7 +171,7 @@ AVCEnc_Status InitRateControlModule(AVCHandle *avcHandle)
AVCRateControl *rateCtrl = encvid->rateCtrl;
double L1, L2, L3, bpp;
int qp;
- int i, j;
+ int i;
rateCtrl->basicUnit = video->PicSizeInMbs;
diff --git a/media/libstagefright/codecs/avcdec/Android.mk b/media/libstagefright/codecs/avcdec/Android.mk
new file mode 100644
index 0000000..902ab57
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/Android.mk
@@ -0,0 +1,27 @@
+#ifeq ($(if $(wildcard external/libh264),1,0),1)
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libstagefright_soft_avcdec
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := libavcdec
+LOCAL_SRC_FILES := SoftAVCDec.cpp
+
+LOCAL_C_INCLUDES := $(TOP)/external/libavc/decoder
+LOCAL_C_INCLUDES += $(TOP)/external/libavc/common
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES += libstagefright_omx
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation
+LOCAL_SHARED_LIBRARIES += libutils
+LOCAL_SHARED_LIBRARIES += liblog
+
+LOCAL_LDFLAGS := -Wl,-Bsymbolic
+
+include $(BUILD_SHARED_LIBRARY)
+
+#endif
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
new file mode 100644
index 0000000..8388472
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -0,0 +1,808 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAVCDec"
+#include <utils/Log.h>
+
+#include "ih264_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+#include "ithread.h"
+#include "ih264d.h"
+#include "SoftAVCDec.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <OMX_VideoExt.h>
+
+namespace android {
+
+#define PRINT_TIME ALOGV
+
+#define componentName "video_decoder.avc"
+#define codingType OMX_VIDEO_CodingAVC
+#define CODEC_MIME_TYPE MEDIA_MIMETYPE_VIDEO_AVC
+
+/** Function and structure definitions to keep code similar for each codec */
+#define ivdec_api_function ih264d_api_function
+#define ivdext_init_ip_t ih264d_init_ip_t
+#define ivdext_init_op_t ih264d_init_op_t
+#define ivdext_fill_mem_rec_ip_t ih264d_fill_mem_rec_ip_t
+#define ivdext_fill_mem_rec_op_t ih264d_fill_mem_rec_op_t
+#define ivdext_ctl_set_num_cores_ip_t ih264d_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t
+
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES
+
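+// Profile/level pairs advertised by this decoder component: Baseline, Main
+// and High profiles, each up to level 5.2.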
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel52 },
+
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1b },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel11 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel12 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel13 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel2 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel21 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel22 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel3 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel31 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel32 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel4 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel41 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel42 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel5 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel51 },
+ { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel52 },
+
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel1 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel1b },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel11 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel12 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel13 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel2 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel21 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel22 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel3 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel31 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel32 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel4 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel41 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel42 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel5 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel51 },
+ { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel52 },
+};
+
+SoftAVC::SoftAVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVideoDecoderOMXComponent(
+ name, componentName, codingType,
+ kProfileLevels, ARRAY_SIZE(kProfileLevels),
+ 320 /* width */, 240 /* height */, callbacks,
+ appData, component),
+ mMemRecords(NULL),
+ mFlushOutBuffer(NULL),
+ mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
+ mIvColorFormat(IV_YUV_420P),
+ mNewWidth(mWidth),
+ mNewHeight(mHeight),
+ mChangingResolution(false) {
+ initPorts(
+ kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE);
+
+ GETTIME(&mTimeStart, NULL);
+
+ // If input dump is enabled, then create an empty file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftAVC::~SoftAVC() {
+ CHECK_EQ(deInitDecoder(), (status_t)OK);
+}
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGD("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+void SoftAVC::logVersion() {
+ ivd_ctl_getversioninfo_ip_t s_ctl_ip;
+ ivd_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ s_ctl_ip.pv_version_buffer = au1_buf;
+ s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
+
+ status =
+ ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version number: 0x%x",
+ s_ctl_op.u4_error_code);
+ } else {
+ ALOGV("Ittiam decoder version number: %s",
+ (char *)s_ctl_ip.pv_version_buffer);
+ }
+ return;
+}
+
+status_t SoftAVC::setParams(size_t stride) {
+ ivd_ctl_set_config_ip_t s_ctl_ip;
+ ivd_ctl_set_config_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+ s_ctl_ip.u4_disp_wd = (UWORD32)stride;
+ s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+
+ s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+
+ ALOGV("Set the run-time (dynamic) parameters stride = %u", stride);
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the run-time parameters: 0x%x",
+ s_ctl_op.u4_error_code);
+
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftAVC::resetPlugin() {
+ mIsInFlush = false;
+ mReceivedEOS = false;
+ memset(mTimeStamps, 0, sizeof(mTimeStamps));
+ memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
+
+ /* Initialize both start and end times */
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+
+ return OK;
+}
+
+status_t SoftAVC::resetDecoder() {
+ ivd_ctl_reset_ip_t s_ctl_ip;
+ ivd_ctl_reset_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ /* Set the run-time (dynamic) parameters */
+ setParams(outputBufferWidth());
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ return OK;
+}
+
+status_t SoftAVC::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_cores_op;
+ IV_API_CALL_STATUS_T status;
+ s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
+ s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ status = ivdec_api_function(
+ mCodecCtx, (void *)&s_set_cores_ip, (void *)&s_set_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in setting number of cores: 0x%x",
+ s_set_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftAVC::setFlushMode() {
+ IV_API_CALL_STATUS_T status;
+ ivd_ctl_flush_ip_t s_video_flush_ip;
+ ivd_ctl_flush_op_t s_video_flush_op;
+
+ s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+
+ /* Set the decoder in Flush mode, subsequent decode() calls will flush */
+ status = ivdec_api_function(
+ mCodecCtx, (void *)&s_video_flush_ip, (void *)&s_video_flush_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
+ s_video_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mIsInFlush = true;
+ return OK;
+}
+
+status_t SoftAVC::initDecoder() {
+ IV_API_CALL_STATUS_T status;
+
+ UWORD32 u4_num_reorder_frames;
+ UWORD32 u4_num_ref_frames;
+ UWORD32 u4_share_disp_buf;
+ WORD32 i4_level;
+
+ mNumCores = GetCPUCoreCount();
+
+ /* Initialize number of ref and reorder modes (for H264) */
+ u4_num_reorder_frames = 16;
+ u4_num_ref_frames = 16;
+ u4_share_disp_buf = 0;
+
+ uint32_t displayStride = outputBufferWidth();
+ uint32_t displayHeight = outputBufferHeight();
+ uint32_t displaySizeY = displayStride * displayHeight;
+
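+ // Pick a decoder level from the output surface area: anything above 1080p
+ // maps to level 5.0, above 720p to 4.0, and so on down to 2.0.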
+ if (displaySizeY > (1920 * 1088)) {
+ i4_level = 50;
+ } else if (displaySizeY > (1280 * 720)) {
+ i4_level = 40;
+ } else if (displaySizeY > (720 * 576)) {
+ i4_level = 31;
+ } else if (displaySizeY > (624 * 320)) {
+ i4_level = 30;
+ } else if (displaySizeY > (352 * 288)) {
+ i4_level = 21;
+ } else {
+ i4_level = 20;
+ }
+
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
+ s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ ALOGV("Get number of mem records");
+ status = ivdec_api_function(
+ mCodecCtx, (void *)&s_num_mem_rec_ip, (void *)&s_num_mem_rec_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in getting mem records: 0x%x",
+ s_num_mem_rec_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ mMemRecords = (iv_mem_rec_t *)ivd_aligned_malloc(
+ 128, mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (mMemRecords == NULL) {
+ ALOGE("Allocation failure");
+ return NO_MEMORY;
+ }
+
+ memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
+
+ {
+ size_t i;
+ ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
+ ivdext_fill_mem_rec_op_t s_fill_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_ip_t);
+ s_fill_mem_ip.i4_level = i4_level;
+ s_fill_mem_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_fill_mem_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_fill_mem_ip.u4_num_extra_disp_buf = 0;
+ s_fill_mem_ip.e_output_format = mIvColorFormat;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight;
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_op_t);
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
+ }
+
+ status = ivdec_api_function(
+ mCodecCtx, (void *)&s_fill_mem_ip, (void *)&s_fill_mem_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in filling mem records: 0x%x",
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mNumMemRecords =
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
+
+ ps_mem_rec = mMemRecords;
+
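+ // Allocate each memory record with the alignment and size the codec requested.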
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ivd_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == NULL) {
+ ALOGE("Allocation failure for memory record #%zu of size %u",
+ i, ps_mem_rec->u4_mem_size);
+ status = IV_FAIL;
+ return NO_MEMORY;
+ }
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Initialize the decoder */
+ {
+ ivdext_init_ip_t s_init_ip;
+ ivdext_init_op_t s_init_op;
+
+ void *dec_fxns = (void *)ivdec_api_function;
+
+ s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
+ s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
+ s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;
+
+ s_init_ip.i4_level = i4_level;
+ s_init_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_init_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_init_ip.u4_num_extra_disp_buf = 0;
+
+ s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
+
+ s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
+
+ mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+ mCodecCtx->pv_fxns = dec_fxns;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in init: 0x%x",
+ s_init_op.s_ivd_init_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* Reset the plugin state */
+ resetPlugin();
+
+ /* Set the run time (dynamic) parameters */
+ setParams(displayStride);
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ /* Get codec version */
+ logVersion();
+
+ /* Allocate internal picture buffer */
+ uint32_t bufferSize = displaySizeY * 3 / 2;
+ mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
+ if (NULL == mFlushOutBuffer) {
+ ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize);
+ return NO_MEMORY;
+ }
+
+ mInitNeeded = false;
+ mFlushNeeded = false;
+ return OK;
+}
+
+status_t SoftAVC::deInitDecoder() {
+ size_t i;
+
+ if (mMemRecords) {
+ iv_mem_rec_t *ps_mem_rec;
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++) {
+ if (ps_mem_rec->pv_base) {
+ ivd_aligned_free(ps_mem_rec->pv_base);
+ }
+ ps_mem_rec++;
+ }
+ ivd_aligned_free(mMemRecords);
+ mMemRecords = NULL;
+ }
+
+ if (mFlushOutBuffer) {
+ ivd_aligned_free(mFlushOutBuffer);
+ mFlushOutBuffer = NULL;
+ }
+
+ mInitNeeded = true;
+ mChangingResolution = false;
+
+ return OK;
+}
+
+status_t SoftAVC::reInitDecoder() {
+ status_t ret;
+
+ deInitDecoder();
+
+ ret = initDecoder();
+ if (OK != ret) {
+ ALOGE("Create failure");
+ deInitDecoder();
+ return NO_MEMORY;
+ }
+ return OK;
+}
+
+void SoftAVC::onReset() {
+ SoftVideoDecoderOMXComponent::onReset();
+
+ resetDecoder();
+ resetPlugin();
+}
+
+OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
+ const uint32_t oldWidth = mWidth;
+ const uint32_t oldHeight = mHeight;
+ OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params);
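+ // If the base class picked up new dimensions, the decoder has to be torn
+ // down and re-created for the new resolution.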
+ if (mWidth != oldWidth || mHeight != oldHeight) {
+ reInitDecoder();
+ }
+ return ret;
+}
+
+void SoftAVC::setDecodeArgs(
+ ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t timeStampIx) {
+ size_t sizeY = outputBufferWidth() * outputBufferHeight();
+ size_t sizeUV;
+ uint8_t *pBuf;
+
+ ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+ ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+
+ /* When in flush and after EOS with zero byte input,
+ * inHeader is set to zero. Hence check for non-null */
+ if (inHeader) {
+ ps_dec_ip->u4_ts = timeStampIx;
+ ps_dec_ip->pv_stream_buffer =
+ inHeader->pBuffer + inHeader->nOffset;
+ ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen;
+ } else {
+ ps_dec_ip->u4_ts = 0;
+ ps_dec_ip->pv_stream_buffer = NULL;
+ ps_dec_ip->u4_num_Bytes = 0;
+ }
+
+ if (outHeader) {
+ pBuf = outHeader->pBuffer;
+ } else {
+ pBuf = mFlushOutBuffer;
+ }
+
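+ // Planar YUV420 output: a luma plane of sizeY bytes followed by two chroma
+ // planes of sizeY/4 bytes each, packed contiguously in pBuf.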
+ sizeUV = sizeY / 4;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
+
+ ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
+ ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
+ ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
+ ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
+ return;
+}
+void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
+ /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
+ if (kOutputPortIndex == portIndex) {
+ setFlushMode();
+
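+ // Drain the decoder by issuing decode() calls with no input until no more
+ // output frames are produced, then reset the plugin state.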
+ while (true) {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ IV_API_CALL_STATUS_T status;
+ size_t sizeY, sizeUV;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ if (0 == s_dec_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+ }
+}
+
+void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
+ UNUSED(portIndex);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+
+ /* If input EOS is seen and decoder is not in flush mode,
+ * set the decoder in flush mode.
+ * There can be a case where EOS is sent along with last picture data
+ * In that case, only after decoding that input data, decoder has to be
+ * put in flush. This case is handled here */
+
+ if (mReceivedEOS && !mIsInFlush) {
+ setFlushMode();
+ }
+
+ while (!outQueue.empty()) {
+ BufferInfo *inInfo;
+ OMX_BUFFERHEADERTYPE *inHeader;
+
+ BufferInfo *outInfo;
+ OMX_BUFFERHEADERTYPE *outHeader;
+ size_t timeStampIx;
+
+ inInfo = NULL;
+ inHeader = NULL;
+
+ if (!mIsInFlush) {
+ if (!inQueue.empty()) {
+ inInfo = *inQueue.begin();
+ inHeader = inInfo->mHeader;
+ } else {
+ break;
+ }
+ }
+
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = 0;
+ outHeader->nOffset = 0;
+
+ if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
+ mReceivedEOS = true;
+ if (inHeader->nFilledLen == 0) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ setFlushMode();
+ }
+ }
+
+ // When there is an init required and the decoder is not in flush mode,
+ // update output port's definition and reinitialize decoder.
+ if (mInitNeeded && !mIsInFlush) {
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
+
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+ return;
+ }
+
+ /* Get a free slot in timestamp array to hold input timestamp */
+ {
+ size_t i;
+ timeStampIx = 0;
+ for (i = 0; i < MAX_TIME_STAMPS; i++) {
+ if (!mTimeStampsValid[i]) {
+ timeStampIx = i;
+ break;
+ }
+ }
+ if (inHeader != NULL) {
+ mTimeStampsValid[timeStampIx] = true;
+ mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
+ }
+ }
+
+ {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ WORD32 timeDelay, timeTaken;
+ size_t sizeY, sizeUV;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
+ // If input dump is enabled, then write to file
+ DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes);
+
+ GETTIME(&mTimeStart, NULL);
+ /* Compute time elapsed between end of previous decode()
+ * to start of current decode() */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+ IV_API_CALL_STATUS_T status;
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+
+ bool unsupportedDimensions =
+ (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
+ bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+
+ GETTIME(&mTimeEnd, NULL);
+ /* Compute time taken for decode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ PRINT_TIME("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_dec_op.u4_num_bytes_consumed);
+ if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
+ mFlushNeeded = true;
+ }
+
+ if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
+ /* If the input did not contain picture data, then ignore
+ * the associated timestamp */
+ mTimeStampsValid[timeStampIx] = false;
+ }
+
+ // This is needed to handle CTS DecoderTest testCodecResetsH264WithoutSurface,
+ // which is not sending SPS/PPS after port reconfiguration and flush to the codec.
+ if (unsupportedDimensions && !mFlushNeeded) {
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht);
+
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
+
+ ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ return;
+ }
+
+ // If the decoder is in the changing resolution mode and there is no output present,
+ // that means the switching is done and it's ready to reset the decoder and the plugin.
+ if (mChangingResolution && !s_dec_op.u4_output_present) {
+ mChangingResolution = false;
+ resetDecoder();
+ resetPlugin();
+ continue;
+ }
+
+ if (unsupportedDimensions || resChanged) {
+ mChangingResolution = true;
+ if (mFlushNeeded) {
+ setFlushMode();
+ }
+
+ if (unsupportedDimensions) {
+ mNewWidth = s_dec_op.u4_pic_wd;
+ mNewHeight = s_dec_op.u4_pic_ht;
+ mInitNeeded = true;
+ }
+ continue;
+ }
+
+ if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
+ uint32_t width = s_dec_op.u4_pic_wd;
+ uint32_t height = s_dec_op.u4_pic_ht;
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, width, height);
+
+ if (portWillReset) {
+ resetDecoder();
+ return;
+ }
+ }
+
+ if (s_dec_op.u4_output_present) {
+ outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+
+ outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
+ mTimeStampsValid[s_dec_op.u4_ts] = false;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ } else {
+ /* If in flush mode and no output is returned by the codec,
+ * then come out of flush mode */
+ mIsInFlush = false;
+
+ /* If EOS was received on input port and there is no output
+ * from the codec, then signal EOS on output port */
+ if (mReceivedEOS) {
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ resetPlugin();
+ }
+ }
+ }
+
+ if (inHeader != NULL) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
+ OMX_COMPONENTTYPE **component) {
+ return new android::SoftAVC(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
new file mode 100644
index 0000000..191a71d
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_H264_DEC_H_
+
+#define SOFT_H264_DEC_H_
+
+#include "SoftVideoDecoderOMXComponent.h"
+#include <sys/time.h>
+
+namespace android {
+
+#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ivd_aligned_free(buf) free(buf)
+
+/** Number of entries in the time-stamp array */
+#define MAX_TIME_STAMPS 64
+
+/** Maximum number of cores supported by the codec */
+#define CODEC_MAX_NUM_CORES 4
+
+#define CODEC_MAX_WIDTH 1920
+
+#define CODEC_MAX_HEIGHT 1088
+
+/** Input buffer size */
+#define INPUT_BUF_SIZE (1024 * 1024)
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = ((end.tv_sec - start.tv_sec) * 1000000) + \
+ (end.tv_usec - start.tv_usec);
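+
+/* Typical use (as in SoftAVCDec.cpp):
+ *   WORD32 decodeTimeUs;
+ *   GETTIME(&mTimeStart, NULL);
+ *   // ... run the decode call ...
+ *   GETTIME(&mTimeEnd, NULL);
+ *   TIME_DIFF(mTimeStart, mTimeEnd, decodeTimeUs);  // elapsed microseconds
+ */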
+
+struct SoftAVC : public SoftVideoDecoderOMXComponent {
+ SoftAVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftAVC();
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
+ virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
+private:
+ // Number of input and output buffers
+ enum {
+ kNumBuffers = 8
+ };
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by the codec
+
+ size_t mNumCores; // Number of cores to be used by the codec
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+ // Internal buffer to be used to flush out the buffers from decoder
+ uint8_t *mFlushOutBuffer;
+
+ // Status of entries in the timestamp array
+ bool mTimeStampsValid[MAX_TIME_STAMPS];
+
+ // Timestamp array - Since codec does not take 64 bit timestamps,
+ // they are maintained in the plugin
+ OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ OMX_COLOR_FORMATTYPE mOmxColorFormat; // OMX Color format
+ IV_COLOR_FORMAT_T mIvColorFormat; // Ittiam Color format
+
+ bool mIsInFlush; // codec is in flush mode
+ bool mReceivedEOS; // EOS is received on input port
+ bool mInitNeeded;
+ uint32_t mNewWidth;
+ uint32_t mNewHeight;
+ // The input stream has changed to a different resolution, which is still supported by the
+ // codec. So the codec is switching to decode the new resolution.
+ bool mChangingResolution;
+ bool mFlushNeeded;
+
+ status_t initDecoder();
+ status_t deInitDecoder();
+ status_t setFlushMode();
+ status_t setParams(size_t stride);
+ void logVersion();
+ status_t setNumCores();
+ status_t resetDecoder();
+ status_t resetPlugin();
+ status_t reInitDecoder();
+
+ void setDecodeArgs(
+ ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t timeStampIx);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/sdcard/media/avcd_input"
+#define INPUT_DUMP_EXT "h264"
+
+#define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int) m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // SOFT_H264_DEC_H_
diff --git a/media/libstagefright/codecs/avcenc/Android.mk b/media/libstagefright/codecs/avcenc/Android.mk
new file mode 100644
index 0000000..24a4db9
--- /dev/null
+++ b/media/libstagefright/codecs/avcenc/Android.mk
@@ -0,0 +1,30 @@
+#ifeq ($(if $(wildcard external/libh264),1,0),1)
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libstagefright_soft_avcenc
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := libavcenc
+LOCAL_SRC_FILES := SoftAVCEnc.cpp
+
+LOCAL_C_INCLUDES := $(TOP)/external/libavc/encoder
+LOCAL_C_INCLUDES += $(TOP)/external/libavc/common
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/hardware
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES += libstagefright_omx
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation
+LOCAL_SHARED_LIBRARIES += libutils
+LOCAL_SHARED_LIBRARIES += liblog
+
+LOCAL_LDFLAGS := -Wl,-Bsymbolic
+
+include $(BUILD_SHARED_LIBRARY)
+
+#endif
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
new file mode 100644
index 0000000..bf5e353
--- /dev/null
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -0,0 +1,1335 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAVCEnc"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include "OMX_Video.h"
+
+#include <HardwareAPI.h>
+#include <MetadataBufferType.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+#include <ui/Rect.h>
+
+#include "ih264_typedefs.h"
+#include "iv2.h"
+#include "ive2.h"
+#include "ih264e.h"
+#include "SoftAVCEnc.h"
+
+namespace android {
+
+ #define ive_api_function ih264e_api_function
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+typedef struct LevelConversion {
+ OMX_VIDEO_AVCLEVELTYPE omxLevel;
+ WORD32 avcLevel;
+} LevelConversion;
+
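+// Maps OMX AVC level enums to the numeric level values used by the encoder
+// API (spec level x10, e.g. Level 3.1 -> 31; Level 1b is carried as 9).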
+static LevelConversion ConversionTable[] = {
+ { OMX_VIDEO_AVCLevel1, 10 },
+ { OMX_VIDEO_AVCLevel1b, 9 },
+ { OMX_VIDEO_AVCLevel11, 11 },
+ { OMX_VIDEO_AVCLevel12, 12 },
+ { OMX_VIDEO_AVCLevel13, 13 },
+ { OMX_VIDEO_AVCLevel2, 20 },
+ { OMX_VIDEO_AVCLevel21, 21 },
+ { OMX_VIDEO_AVCLevel22, 22 },
+ { OMX_VIDEO_AVCLevel3, 30 },
+ { OMX_VIDEO_AVCLevel31, 31 },
+ { OMX_VIDEO_AVCLevel32, 32 },
+ { OMX_VIDEO_AVCLevel4, 40 },
+ { OMX_VIDEO_AVCLevel41, 41 },
+ { OMX_VIDEO_AVCLevel42, 42 },
+ { OMX_VIDEO_AVCLevel5, 50 },
+ { OMX_VIDEO_AVCLevel51, 51 },
+};
+
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4 },
+ { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 },
+};
+
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGD("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+static status_t ConvertOmxAvcLevelToAvcSpecLevel(
+ OMX_VIDEO_AVCLEVELTYPE omxLevel, WORD32 *avcLevel) {
+ for (size_t i = 0; i < NELEM(ConversionTable); ++i) {
+ if (omxLevel == ConversionTable[i].omxLevel) {
+ *avcLevel = ConversionTable[i].avcLevel;
+ return OK;
+ }
+ }
+
+ ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported",
+ (int32_t)omxLevel);
+
+ return BAD_VALUE;
+}
+
+static status_t ConvertAvcSpecLevelToOmxAvcLevel(
+ WORD32 avcLevel, OMX_VIDEO_AVCLEVELTYPE *omxLevel) {
+ for (size_t i = 0; i < NELEM(ConversionTable); ++i) {
+ if (avcLevel == ConversionTable[i].avcLevel) {
+ *omxLevel = ConversionTable[i].omxLevel;
+ return OK;
+ }
+ }
+
+ ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported",
+ (int32_t)avcLevel);
+
+ return BAD_VALUE;
+}
+
+
+SoftAVC::SoftAVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVideoEncoderOMXComponent(
+ name, "video_encoder.avc", OMX_VIDEO_CodingAVC,
+ kProfileLevels, NELEM(kProfileLevels),
+ 176 /* width */, 144 /* height */,
+ callbacks, appData, component),
+ mIvVideoColorFormat(IV_YUV_420P),
+ mIDRFrameRefreshIntervalInSec(1),
+ mAVCEncProfile(IV_PROFILE_BASE),
+ mAVCEncLevel(31),
+ mPrevTimestampUs(-1),
+ mStarted(false),
+ mSawInputEOS(false),
+ mSignalledError(false),
+ mConversionBuffer(NULL),
+ mCodecCtx(NULL) {
+
+ initPorts(kNumBuffers, kNumBuffers, ((mWidth * mHeight * 3) >> 1),
+ MEDIA_MIMETYPE_VIDEO_AVC, 2);
+
+ // If dump is enabled, then create an empty file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+ CREATE_DUMP_FILE(mOutFile);
+
+}
+
+SoftAVC::~SoftAVC() {
+ releaseEncoder();
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ CHECK(outQueue.empty());
+ CHECK(inQueue.empty());
+}
+
+OMX_ERRORTYPE SoftAVC::initEncParams() {
+ mCodecCtx = NULL;
+ mMemRecords = NULL;
+ mNumMemRecords = DEFAULT_MEM_REC_CNT;
+ mHeaderGenerated = 0;
+ mNumCores = GetCPUCoreCount();
+ mArch = DEFAULT_ARCH;
+ mSliceMode = DEFAULT_SLICE_MODE;
+ mSliceParam = DEFAULT_SLICE_PARAM;
+ mHalfPelEnable = DEFAULT_HPEL;
+ mIInterval = DEFAULT_I_INTERVAL;
+ mIDRInterval = DEFAULT_IDR_INTERVAL;
+ mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL;
+ mFrameRate = DEFAULT_SRC_FRAME_RATE;
+ mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;
+ mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
+ mEncSpeed = DEFAULT_ENC_SPEED;
+ mIntra4x4 = DEFAULT_INTRA4x4;
+ mAIRMode = DEFAULT_AIR;
+ mAIRRefreshPeriod = DEFAULT_AIR_REFRESH_PERIOD;
+ mPSNREnable = DEFAULT_PSNR_ENABLE;
+ mReconEnable = DEFAULT_RECON_ENABLE;
+
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+
+ return OMX_ErrorNone;
+}
+
+
+OMX_ERRORTYPE SoftAVC::setDimensions() {
+ ive_ctl_set_dimensions_ip_t s_dimensions_ip;
+ ive_ctl_set_dimensions_op_t s_dimensions_op;
+ IV_STATUS_T status;
+
+ s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;
+ s_dimensions_ip.u4_ht = mHeight;
+ s_dimensions_ip.u4_wd = mWidth;
+ s_dimensions_ip.u4_strd = mStride;
+
+ s_dimensions_ip.u4_timestamp_high = -1;
+ s_dimensions_ip.u4_timestamp_low = -1;
+
+ s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t);
+ s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame dimensions = 0x%x\n",
+ s_dimensions_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setNumCores() {
+ IV_STATUS_T status;
+ ive_ctl_set_num_cores_ip_t s_num_cores_ip;
+ ive_ctl_set_num_cores_op_t s_num_cores_op;
+ s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES;
+ s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES);
+ s_num_cores_ip.u4_timestamp_high = -1;
+ s_num_cores_ip.u4_timestamp_low = -1;
+ s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t);
+
+ s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t);
+
+ status = ive_api_function(
+ mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set processor params = 0x%x\n",
+ s_num_cores_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setFrameRate() {
+ ive_ctl_set_frame_rate_ip_t s_frame_rate_ip;
+ ive_ctl_set_frame_rate_op_t s_frame_rate_op;
+ IV_STATUS_T status;
+
+ s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE;
+
+ s_frame_rate_ip.u4_src_frame_rate = mFrameRate;
+ s_frame_rate_ip.u4_tgt_frame_rate = mFrameRate;
+
+ s_frame_rate_ip.u4_timestamp_high = -1;
+ s_frame_rate_ip.u4_timestamp_low = -1;
+
+ s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t);
+ s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame rate = 0x%x\n",
+ s_frame_rate_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setIpeParams() {
+ ive_ctl_set_ipe_params_ip_t s_ipe_params_ip;
+ ive_ctl_set_ipe_params_op_t s_ipe_params_op;
+ IV_STATUS_T status;
+
+ s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS;
+
+ s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4;
+ s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed;
+
+ s_ipe_params_ip.u4_timestamp_high = -1;
+ s_ipe_params_ip.u4_timestamp_low = -1;
+
+ s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t);
+ s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set ipe params = 0x%x\n",
+ s_ipe_params_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setBitRate() {
+ ive_ctl_set_bitrate_ip_t s_bitrate_ip;
+ ive_ctl_set_bitrate_op_t s_bitrate_op;
+ IV_STATUS_T status;
+
+ s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE;
+
+ s_bitrate_ip.u4_target_bitrate = mBitrate;
+
+ s_bitrate_ip.u4_timestamp_high = -1;
+ s_bitrate_ip.u4_timestamp_low = -1;
+
+ s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t);
+ s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {
+ ive_ctl_set_frame_type_ip_t s_frame_type_ip;
+ ive_ctl_set_frame_type_op_t s_frame_type_op;
+ IV_STATUS_T status;
+
+ s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE;
+
+ s_frame_type_ip.e_frame_type = e_frame_type;
+
+ s_frame_type_ip.u4_timestamp_high = -1;
+ s_frame_type_ip.u4_timestamp_low = -1;
+
+ s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t);
+ s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set frame type = 0x%x\n",
+ s_frame_type_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setQp() {
+ ive_ctl_set_qp_ip_t s_qp_ip;
+ ive_ctl_set_qp_op_t s_qp_op;
+ IV_STATUS_T status;
+
+ s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
+
+ s_qp_ip.u4_i_qp = DEFAULT_I_QP;
+ s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_p_qp = DEFAULT_P_QP;
+ s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_b_qp = DEFAULT_P_QP;
+ s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX;
+ s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN;
+
+ s_qp_ip.u4_timestamp_high = -1;
+ s_qp_ip.u4_timestamp_low = -1;
+
+ s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t);
+ s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setEncMode(IVE_ENC_MODE_T e_enc_mode) {
+ IV_STATUS_T status;
+ ive_ctl_set_enc_mode_ip_t s_enc_mode_ip;
+ ive_ctl_set_enc_mode_op_t s_enc_mode_op;
+
+ s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE;
+
+ s_enc_mode_ip.e_enc_mode = e_enc_mode;
+
+ s_enc_mode_ip.u4_timestamp_high = -1;
+ s_enc_mode_ip.u4_timestamp_low = -1;
+
+ s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t);
+ s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set in header encode mode = 0x%x\n",
+ s_enc_mode_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setVbvParams() {
+ ive_ctl_set_vbv_params_ip_t s_vbv_ip;
+ ive_ctl_set_vbv_params_op_t s_vbv_op;
+ IV_STATUS_T status;
+
+ s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS;
+
+ s_vbv_ip.u4_vbv_buf_size = 0;
+ s_vbv_ip.u4_vbv_buffer_delay = 1000;
+
+ s_vbv_ip.u4_timestamp_high = -1;
+ s_vbv_ip.u4_timestamp_low = -1;
+
+ s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t);
+ s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set VBC params = 0x%x\n", s_vbv_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setAirParams() {
+ ive_ctl_set_air_params_ip_t s_air_ip;
+ ive_ctl_set_air_params_op_t s_air_op;
+ IV_STATUS_T status;
+
+ s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS;
+
+ s_air_ip.e_air_mode = mAIRMode;
+ s_air_ip.u4_air_refresh_period = mAIRRefreshPeriod;
+
+ s_air_ip.u4_timestamp_high = -1;
+ s_air_ip.u4_timestamp_low = -1;
+
+ s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t);
+ s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setMeParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_me_params_ip_t s_me_params_ip;
+ ive_ctl_set_me_params_op_t s_me_params_op;
+
+ s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS;
+
+ s_me_params_ip.u4_enable_fast_sad = mEnableFastSad;
+ s_me_params_ip.u4_enable_alt_ref = mEnableAltRef;
+
+ s_me_params_ip.u4_enable_hpel = mHalfPelEnable;
+ s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL;
+ s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED;
+ s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X;
+ s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y;
+
+ s_me_params_ip.u4_timestamp_high = -1;
+ s_me_params_ip.u4_timestamp_low = -1;
+
+ s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t);
+ s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setGopParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_gop_params_ip_t s_gop_params_ip;
+ ive_ctl_set_gop_params_op_t s_gop_params_op;
+
+ s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS;
+
+ s_gop_params_ip.u4_i_frm_interval = mIInterval;
+ s_gop_params_ip.u4_idr_frm_interval = mIDRInterval;
+ s_gop_params_ip.u4_num_b_frames = DEFAULT_B_FRAMES;
+
+ s_gop_params_ip.u4_timestamp_high = -1;
+ s_gop_params_ip.u4_timestamp_low = -1;
+
+ s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t);
+ s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set ME params = 0x%x\n",
+ s_gop_params_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setProfileParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_profile_params_ip_t s_profile_params_ip;
+ ive_ctl_set_profile_params_op_t s_profile_params_op;
+
+ s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
+
+ s_profile_params_ip.e_profile = DEFAULT_EPROFILE;
+
+ s_profile_params_ip.u4_timestamp_high = -1;
+ s_profile_params_ip.u4_timestamp_low = -1;
+
+ s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t);
+ s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to set profile params = 0x%x\n",
+ s_profile_params_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setDeblockParams() {
+ IV_STATUS_T status;
+ ive_ctl_set_deblock_params_ip_t s_deblock_params_ip;
+ ive_ctl_set_deblock_params_op_t s_deblock_params_op;
+
+ s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS;
+
+ s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel;
+
+ s_deblock_params_ip.u4_timestamp_high = -1;
+ s_deblock_params_ip.u4_timestamp_low = -1;
+
+ s_deblock_params_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t);
+ s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t);
+
+ status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to enable/disable deblock params = 0x%x\n",
+ s_deblock_params_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+ return OMX_ErrorNone;
+}
+
+void SoftAVC::logVersion() {
+ ive_ctl_getversioninfo_ip_t s_ctl_ip;
+ ive_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t);
+ s_ctl_ip.pu1_version = au1_buf;
+ s_ctl_ip.u4_version_bufsize = sizeof(au1_buf);
+
+ status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code);
+ } else {
+ ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version);
+ }
+ return;
+}
+
+OMX_ERRORTYPE SoftAVC::initEncoder() {
+ IV_STATUS_T status;
+ size_t i;
+ WORD32 level;
+ uint32_t displaySizeY;
+ CHECK(!mStarted);
+
+ OMX_ERRORTYPE errType = OMX_ErrorNone;
+
+ displaySizeY = mWidth * mHeight;
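+ // Derive a minimum AVC level from the frame area, then keep the larger of
+ // that and the currently configured level.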
+ if (displaySizeY > (1920 * 1088)) {
+ level = 50;
+ } else if (displaySizeY > (1280 * 720)) {
+ level = 40;
+ } else if (displaySizeY > (720 * 576)) {
+ level = 31;
+ } else if (displaySizeY > (624 * 320)) {
+ level = 30;
+ } else if (displaySizeY > (352 * 288)) {
+ level = 21;
+ } else {
+ level = 20;
+ }
+ mAVCEncLevel = MAX(level, mAVCEncLevel);
+
+ if (OMX_ErrorNone != (errType = initEncParams())) {
+ ALOGE("Failed to initialize encoder params");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return errType;
+ }
+
+ mStride = ALIGN16(mWidth);
+
+ if (mInputDataIsMeta) {
+ if (mConversionBuffer) {
+ free(mConversionBuffer);
+ mConversionBuffer = NULL;
+ }
+
+ if (mConversionBuffer == NULL) {
+ mConversionBuffer = (uint8_t *)malloc(mStride * mHeight * 3 / 2);
+ if (mConversionBuffer == NULL) {
+ ALOGE("Allocating conversion buffer failed.");
+ return OMX_ErrorUndefined;
+ }
+ }
+ }
+
+ switch (mColorFormat) {
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ mIvVideoColorFormat = IV_YUV_420SP_UV;
+ ALOGV("colorFormat YUV_420SP");
+ break;
+ default:
+ case OMX_COLOR_FormatYUV420Planar:
+ mIvVideoColorFormat = IV_YUV_420P;
+ ALOGV("colorFormat YUV_420P");
+ break;
+ }
+
+ ALOGV("Params width %d height %d level %d colorFormat %d", mWidth,
+ mHeight, mAVCEncLevel, mIvVideoColorFormat);
+
+ /* Getting Number of MemRecords */
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t);
+ s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t);
+
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ status = ive_api_function(0, &s_num_mem_rec_ip, &s_num_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Get number of memory records failed = 0x%x\n",
+ s_num_mem_rec_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ /* Allocate array to hold memory records */
+ mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (NULL == mMemRecords) {
+ ALOGE("Unable to allocate memory for hold memory records: Size %d",
+ mNumMemRecords * sizeof(iv_mem_rec_t));
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return OMX_ErrorUndefined;
+ }
+
+ {
+ iv_mem_rec_t *ps_mem_rec;
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);
+ ps_mem_rec->pv_base = NULL;
+ ps_mem_rec->u4_mem_size = 0;
+ ps_mem_rec->u4_mem_alignment = 0;
+ ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE;
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Getting MemRecords Attributes */
+ {
+ iv_fill_mem_rec_ip_t s_fill_mem_rec_ip;
+ iv_fill_mem_rec_op_t s_fill_mem_rec_op;
+
+ s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t);
+ s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t);
+
+ s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_rec_ip.ps_mem_rec = mMemRecords;
+ s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords;
+ s_fill_mem_rec_ip.u4_max_wd = mWidth;
+ s_fill_mem_rec_ip.u4_max_ht = mHeight;
+ s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel;
+ s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT;
+ s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+
+ status = ive_api_function(0, &s_fill_mem_rec_ip, &s_fill_mem_rec_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Fill memory records failed = 0x%x\n",
+ s_fill_mem_rec_op.u4_error_code);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return OMX_ErrorUndefined;
+ }
+ }
+
+ /* Allocating Memory for Mem Records */
+ {
+ WORD32 total_size;
+ iv_mem_rec_t *ps_mem_rec;
+ total_size = 0;
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ive_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == NULL) {
+ ALOGE("Allocation failure for mem record id %d size %d\n", i,
+ ps_mem_rec->u4_mem_size);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return OMX_ErrorUndefined;
+
+ }
+ total_size += ps_mem_rec->u4_mem_size;
+
+ ps_mem_rec++;
+ }
+ printf("\nTotal memory for codec %d\n", total_size);
+ }
+
+ /* Codec Instance Creation */
+ {
+ ive_init_ip_t s_init_ip;
+ ive_init_op_t s_init_op;
+
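+ // The first memory record holds the codec handle itself; point the codec
+ // context there and hook up the API entry point before IV_CMD_INIT.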
+ mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+ mCodecCtx->pv_fxns = (void *)ive_api_function;
+
+ s_init_ip.u4_size = sizeof(ive_init_ip_t);
+ s_init_op.u4_size = sizeof(ive_init_op_t);
+
+ s_init_ip.e_cmd = IV_CMD_INIT;
+ s_init_ip.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.ps_mem_rec = mMemRecords;
+ s_init_ip.u4_max_wd = mWidth;
+ s_init_ip.u4_max_ht = mHeight;
+ s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
+ s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
+ s_init_ip.u4_max_level = mAVCEncLevel;
+ s_init_ip.e_inp_color_fmt = mIvVideoColorFormat;
+
+ if (mReconEnable || mPSNREnable) {
+ s_init_ip.u4_enable_recon = 1;
+ } else {
+ s_init_ip.u4_enable_recon = 0;
+ }
+ s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
+ s_init_ip.e_rc_mode = DEFAULT_RC_MODE;
+ s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;
+ s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE;
+ s_init_ip.u4_max_num_bframes = DEFAULT_B_FRAMES;
+ s_init_ip.e_content_type = IV_PROGRESSIVE;
+ s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
+ s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
+ s_init_ip.e_slice_mode = mSliceMode;
+ s_init_ip.u4_slice_param = mSliceParam;
+ s_init_ip.e_arch = mArch;
+ s_init_ip.e_soc = DEFAULT_SOC;
+
+ status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Init memory records failed = 0x%x\n",
+ s_init_op.u4_error_code);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0 /* arg2 */, NULL /* data */);
+ return OMX_ErrorUndefined;
+ }
+ }
+
+ /* Get Codec Version */
+ logVersion();
+
+ /* set processor details */
+ setNumCores();
+
+ /* Video control Set Frame dimensions */
+ setDimensions();
+
+ /* Video control Set Frame rates */
+ setFrameRate();
+
+ /* Video control Set IPE Params */
+ setIpeParams();
+
+ /* Video control Set Bitrate */
+ setBitRate();
+
+ /* Video control Set QP */
+ setQp();
+
+ /* Video control Set AIR params */
+ setAirParams();
+
+ /* Video control Set VBV params */
+ setVbvParams();
+
+ /* Video control Set Motion estimation params */
+ setMeParams();
+
+ /* Video control Set GOP params */
+ setGopParams();
+
+ /* Video control Set Deblock params */
+ setDeblockParams();
+
+ /* Video control Set Profile params */
+ setProfileParams();
+
+ /* Video control Set in Encode header mode */
+ setEncMode(IVE_ENC_MODE_HEADER);
+
+ ALOGV("init_codec successfull");
+
+ mSpsPpsHeaderReceived = false;
+ mStarted = true;
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::releaseEncoder() {
+ IV_STATUS_T status = IV_SUCCESS;
+ iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
+ iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+ UWORD32 i;
+
+ if (!mStarted) {
+ return OMX_ErrorNone;
+ }
+
+ s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
+ s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
+ s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
+ s_retrieve_mem_ip.ps_mem_rec = mMemRecords;
+
+ status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Unable to retrieve memory records = 0x%x\n",
+ s_retrieve_mem_op.u4_error_code);
+ return OMX_ErrorUndefined;
+ }
+
+ /* Free memory records */
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {
+ ive_aligned_free(ps_mem_rec->pv_base);
+ ps_mem_rec++;
+ }
+
+ free(mMemRecords);
+
+ if (mConversionBuffer != NULL) {
+ free(mConversionBuffer);
+ mConversionBuffer = NULL;
+ }
+
+ mStarted = false;
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamVideoBitrate:
+ {
+ OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
+ (OMX_VIDEO_PARAM_BITRATETYPE *)params;
+
+ if (bitRate->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ bitRate->eControlRate = OMX_Video_ControlRateVariable;
+ bitRate->nTargetBitrate = mBitrate;
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamVideoAvc:
+ {
+ OMX_VIDEO_PARAM_AVCTYPE *avcParams = (OMX_VIDEO_PARAM_AVCTYPE *)params;
+
+ if (avcParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;
+ OMX_VIDEO_AVCLEVELTYPE omxLevel = OMX_VIDEO_AVCLevel31;
+ if (OMX_ErrorNone
+ != ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {
+ return OMX_ErrorUndefined;
+ }
+
+ avcParams->eLevel = omxLevel;
+ avcParams->nRefFrames = 1;
+ avcParams->nBFrames = 0;
+ avcParams->bUseHadamard = OMX_TRUE;
+ avcParams->nAllowedPictureTypes = (OMX_VIDEO_PictureTypeI
+ | OMX_VIDEO_PictureTypeP);
+ avcParams->nRefIdx10ActiveMinus1 = 0;
+ avcParams->nRefIdx11ActiveMinus1 = 0;
+ avcParams->bWeightedPPrediction = OMX_FALSE;
+ avcParams->bEntropyCodingCABAC = OMX_FALSE;
+ avcParams->bconstIpred = OMX_FALSE;
+ avcParams->bDirect8x8Inference = OMX_FALSE;
+ avcParams->bDirectSpatialTemporal = OMX_FALSE;
+ avcParams->nCabacInitIdc = 0;
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
+ int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoBitrate:
+ {
+ return internalSetBitrateParams(
+ (const OMX_VIDEO_PARAM_BITRATETYPE *)params);
+ }
+
+ case OMX_IndexParamVideoAvc:
+ {
+ OMX_VIDEO_PARAM_AVCTYPE *avcType = (OMX_VIDEO_PARAM_AVCTYPE *)params;
+
+ if (avcType->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
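+ // Only baseline profile with a single reference frame, no B-frames and
+ // CAVLC entropy coding is accepted here.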
+ if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline
+ || avcType->nRefFrames != 1 || avcType->nBFrames != 0
+ || avcType->bUseHadamard != OMX_TRUE
+ || (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0
+ || avcType->nRefIdx10ActiveMinus1 != 0
+ || avcType->nRefIdx11ActiveMinus1 != 0
+ || avcType->bWeightedPPrediction != OMX_FALSE
+ || avcType->bEntropyCodingCABAC != OMX_FALSE
+ || avcType->bconstIpred != OMX_FALSE
+ || avcType->bDirect8x8Inference != OMX_FALSE
+ || avcType->bDirectSpatialTemporal != OMX_FALSE
+ || avcType->nCabacInitIdc != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAVC::setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR _params) {
+ switch (index) {
+ case OMX_IndexConfigVideoIntraVOPRefresh:
+ {
+ OMX_CONFIG_INTRAREFRESHVOPTYPE *params =
+ (OMX_CONFIG_INTRAREFRESHVOPTYPE *)_params;
+
+ if (params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorBadPortIndex;
+ }
+
+ mKeyFrameRequested = params->IntraRefreshVOP;
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexConfigVideoBitrate:
+ {
+ OMX_VIDEO_CONFIG_BITRATETYPE *params =
+ (OMX_VIDEO_CONFIG_BITRATETYPE *)_params;
+
+ if (params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorBadPortIndex;
+ }
+
+ if (mBitrate != params->nEncodeBitrate) {
+ mBitrate = params->nEncodeBitrate;
+ mBitrateUpdated = true;
+ }
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::setConfig(index, _params);
+ }
+}
+
+OMX_ERRORTYPE SoftAVC::internalSetBitrateParams(
+ const OMX_VIDEO_PARAM_BITRATETYPE *bitrate) {
+ if (bitrate->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ mBitrate = bitrate->nTargetBitrate;
+ mBitrateUpdated = true;
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftAVC::setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ OMX_BUFFERHEADERTYPE *inputBufferHeader,
+ OMX_BUFFERHEADERTYPE *outputBufferHeader) {
+ iv_raw_buf_t *ps_inp_raw_buf;
+ const uint8_t *source;
+ UWORD8 *pu1_buf;
+
+ ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
+ ps_encode_ip->s_out_buf.pv_buf = outputBufferHeader->pBuffer;
+ ps_encode_ip->s_out_buf.u4_bytes = 0;
+ ps_encode_ip->s_out_buf.u4_bufsize = outputBufferHeader->nAllocLen;
+ ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t);
+ ps_encode_op->u4_size = sizeof(ive_video_encode_op_t);
+
+ ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE;
+ ps_encode_ip->pv_bufs = NULL;
+ ps_encode_ip->pv_mb_info = NULL;
+ ps_encode_ip->pv_pic_info = NULL;
+ ps_encode_ip->u4_mb_info_type = 0;
+ ps_encode_ip->u4_pic_info_type = 0;
+ ps_encode_op->s_out_buf.pv_buf = NULL;
+
+ /* Initialize color formats */
+ ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
+
+ source = NULL;
+ if (inputBufferHeader) {
+ source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset;
+
+ if (mInputDataIsMeta) {
+ source = extractGraphicBuffer(
+ mConversionBuffer, (mWidth * mHeight * 3 / 2), source,
+ inputBufferHeader->nFilledLen, mWidth, mHeight);
+
+ if (source == NULL) {
+ ALOGE("Error in extractGraphicBuffer");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return OMX_ErrorUndefined;
+ }
+ }
+ }
+
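+ /* Set up per-plane pointers, widths, heights and strides according to the input color format */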
+ pu1_buf = (UWORD8 *)source;
+ switch (mIvVideoColorFormat) {
+ case IV_YUV_420P:
+ {
+ ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
+ pu1_buf += (mStride) * mHeight;
+ ps_inp_raw_buf->apv_bufs[1] = pu1_buf;
+ pu1_buf += (mStride / 2) * mHeight / 2;
+ ps_inp_raw_buf->apv_bufs[2] = pu1_buf;
+
+ ps_inp_raw_buf->au4_wd[0] = mWidth;
+ ps_inp_raw_buf->au4_wd[1] = mWidth / 2;
+ ps_inp_raw_buf->au4_wd[2] = mWidth / 2;
+
+ ps_inp_raw_buf->au4_ht[0] = mHeight;
+ ps_inp_raw_buf->au4_ht[1] = mHeight / 2;
+ ps_inp_raw_buf->au4_ht[2] = mHeight / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = mStride;
+ ps_inp_raw_buf->au4_strd[1] = (mStride / 2);
+ ps_inp_raw_buf->au4_strd[2] = (mStride / 2);
+ break;
+ }
+
+ case IV_YUV_422ILE:
+ {
+ ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
+ ps_inp_raw_buf->au4_wd[0] = mWidth * 2;
+ ps_inp_raw_buf->au4_ht[0] = mHeight;
+ ps_inp_raw_buf->au4_strd[0] = mStride * 2;
+ break;
+ }
+
+ case IV_YUV_420SP_UV:
+ case IV_YUV_420SP_VU:
+ default:
+ {
+ ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
+ pu1_buf += (mStride) * mHeight;
+ ps_inp_raw_buf->apv_bufs[1] = pu1_buf;
+
+ ps_inp_raw_buf->au4_wd[0] = mWidth;
+ ps_inp_raw_buf->au4_wd[1] = mWidth;
+
+ ps_inp_raw_buf->au4_ht[0] = mHeight;
+ ps_inp_raw_buf->au4_ht[1] = mHeight / 2;
+
+ ps_inp_raw_buf->au4_strd[0] = mStride;
+ ps_inp_raw_buf->au4_strd[1] = mStride;
+ break;
+ }
+ }
+
+ ps_encode_ip->u4_is_last = 0;
+
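+ /* The codec API carries the 64-bit OMX timestamp as separate high and low 32-bit words */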
+ if (inputBufferHeader) {
+ ps_encode_ip->u4_timestamp_high = (inputBufferHeader->nTimeStamp) >> 32;
+ ps_encode_ip->u4_timestamp_low = (inputBufferHeader->nTimeStamp) & 0xFFFFFFFF;
+ }
+
+ return OMX_ErrorNone;
+}
+
+void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
+ IV_STATUS_T status;
+ WORD32 timeDelay, timeTaken;
+
+ UNUSED(portIndex);
+
+ // Initialize encoder if not already initialized
+ if (mCodecCtx == NULL) {
+ if (OMX_ErrorNone != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0 /* arg2 */, NULL /* data */);
+ return;
+ }
+ }
+ if (mSignalledError || mSawInputEOS) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) {
+ OMX_ERRORTYPE error;
+ ive_video_encode_ip_t s_encode_ip;
+ ive_video_encode_op_t s_encode_op;
+
+ BufferInfo *inputBufferInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader;
+
+ BufferInfo *outputBufferInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader;
+
+ if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inputBufferInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inputBufferHeader);
+
+ outputBufferHeader->nFilledLen = 0;
+ outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outputBufferInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outputBufferHeader);
+ return;
+ }
+
+ outputBufferHeader->nTimeStamp = 0;
+ outputBufferHeader->nFlags = 0;
+ outputBufferHeader->nOffset = 0;
+ outputBufferHeader->nFilledLen = 0;
+
+ uint8_t *outPtr = (uint8_t *)outputBufferHeader->pBuffer;
+
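+ /* The first output buffer carries only the SPS/PPS header; it is generated here,
+ * returned with the codec-config flag, and then the encoder switches to picture mode */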
+ if (!mSpsPpsHeaderReceived) {
+ error = setEncodeArgs(&s_encode_ip, &s_encode_op, NULL, outputBufferHeader);
+ if (error != OMX_ErrorNone) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return;
+ }
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Encode Frame failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ } else {
+ ALOGV("Bytes Generated in header %d\n",
+ s_encode_op.s_out_buf.u4_bytes);
+ }
+
+ mSpsPpsHeaderReceived = true;
+
+ outputBufferHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
+ outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes;
+ outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp;
+
+ outQueue.erase(outQueue.begin());
+ outputBufferInfo->mOwnedByUs = false;
+ DUMP_TO_FILE(
+ mOutFile, outputBufferHeader->pBuffer,
+ outputBufferHeader->nFilledLen);
+ notifyFillBufferDone(outputBufferHeader);
+
+ setEncMode(IVE_ENC_MODE_PICTURE);
+ return;
+ }
+
+ if (mBitrateUpdated) {
+ setBitRate();
+ }
+
+ if (mKeyFrameRequested) {
+ setFrameType(IV_IDR_FRAME);
+ }
+
+ mPrevTimestampUs = inputBufferHeader->nTimeStamp;
+
+ if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ mSawInputEOS = true;
+ }
+
+ error = setEncodeArgs(
+ &s_encode_ip, &s_encode_op, inputBufferHeader, outputBufferHeader);
+ if (error != OMX_ErrorNone) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return;
+ }
+
+ DUMP_TO_FILE(
+ mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
+ (mHeight * mStride * 3 / 2));
+ //DUMP_TO_FILE(mInFile, inputBufferHeader->pBuffer + inputBufferHeader->nOffset,
+ // inputBufferHeader->nFilledLen);
+
+ GETTIME(&mTimeStart, NULL);
+ /* Compute time elapsed from the end of the previous encode()
+ * call to the start of the current encode() call */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+ status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Encode Frame failed = 0x%x\n",
+ s_encode_op.u4_error_code);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return;
+ }
+
+ GETTIME(&mTimeEnd, NULL);
+ /* Compute time taken for encode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_encode_op.s_out_buf.u4_bytes);
+
+
+ outputBufferHeader->nFlags = inputBufferHeader->nFlags;
+ outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes;
+ outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp;
+
+ if (IV_IDR_FRAME
+ == s_encode_op.u4_encoded_frame_type) {
+ outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
+ }
+
+ inQueue.erase(inQueue.begin());
+ inputBufferInfo->mOwnedByUs = false;
+
+ notifyEmptyBufferDone(inputBufferHeader);
+
+ if (mSawInputEOS) {
+ outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+ }
+
+ outputBufferInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+
+ DUMP_TO_FILE(
+ mOutFile, outputBufferHeader->pBuffer,
+ outputBufferHeader->nFilledLen);
+ notifyFillBufferDone(outputBufferHeader);
+
+ }
+ return;
+}
+
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftAVC(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
new file mode 100644
index 0000000..c4e26a9
--- /dev/null
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
@@ -0,0 +1,309 @@
+/*
+ * Copyright 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __SOFT_AVC_ENC_H__
+#define __SOFT_AVC_ENC_H__
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Vector.h>
+
+#include "SoftVideoEncoderOMXComponent.h"
+
+namespace android {
+
+struct MediaBuffer;
+
+#define CODEC_MAX_CORES 4
+#define LEN_STATUS_BUFFER (10 * 1024)
+#define MAX_VBV_BUFF_SIZE (120 * 16384)
+#define MAX_NUM_IO_BUFS 3
+
+#define DEFAULT_MAX_REF_FRM 1
+#define DEFAULT_MAX_REORDER_FRM 0
+#define DEFAULT_QP_MIN 10
+#define DEFAULT_QP_MAX 40
+#define DEFAULT_MAX_BITRATE 20000000
+#define DEFAULT_MAX_SRCH_RANGE_X 256
+#define DEFAULT_MAX_SRCH_RANGE_Y 256
+#define DEFAULT_MAX_FRAMERATE 120000
+#define DEFAULT_NUM_CORES 1
+#define DEFAULT_NUM_CORES_PRE_ENC 0
+#define DEFAULT_FPS 30
+#define DEFAULT_ENC_SPEED IVE_NORMAL
+
+#define DEFAULT_MEM_REC_CNT 0
+#define DEFAULT_RECON_ENABLE 0
+#define DEFAULT_CHKSUM_ENABLE 0
+#define DEFAULT_START_FRM 0
+#define DEFAULT_NUM_FRMS 0xFFFFFFFF
+#define DEFAULT_INP_COLOR_FORMAT IV_YUV_420SP_VU
+#define DEFAULT_RECON_COLOR_FORMAT IV_YUV_420P
+#define DEFAULT_LOOPBACK 0
+#define DEFAULT_SRC_FRAME_RATE 30
+#define DEFAULT_TGT_FRAME_RATE 30
+#define DEFAULT_MAX_WD 1920
+#define DEFAULT_MAX_HT 1920
+#define DEFAULT_MAX_LEVEL 40
+#define DEFAULT_STRIDE 0
+#define DEFAULT_WD 1280
+#define DEFAULT_HT 720
+#define DEFAULT_PSNR_ENABLE 0
+#define DEFAULT_ME_SPEED 100
+#define DEFAULT_ENABLE_FAST_SAD 0
+#define DEFAULT_ENABLE_ALT_REF 0
+#define DEFAULT_RC_MODE IVE_RC_STORAGE
+#define DEFAULT_BITRATE 6000000
+#define DEFAULT_I_QP 22
+#define DEFAULT_I_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_I_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_P_QP 28
+#define DEFAULT_P_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_P_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_B_QP 22
+#define DEFAULT_B_QP_MAX DEFAULT_QP_MAX
+#define DEFAULT_B_QP_MIN DEFAULT_QP_MIN
+#define DEFAULT_AIR IVE_AIR_MODE_NONE
+#define DEFAULT_AIR_REFRESH_PERIOD 30
+#define DEFAULT_SRCH_RNG_X 64
+#define DEFAULT_SRCH_RNG_Y 48
+#define DEFAULT_I_INTERVAL 30
+#define DEFAULT_IDR_INTERVAL 1000
+#define DEFAULT_B_FRAMES 0
+#define DEFAULT_DISABLE_DEBLK_LEVEL 0
+#define DEFAULT_HPEL 1
+#define DEFAULT_QPEL 1
+#define DEFAULT_I4 1
+#define DEFAULT_EPROFILE IV_PROFILE_BASE
+#define DEFAULT_SLICE_MODE IVE_SLICE_MODE_NONE
+#define DEFAULT_SLICE_PARAM 256
+#define DEFAULT_ARCH ARCH_ARM_A9Q
+#define DEFAULT_SOC SOC_GENERIC
+#define DEFAULT_INTRA4x4 0
+#define STRLENGTH 500
+
+
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+#define MAX(a, b) (((a) > (b)) ? (a) : (b))
+#define ALIGN16(x) ((((x) + 15) >> 4) << 4)
+#define ALIGN128(x) ((((x) + 127) >> 7) << 7)
+#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12)
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = ((end.tv_sec - start.tv_sec) * 1000000) + \
+ (end.tv_usec - start.tv_usec);
+
+#define ive_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ive_aligned_free(buf) free(buf)
+
+struct SoftAVC : public SoftVideoEncoderOMXComponent {
+ SoftAVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+ // Override SimpleSoftOMXComponent methods
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+
+protected:
+ virtual ~SoftAVC();
+
+private:
+ enum {
+ kNumBuffers = 2,
+ };
+
+ // OMX input buffer's timestamp and flags
+ typedef struct {
+ int64_t mTimeUs;
+ int32_t mFlags;
+ } InputBufferInfo;
+
+ int32_t mStride;
+
+ uint32_t mFrameRate;
+
+ struct timeval mTimeStart; // Time at the start of encode()
+ struct timeval mTimeEnd; // Time at the end of encode()
+
+
+ // Whether a request to change the bitrate has been received.
+ bool mBitrateUpdated;
+
+ bool mKeyFrameRequested;
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+ char mOutFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ IV_COLOR_FORMAT_T mIvVideoColorFormat;
+
+ int32_t mIDRFrameRefreshIntervalInSec;
+ IV_PROFILE_T mAVCEncProfile;
+ WORD32 mAVCEncLevel;
+ int64_t mNumInputFrames;
+ int64_t mPrevTimestampUs;
+ bool mStarted;
+ bool mSpsPpsHeaderReceived;
+
+ bool mSawInputEOS;
+ bool mSignalledError;
+ bool mIntra4x4;
+ bool mEnableFastSad;
+ bool mEnableAltRef;
+ bool mReconEnable;
+ bool mPSNREnable;
+ IVE_SPEED_CONFIG mEncSpeed;
+
+ uint8_t *mConversionBuffer;
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by codec
+ size_t mNumCores; // Number of cores used by the codec
+
+ UWORD32 mHeaderGenerated;
+
+ IV_ARCH_T mArch;
+ IVE_SLICE_MODE_T mSliceMode;
+ UWORD32 mSliceParam;
+ bool mHalfPelEnable;
+ UWORD32 mIInterval;
+ UWORD32 mIDRInterval;
+ UWORD32 mDisableDeblkLevel;
+ IVE_AIR_MODE_T mAIRMode;
+ UWORD32 mAIRRefreshPeriod;
+
+ OMX_ERRORTYPE initEncParams();
+ OMX_ERRORTYPE initEncoder();
+ OMX_ERRORTYPE releaseEncoder();
+
+ // Verifies that the component role being set on this OMX component is
+ // strictly video_encoder.avc
+ OMX_ERRORTYPE internalSetRoleParams(
+ const OMX_PARAM_COMPONENTROLETYPE *role);
+
+ // Updates bitrate to reflect port settings.
+ OMX_ERRORTYPE internalSetBitrateParams(
+ const OMX_VIDEO_PARAM_BITRATETYPE *bitrate);
+
+ OMX_ERRORTYPE setConfig(
+ OMX_INDEXTYPE index, const OMX_PTR _params);
+
+ // Handles port definition changes.
+ OMX_ERRORTYPE internalSetPortParams(
+ const OMX_PARAM_PORTDEFINITIONTYPE *port);
+
+ OMX_ERRORTYPE internalSetFormatParams(
+ const OMX_VIDEO_PARAM_PORTFORMATTYPE *format);
+
+ OMX_ERRORTYPE setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type);
+ OMX_ERRORTYPE setQp();
+ OMX_ERRORTYPE setEncMode(IVE_ENC_MODE_T e_enc_mode);
+ OMX_ERRORTYPE setDimensions();
+ OMX_ERRORTYPE setNumCores();
+ OMX_ERRORTYPE setFrameRate();
+ OMX_ERRORTYPE setIpeParams();
+ OMX_ERRORTYPE setBitRate();
+ OMX_ERRORTYPE setAirParams();
+ OMX_ERRORTYPE setMeParams();
+ OMX_ERRORTYPE setGopParams();
+ OMX_ERRORTYPE setProfileParams();
+ OMX_ERRORTYPE setDeblockParams();
+ OMX_ERRORTYPE setVbvParams();
+ void logVersion();
+ OMX_ERRORTYPE setEncodeArgs(
+ ive_video_encode_ip_t *ps_encode_ip,
+ ive_video_encode_op_t *ps_encode_op,
+ OMX_BUFFERHEADERTYPE *inputBufferHeader,
+ OMX_BUFFERHEADERTYPE *outputBufferHeader);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/sdcard/media/avce_input"
+#define INPUT_DUMP_EXT "yuv"
+#define OUTPUT_DUMP_PATH "/sdcard/media/avce_output"
+#define OUTPUT_DUMP_EXT "h264"
+
+#define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+ strcpy(mOutFile, ""); \
+ sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ OUTPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ ALOGD("Opened file %s", m_filename); \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // __SOFT_AVC_ENC_H__
diff --git a/media/libstagefright/codecs/common/Android.mk b/media/libstagefright/codecs/common/Android.mk
index a33cb92..b0010ff 100644
--- a/media/libstagefright/codecs/common/Android.mk
+++ b/media/libstagefright/codecs/common/Android.mk
@@ -14,6 +14,8 @@ LOCAL_STATIC_LIBRARIES :=
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/common/cmnMemory.c b/media/libstagefright/codecs/common/cmnMemory.c
index aa52bd9..5bb6cc4 100644
--- a/media/libstagefright/codecs/common/cmnMemory.c
+++ b/media/libstagefright/codecs/common/cmnMemory.c
@@ -26,8 +26,12 @@
//VO_MEM_OPERATOR g_memOP;
+#define UNUSED(x) (void)(x)
+
VO_U32 cmnMemAlloc (VO_S32 uID, VO_MEM_INFO * pMemInfo)
{
+ UNUSED(uID);
+
if (!pMemInfo)
return VO_ERR_INVALID_ARG;
@@ -37,34 +41,48 @@ VO_U32 cmnMemAlloc (VO_S32 uID, VO_MEM_INFO * pMemInfo)
VO_U32 cmnMemFree (VO_S32 uID, VO_PTR pMem)
{
+ UNUSED(uID);
+
free (pMem);
return 0;
}
VO_U32 cmnMemSet (VO_S32 uID, VO_PTR pBuff, VO_U8 uValue, VO_U32 uSize)
{
+ UNUSED(uID);
+
memset (pBuff, uValue, uSize);
return 0;
}
VO_U32 cmnMemCopy (VO_S32 uID, VO_PTR pDest, VO_PTR pSource, VO_U32 uSize)
{
+ UNUSED(uID);
+
memcpy (pDest, pSource, uSize);
return 0;
}
VO_U32 cmnMemCheck (VO_S32 uID, VO_PTR pBuffer, VO_U32 uSize)
{
+ UNUSED(uID);
+ UNUSED(pBuffer);
+ UNUSED(uSize);
+
return 0;
}
VO_S32 cmnMemCompare (VO_S32 uID, VO_PTR pBuffer1, VO_PTR pBuffer2, VO_U32 uSize)
{
+ UNUSED(uID);
+
return memcmp(pBuffer1, pBuffer2, uSize);
}
VO_U32 cmnMemMove (VO_S32 uID, VO_PTR pDest, VO_PTR pSource, VO_U32 uSize)
{
+ UNUSED(uID);
+
memmove (pDest, pSource, uSize);
return 0;
}
diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk
index f01d605..59a11de 100644
--- a/media/libstagefright/codecs/flac/enc/Android.mk
+++ b/media/libstagefright/codecs/flac/enc/Android.mk
@@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax \
external/flac/include
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index e64fe72..9edffd2 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -27,6 +27,12 @@
#define FLAC_COMPRESSION_LEVEL_DEFAULT 5
#define FLAC_COMPRESSION_LEVEL_MAX 8
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
namespace android {
template<class T>
@@ -204,7 +210,7 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter(
mNumChannels = pcmParams->nChannels;
mSampleRate = pcmParams->nSamplingRate;
- ALOGV("will encode %ld channels at %ldHz", mNumChannels, mSampleRate);
+ ALOGV("will encode %d channels at %dHz", mNumChannels, mSampleRate);
return configureEncoder();
}
@@ -241,7 +247,7 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter(
if (defParams->nPortIndex == 0) {
if (defParams->nBufferSize > kMaxInputBufferSize) {
- ALOGE("Input buffer size must be at most %zu bytes",
+ ALOGE("Input buffer size must be at most %d bytes",
kMaxInputBufferSize);
return OMX_ErrorUnsupportedSetting;
}
@@ -257,8 +263,8 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter(
}
void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
-
- ALOGV("SoftFlacEncoder::onQueueFilled(portIndex=%ld)", portIndex);
+ UNUSED_UNLESS_VERBOSE(portIndex);
+ ALOGV("SoftFlacEncoder::onQueueFilled(portIndex=%d)", portIndex);
if (mSignalledError) {
return;
@@ -290,7 +296,7 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
}
if (inHeader->nFilledLen > kMaxInputBufferSize) {
- ALOGE("input buffer too large (%ld).", inHeader->nFilledLen);
+ ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
return;
@@ -343,16 +349,17 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
-
FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable(
const FLAC__byte buffer[],
- size_t bytes, unsigned samples, unsigned current_frame) {
- ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%d, samples=%d, curr_frame=%d)",
+ size_t bytes, unsigned samples,
+ unsigned current_frame) {
+ UNUSED_UNLESS_VERBOSE(current_frame);
+ ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%zu, samples=%u, curr_frame=%u)",
bytes, samples, current_frame);
#ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER
if (samples == 0) {
- ALOGI(" saving %d bytes of header", bytes);
+ ALOGI(" saving %zu bytes of header", bytes);
memcpy(mHeader + mHeaderOffset, buffer, bytes);
mHeaderOffset += bytes;// will contain header size when finished receiving header
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
@@ -363,7 +370,7 @@ FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable(
if ((samples == 0) || !mEncoderWriteData) {
// called by the encoder because there's header data to save, but it's not the role
// of this component (unless WRITE_FLAC_HEADER_IN_FIRST_BUFFER is defined)
- ALOGV("ignoring %d bytes of header data (samples=%d)", bytes, samples);
+ ALOGV("ignoring %zu bytes of header data (samples=%d)", bytes, samples);
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
}
@@ -384,9 +391,9 @@ FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable(
#endif
// write encoded data
- ALOGV(" writing %d bytes of encoded data on output port", bytes);
+ ALOGV(" writing %zu bytes of encoded data on output port", bytes);
if (bytes > outHeader->nAllocLen - outHeader->nOffset - outHeader->nFilledLen) {
- ALOGE(" not enough space left to write encoded data, dropping %u bytes", bytes);
+ ALOGE(" not enough space left to write encoded data, dropping %zu bytes", bytes);
// a fatal error would stop the encoding
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
}
@@ -405,7 +412,7 @@ FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable(
OMX_ERRORTYPE SoftFlacEncoder::configureEncoder() {
- ALOGV("SoftFlacEncoder::configureEncoder() numChannel=%ld, sampleRate=%ld",
+ ALOGV("SoftFlacEncoder::configureEncoder() numChannel=%d, sampleRate=%d",
mNumChannels, mSampleRate);
if (mSignalledError || (mFlacStreamEncoder == NULL)) {
@@ -414,7 +421,6 @@ OMX_ERRORTYPE SoftFlacEncoder::configureEncoder() {
}
FLAC__bool ok = true;
- FLAC__StreamEncoderInitStatus initStatus = FLAC__STREAM_ENCODER_INIT_STATUS_OK;
ok = ok && FLAC__stream_encoder_set_channels(mFlacStreamEncoder, mNumChannels);
ok = ok && FLAC__stream_encoder_set_sample_rate(mFlacStreamEncoder, mSampleRate);
ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, 16);
@@ -444,8 +450,12 @@ return_result:
// static
FLAC__StreamEncoderWriteStatus SoftFlacEncoder::flacEncoderWriteCallback(
- const FLAC__StreamEncoder *encoder, const FLAC__byte buffer[],
- size_t bytes, unsigned samples, unsigned current_frame, void *client_data) {
+ const FLAC__StreamEncoder * /* encoder */,
+ const FLAC__byte buffer[],
+ size_t bytes,
+ unsigned samples,
+ unsigned current_frame,
+ void *client_data) {
return ((SoftFlacEncoder*) client_data)->onEncodedFlacAvailable(
buffer, bytes, samples, current_frame);
}
diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk
index 4c80da6..a0112e1 100644
--- a/media/libstagefright/codecs/g711/dec/Android.mk
+++ b/media/libstagefright/codecs/g711/dec/Android.mk
@@ -14,4 +14,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_g711dec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.cpp b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
index bcdd3c7..015515e 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.cpp
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
@@ -41,8 +41,9 @@ SoftG711::SoftG711(
OMX_COMPONENTTYPE **component)
: SimpleSoftOMXComponent(name, callbacks, appData, component),
mIsMLaw(true),
+ mSignalledError(false),
mNumChannels(1),
- mSignalledError(false) {
+ mSamplingRate(8000) {
if (!strcmp(name, "OMX.google.g711.alaw.decoder")) {
mIsMLaw = false;
} else {
@@ -117,12 +118,19 @@ OMX_ERRORTYPE SoftG711::internalGetParameter(
pcmParams->eEndian = OMX_EndianBig;
pcmParams->bInterleaved = OMX_TRUE;
pcmParams->nBitPerSample = 16;
- pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ if (pcmParams->nPortIndex == 0) {
+ // input port
+ pcmParams->ePCMMode = mIsMLaw ? OMX_AUDIO_PCMModeMULaw
+ : OMX_AUDIO_PCMModeALaw;
+ } else {
+ // output port
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ }
pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
pcmParams->nChannels = mNumChannels;
- pcmParams->nSamplingRate = 8000;
+ pcmParams->nSamplingRate = mSamplingRate;
return OMX_ErrorNone;
}
@@ -152,6 +160,8 @@ OMX_ERRORTYPE SoftG711::internalSetParameter(
mNumChannels = pcmParams->nChannels;
}
+ mSamplingRate = pcmParams->nSamplingRate;
+
return OMX_ErrorNone;
}
@@ -182,7 +192,7 @@ OMX_ERRORTYPE SoftG711::internalSetParameter(
}
}
-void SoftG711::onQueueFilled(OMX_U32 portIndex) {
+void SoftG711::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError) {
return;
}
@@ -212,7 +222,7 @@ void SoftG711::onQueueFilled(OMX_U32 portIndex) {
}
if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) {
- ALOGE("input buffer too large (%ld).", inHeader->nFilledLen);
+ ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
mSignalledError = true;
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.h b/media/libstagefright/codecs/g711/dec/SoftG711.h
index bff0c68..16b6340 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.h
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.h
@@ -46,8 +46,9 @@ private:
};
bool mIsMLaw;
- OMX_U32 mNumChannels;
bool mSignalledError;
+ OMX_U32 mNumChannels;
+ int32_t mSamplingRate;
void initPorts();
diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk
index 71613d2..30868d5 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.mk
+++ b/media/libstagefright/codecs/gsm/dec/Android.mk
@@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax \
external/libgsm/inc
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
index 00e0c85..bd01a1a 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
@@ -34,6 +34,9 @@ static void InitOMXParams(T *params) {
params->nVersion.s.nStep = 0;
}
+// Microsoft WAV GSM encoding packs two GSM frames into 65 bytes.
+static const int kMSGSMFrameSize = 65;
+
SoftGSM::SoftGSM(
const char *name,
const OMX_CALLBACKTYPE *callbacks,
@@ -64,7 +67,7 @@ void SoftGSM::initPorts() {
def.eDir = OMX_DirInput;
def.nBufferCountMin = kNumBuffers;
def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = sizeof(gsm_frame);
+ def.nBufferSize = 1024 / kMSGSMFrameSize * kMSGSMFrameSize;
def.bEnabled = OMX_TRUE;
def.bPopulated = OMX_FALSE;
def.eDomain = OMX_PortDomainAudio;
@@ -172,7 +175,7 @@ OMX_ERRORTYPE SoftGSM::internalSetParameter(
}
}
-void SoftGSM::onQueueFilled(OMX_U32 portIndex) {
+void SoftGSM::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError) {
return;
}
@@ -202,13 +205,13 @@ void SoftGSM::onQueueFilled(OMX_U32 portIndex) {
}
if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) {
- ALOGE("input buffer too large (%ld).", inHeader->nFilledLen);
+ ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
mSignalledError = true;
}
- if(((inHeader->nFilledLen / 65) * 65) != inHeader->nFilledLen) {
- ALOGE("input buffer not multiple of 65 (%ld).", inHeader->nFilledLen);
+ if(((inHeader->nFilledLen / kMSGSMFrameSize) * kMSGSMFrameSize) != inHeader->nFilledLen) {
+ ALOGE("input buffer not multiple of %d (%d).", kMSGSMFrameSize, inHeader->nFilledLen);
notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
mSignalledError = true;
}
@@ -258,6 +261,25 @@ int SoftGSM::DecodeGSM(gsm handle,
return ret;
}
+void SoftGSM::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0) {
+ gsm_destroy(mGsm);
+ mGsm = gsm_create();
+ int msopt = 1;
+ gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
+ }
+}
+
+void SoftGSM::onReset() {
+ gsm_destroy(mGsm);
+ mGsm = gsm_create();
+ int msopt = 1;
+ gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
+ mSignalledError = false;
+}
+
+
+
} // namespace android
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
index 8ab6116..0303dea 100644
--- a/media/libstagefright/codecs/gsm/dec/SoftGSM.h
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
@@ -43,6 +43,9 @@ protected:
virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
+
private:
enum {
kNumBuffers = 4,
diff --git a/media/libstagefright/codecs/hevcdec/Android.mk b/media/libstagefright/codecs/hevcdec/Android.mk
new file mode 100644
index 0000000..c0c694e
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/Android.mk
@@ -0,0 +1,30 @@
+ifeq ($(if $(wildcard external/libhevc),1,0),1)
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libstagefright_soft_hevcdec
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := libhevcdec
+LOCAL_SRC_FILES := SoftHEVC.cpp
+
+LOCAL_C_INCLUDES := $(TOP)/external/libhevc/decoder
+LOCAL_C_INCLUDES += $(TOP)/external/libhevc/common
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES += libstagefright_omx
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation
+LOCAL_SHARED_LIBRARIES += libutils
+LOCAL_SHARED_LIBRARIES += liblog
+
+# We need this because the current asm generates the following link error:
+# requires unsupported dynamic reloc R_ARM_REL32; recompile with -fPIC
+# Bug: 16853291
+LOCAL_LDFLAGS := -Wl,-Bsymbolic
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
new file mode 100644
index 0000000..cddd176
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -0,0 +1,770 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftHEVC"
+#include <utils/Log.h>
+
+#include "ihevc_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+#include "ithread.h"
+#include "ihevcd_cxa.h"
+#include "SoftHEVC.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaDefs.h>
+#include <OMX_VideoExt.h>
+
+namespace android {
+
+#define componentName "video_decoder.hevc"
+#define codingType OMX_VIDEO_CodingHEVC
+#define CODEC_MIME_TYPE MEDIA_MIMETYPE_VIDEO_HEVC
+
+/** Function and structure definitions to keep code similar for each codec */
+#define ivdec_api_function ihevcd_cxa_api_function
+#define ivdext_init_ip_t ihevcd_cxa_init_ip_t
+#define ivdext_init_op_t ihevcd_cxa_init_op_t
+#define ivdext_fill_mem_rec_ip_t ihevcd_cxa_fill_mem_rec_ip_t
+#define ivdext_fill_mem_rec_op_t ihevcd_cxa_fill_mem_rec_op_t
+#define ivdext_ctl_set_num_cores_ip_t ihevcd_cxa_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t
+
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
+
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel1 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel2 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel21 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel3 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel31 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel4 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel41 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel5 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel51 },
+};
+
+SoftHEVC::SoftHEVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVideoDecoderOMXComponent(name, componentName, codingType,
+ kProfileLevels, ARRAY_SIZE(kProfileLevels),
+ 320 /* width */, 240 /* height */, callbacks,
+ appData, component),
+ mMemRecords(NULL),
+ mFlushOutBuffer(NULL),
+ mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
+ mIvColorFormat(IV_YUV_420P),
+ mNewWidth(mWidth),
+ mNewHeight(mHeight),
+ mChangingResolution(false) {
+ const size_t kMinCompressionRatio = 4 /* compressionRatio (for Level 4+) */;
+ const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
+ // INPUT_BUF_SIZE is given by HEVC codec as minimum input size
+ initPorts(
+ kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),
+ kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio);
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftHEVC::~SoftHEVC() {
+ ALOGD("In SoftHEVC::~SoftHEVC");
+ CHECK_EQ(deInitDecoder(), (status_t)OK);
+}
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGD("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+void SoftHEVC::logVersion() {
+ ivd_ctl_getversioninfo_ip_t s_ctl_ip;
+ ivd_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ s_ctl_ip.pv_version_buffer = au1_buf;
+ s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version number: 0x%x",
+ s_ctl_op.u4_error_code);
+ } else {
+ ALOGD("Ittiam decoder version number: %s",
+ (char *)s_ctl_ip.pv_version_buffer);
+ }
+ return;
+}
+
+status_t SoftHEVC::setParams(size_t stride) {
+ ivd_ctl_set_config_ip_t s_ctl_ip;
+ ivd_ctl_set_config_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+ s_ctl_ip.u4_disp_wd = (UWORD32)stride;
+ s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+
+ s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+
+ ALOGV("Set the run-time (dynamic) parameters stride = %u", stride);
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the run-time parameters: 0x%x",
+ s_ctl_op.u4_error_code);
+
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftHEVC::resetPlugin() {
+ mIsInFlush = false;
+ mReceivedEOS = false;
+ memset(mTimeStamps, 0, sizeof(mTimeStamps));
+ memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
+
+ /* Initialize both start and end times */
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+
+ return OK;
+}
+
+status_t SoftHEVC::resetDecoder() {
+ ivd_ctl_reset_ip_t s_ctl_ip;
+ ivd_ctl_reset_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ /* Set the run-time (dynamic) parameters */
+ setParams(outputBufferWidth());
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ return OK;
+}
+
+status_t SoftHEVC::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_cores_op;
+ IV_API_CALL_STATUS_T status;
+ s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
+ s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ ALOGD("Set number of cores to %u", s_set_cores_ip.u4_num_cores);
+ status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip,
+ (void *)&s_set_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in setting number of cores: 0x%x",
+ s_set_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftHEVC::setFlushMode() {
+ IV_API_CALL_STATUS_T status;
+ ivd_ctl_flush_ip_t s_video_flush_ip;
+ ivd_ctl_flush_op_t s_video_flush_op;
+
+ s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+ ALOGD("Set the decoder in flush mode ");
+
+ /* Set the decoder in Flush mode, subsequent decode() calls will flush */
+ status = ivdec_api_function(mCodecCtx, (void *)&s_video_flush_ip,
+ (void *)&s_video_flush_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
+ s_video_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mIsInFlush = true;
+ return OK;
+}
+
+status_t SoftHEVC::initDecoder() {
+ IV_API_CALL_STATUS_T status;
+
+ UWORD32 u4_num_reorder_frames;
+ UWORD32 u4_num_ref_frames;
+ UWORD32 u4_share_disp_buf;
+ WORD32 i4_level;
+
+ mNumCores = GetCPUCoreCount();
+
+ /* Initialize number of ref and reorder modes (for HEVC) */
+ u4_num_reorder_frames = 16;
+ u4_num_ref_frames = 16;
+ u4_share_disp_buf = 0;
+
+ uint32_t displayStride = outputBufferWidth();
+ uint32_t displayHeight = outputBufferHeight();
+ uint32_t displaySizeY = displayStride * displayHeight;
+
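+ /* Pick the decoder level from the output luma size; larger displays require a higher level */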
+ if (displaySizeY > (1920 * 1088)) {
+ i4_level = 50;
+ } else if (displaySizeY > (1280 * 720)) {
+ i4_level = 40;
+ } else if (displaySizeY > (960 * 540)) {
+ i4_level = 31;
+ } else if (displaySizeY > (640 * 360)) {
+ i4_level = 30;
+ } else if (displaySizeY > (352 * 288)) {
+ i4_level = 21;
+ } else {
+ i4_level = 20;
+ }
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
+ s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ ALOGV("Get number of mem records");
+ status = ivdec_api_function(mCodecCtx, (void*)&s_num_mem_rec_ip,
+ (void*)&s_num_mem_rec_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in getting mem records: 0x%x",
+ s_num_mem_rec_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ mMemRecords = (iv_mem_rec_t*)ivd_aligned_malloc(
+ 128, mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (mMemRecords == NULL) {
+ ALOGE("Allocation failure");
+ return NO_MEMORY;
+ }
+
+ memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
+
+ {
+ size_t i;
+ ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
+ ivdext_fill_mem_rec_op_t s_fill_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_ip_t);
+ s_fill_mem_ip.i4_level = i4_level;
+ s_fill_mem_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_fill_mem_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_fill_mem_ip.u4_num_extra_disp_buf = 0;
+ s_fill_mem_ip.e_output_format = mIvColorFormat;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight;
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_op_t);
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++)
+ ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_fill_mem_ip,
+ (void *)&s_fill_mem_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in filling mem records: 0x%x",
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mNumMemRecords =
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
+
+ ps_mem_rec = mMemRecords;
+
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ivd_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == NULL) {
+ ALOGE("Allocation failure for memory record #%zu of size %u",
+ i, ps_mem_rec->u4_mem_size);
+ status = IV_FAIL;
+ return NO_MEMORY;
+ }
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Initialize the decoder */
+ {
+ ivdext_init_ip_t s_init_ip;
+ ivdext_init_op_t s_init_op;
+
+ void *dec_fxns = (void *)ivdec_api_function;
+
+ s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
+ s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
+ s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;
+
+ s_init_ip.i4_level = i4_level;
+ s_init_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_init_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_init_ip.u4_num_extra_disp_buf = 0;
+
+ s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
+
+ s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
+
+ mCodecCtx = (iv_obj_t*)mMemRecords[0].pv_base;
+ mCodecCtx->pv_fxns = dec_fxns;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+
+ ALOGD("Initializing decoder");
+ status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip,
+ (void *)&s_init_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in init: 0x%x",
+ s_init_op.s_ivd_init_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* Reset the plugin state */
+ resetPlugin();
+
+ /* Set the run time (dynamic) parameters */
+ setParams(displayStride);
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ /* Get codec version */
+ logVersion();
+
+ /* Allocate internal picture buffer */
+ uint32_t bufferSize = displaySizeY * 3 / 2;
+ mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
+ if (NULL == mFlushOutBuffer) {
+ ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize);
+ return NO_MEMORY;
+ }
+
+ mInitNeeded = false;
+ mFlushNeeded = false;
+ return OK;
+}
+
+status_t SoftHEVC::deInitDecoder() {
+ size_t i;
+
+ if (mMemRecords) {
+ iv_mem_rec_t *ps_mem_rec;
+
+ ps_mem_rec = mMemRecords;
+ ALOGD("Freeing codec memory");
+ for (i = 0; i < mNumMemRecords; i++) {
+ if(ps_mem_rec->pv_base) {
+ ivd_aligned_free(ps_mem_rec->pv_base);
+ }
+ ps_mem_rec++;
+ }
+ ivd_aligned_free(mMemRecords);
+ mMemRecords = NULL;
+ }
+
+ if(mFlushOutBuffer) {
+ ivd_aligned_free(mFlushOutBuffer);
+ mFlushOutBuffer = NULL;
+ }
+
+ mInitNeeded = true;
+ mChangingResolution = false;
+
+ return OK;
+}
+
+status_t SoftHEVC::reInitDecoder() {
+ status_t ret;
+
+ deInitDecoder();
+
+ ret = initDecoder();
+ if (OK != ret) {
+ ALOGE("Create failure");
+ deInitDecoder();
+ return NO_MEMORY;
+ }
+ return OK;
+}
+
+void SoftHEVC::onReset() {
+ ALOGD("onReset called");
+ SoftVideoDecoderOMXComponent::onReset();
+
+ resetDecoder();
+ resetPlugin();
+}
+
+OMX_ERRORTYPE SoftHEVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
+ const uint32_t oldWidth = mWidth;
+ const uint32_t oldHeight = mHeight;
+ OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params);
+ if (mWidth != oldWidth || mHeight != oldHeight) {
+ reInitDecoder();
+ }
+ return ret;
+}
+
+void SoftHEVC::setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t timeStampIx) {
+ size_t sizeY = outputBufferWidth() * outputBufferHeight();
+ size_t sizeUV;
+ uint8_t *pBuf;
+
+ ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+ ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+
+ /* When in flush and after EOS with zero byte input,
+ * inHeader is set to zero. Hence check for non-null */
+ if (inHeader) {
+ ps_dec_ip->u4_ts = timeStampIx;
+ ps_dec_ip->pv_stream_buffer = inHeader->pBuffer
+ + inHeader->nOffset;
+ ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen;
+ } else {
+ ps_dec_ip->u4_ts = 0;
+ ps_dec_ip->pv_stream_buffer = NULL;
+ ps_dec_ip->u4_num_Bytes = 0;
+ }
+
+ if (outHeader) {
+ pBuf = outHeader->pBuffer;
+ } else {
+ pBuf = mFlushOutBuffer;
+ }
+
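+ /* 420 planar output: each chroma plane is a quarter of the luma plane */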
+ sizeUV = sizeY / 4;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
+
+ ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
+ ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
+ ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
+ ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
+ return;
+}
+void SoftHEVC::onPortFlushCompleted(OMX_U32 portIndex) {
+ /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
+ if (kOutputPortIndex == portIndex) {
+ setFlushMode();
+
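+ /* Drain the decoder until it reports no more output, then reset the plugin state */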
+ while (true) {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ IV_API_CALL_STATUS_T status;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
+ (void *)&s_dec_op);
+ if (0 == s_dec_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+ }
+}
+
+void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
+ UNUSED(portIndex);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+
+ /* If input EOS is seen and decoder is not in flush mode,
+ * set the decoder in flush mode.
+ * There can be a case where EOS is sent along with the last picture data.
+ * In that case, the decoder has to be put in flush only after that input
+ * data has been decoded. This case is handled here */
+
+ if (mReceivedEOS && !mIsInFlush) {
+ setFlushMode();
+ }
+
+ while (!outQueue.empty()) {
+ BufferInfo *inInfo;
+ OMX_BUFFERHEADERTYPE *inHeader;
+
+ BufferInfo *outInfo;
+ OMX_BUFFERHEADERTYPE *outHeader;
+ size_t timeStampIx;
+
+ inInfo = NULL;
+ inHeader = NULL;
+
+ if (!mIsInFlush) {
+ if (!inQueue.empty()) {
+ inInfo = *inQueue.begin();
+ inHeader = inInfo->mHeader;
+ } else {
+ break;
+ }
+ }
+
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = 0;
+ outHeader->nOffset = 0;
+
+ if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
+ ALOGD("EOS seen on input");
+ mReceivedEOS = true;
+ if (inHeader->nFilledLen == 0) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ setFlushMode();
+ }
+ }
+
+ // When there is an init required and the decoder is not in flush mode,
+ // update output port's definition and reinitialize decoder.
+ if (mInitNeeded && !mIsInFlush) {
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
+
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+ return;
+ }
+
+ /* Get a free slot in timestamp array to hold input timestamp */
+ {
+ size_t i;
+ timeStampIx = 0;
+ for (i = 0; i < MAX_TIME_STAMPS; i++) {
+ if (!mTimeStampsValid[i]) {
+ timeStampIx = i;
+ break;
+ }
+ }
+ if (inHeader != NULL) {
+ mTimeStampsValid[timeStampIx] = true;
+ mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
+ }
+ }
+
+ {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ WORD32 timeDelay, timeTaken;
+ size_t sizeY, sizeUV;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
+
+ GETTIME(&mTimeStart, NULL);
+ /* Compute time elapsed between end of previous decode()
+ * to start of current decode() */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+ IV_API_CALL_STATUS_T status;
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ // FIXME: Comparing |status| to IHEVCD_UNSUPPORTED_DIMENSIONS, which is not one of the
+ // IV_API_CALL_STATUS_T values, seems wrong. But this is what the decoder returns right now.
+ // The decoder should be fixed so that IHEVCD_UNSUPPORTED_DIMENSIONS is reported through
+ // |u4_error_code| instead of |status|.
+ bool unsupportedDimensions =
+ ((IHEVCD_UNSUPPORTED_DIMENSIONS == (IHEVCD_CXA_ERROR_CODES_T)status)
+ || (IHEVCD_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code));
+ bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+
+ GETTIME(&mTimeEnd, NULL);
+ /* Compute time taken for decode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_dec_op.u4_num_bytes_consumed);
+ if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
+ mFlushNeeded = true;
+ }
+
+ if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
+ /* If the input did not contain picture data, then ignore
+ * the associated timestamp */
+ mTimeStampsValid[timeStampIx] = false;
+ }
+
+ // This is needed to handle CTS DecoderTest testCodecResetsHEVCWithoutSurface,
+ // which does not send SPS/PPS to the codec after port reconfiguration and flush.
+ if (unsupportedDimensions && !mFlushNeeded) {
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht);
+
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
+
+ ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ return;
+ }
+
+ // If the decoder is in the changing resolution mode and there is no output present,
+ // that means the switching is done and it's ready to reset the decoder and the plugin.
+ if (mChangingResolution && !s_dec_op.u4_output_present) {
+ mChangingResolution = false;
+ resetDecoder();
+ resetPlugin();
+ continue;
+ }
+
+ if (unsupportedDimensions || resChanged) {
+ mChangingResolution = true;
+ if (mFlushNeeded) {
+ setFlushMode();
+ }
+
+ if (unsupportedDimensions) {
+ mNewWidth = s_dec_op.u4_pic_wd;
+ mNewHeight = s_dec_op.u4_pic_ht;
+ mInitNeeded = true;
+ }
+ continue;
+ }
+
+ if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
+ uint32_t width = s_dec_op.u4_pic_wd;
+ uint32_t height = s_dec_op.u4_pic_ht;
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, width, height);
+
+ if (portWillReset) {
+ resetDecoder();
+ return;
+ }
+ }
+
+ if (s_dec_op.u4_output_present) {
+ outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+
+ outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
+ mTimeStampsValid[s_dec_op.u4_ts] = false;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ } else {
+ /* If in flush mode and no output is returned by the codec,
+ * then come out of flush mode */
+ mIsInFlush = false;
+
+ /* If EOS was received on the input port and there is no output
+ * from the codec, then signal EOS on output port */
+ if (mReceivedEOS) {
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ resetPlugin();
+ }
+ }
+ }
+
+ // TODO: Handle more than one picture data
+ if (inHeader != NULL) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(const char *name,
+ const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
+ OMX_COMPONENTTYPE **component) {
+ return new android::SoftHEVC(name, callbacks, appData, component);
+}
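A stand-alone sketch of the timestamp handling in SoftHEVC.cpp above: the decoder API only
carries a small integer tag (u4_ts), so the plugin parks each 64-bit OMX timestamp in a table
and passes only the slot index through the codec. The helper names below are illustrative and
not part of the patch.

    #include <cstdint>
    #include <cstddef>

    static const size_t kMaxTimeStamps = 64;
    static int64_t sTimeStamps[kMaxTimeStamps];
    static bool sTimeStampsValid[kMaxTimeStamps];

    // Find a free slot, remember the timestamp, and return the tag that is
    // handed to the decoder (mirrors the slot-search loop in onQueueFilled()).
    static size_t rememberTimeStamp(int64_t timeUs) {
        size_t ix = 0;
        for (size_t i = 0; i < kMaxTimeStamps; ++i) {
            if (!sTimeStampsValid[i]) { ix = i; break; }
        }
        sTimeStampsValid[ix] = true;
        sTimeStamps[ix] = timeUs;
        return ix;
    }

    // When the decoder returns a picture tagged with |ix| (s_dec_op.u4_ts),
    // recover the timestamp and free the slot.
    static int64_t recoverTimeStamp(size_t ix) {
        sTimeStampsValid[ix] = false;
        return sTimeStamps[ix];
    }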
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
new file mode 100644
index 0000000..a91f528
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_HEVC_H_
+
+#define SOFT_HEVC_H_
+
+#include "SoftVideoDecoderOMXComponent.h"
+#include <sys/time.h>
+
+namespace android {
+
+#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ivd_aligned_free(buf) free(buf)
+
+/** Number of entries in the time-stamp array */
+#define MAX_TIME_STAMPS 64
+
+/** Maximum number of cores supported by the codec */
+#define CODEC_MAX_NUM_CORES 4
+
+#define CODEC_MAX_WIDTH 1920
+
+#define CODEC_MAX_HEIGHT 1088
+
+/** Input buffer size */
+#define INPUT_BUF_SIZE (1024 * 1024)
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = ((end.tv_sec - start.tv_sec) * 1000000) + \
+ (end.tv_usec - start.tv_usec);
+
+struct SoftHEVC: public SoftVideoDecoderOMXComponent {
+ SoftHEVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftHEVC();
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
+ virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
+private:
+ // Number of input and output buffers
+ enum {
+ kNumBuffers = 8
+ };
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by the codec
+
+ size_t mNumCores; // Number of cores to be used by the codec
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+ // Internal buffer used to flush out the buffers from the decoder
+ uint8_t *mFlushOutBuffer;
+
+ // Status of entries in the timestamp array
+ bool mTimeStampsValid[MAX_TIME_STAMPS];
+
+ // Timestamp array - since the codec does not take 64-bit timestamps,
+ // they are maintained in the plugin
+ OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
+
+ OMX_COLOR_FORMATTYPE mOmxColorFormat; // OMX Color format
+ IV_COLOR_FORMAT_T mIvColorFormat; // Ittiam Color format
+
+ bool mIsInFlush; // codec is in flush mode
+ bool mReceivedEOS; // EOS is received on input port
+ bool mInitNeeded;
+ uint32_t mNewWidth;
+ uint32_t mNewHeight;
+ // The input stream has changed to a different resolution, which is still supported by the
+ // codec. So the codec is switching to decode the new resolution.
+ bool mChangingResolution;
+ bool mFlushNeeded;
+
+ status_t initDecoder();
+ status_t deInitDecoder();
+ status_t setFlushMode();
+ status_t setParams(size_t stride);
+ void logVersion();
+ status_t setNumCores();
+ status_t resetDecoder();
+ status_t resetPlugin();
+ status_t reInitDecoder();
+
+ void setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t timeStampIx);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftHEVC);
+};
+
+} // namespace android
+
+#endif // SOFT_HEVC_H_
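The GETTIME/TIME_DIFF macros above only wrap gettimeofday() and a microsecond subtraction.
A minimal stand-alone equivalent, with decodeOneFrame() standing in for the
ivdec_api_function() call:

    #include <sys/time.h>
    #include <cstdio>

    static void decodeOneFrame() { /* stand-in for the decode call */ }

    int main() {
        struct timeval start, end;
        gettimeofday(&start, NULL);                       // GETTIME(&mTimeStart, NULL)
        decodeOneFrame();
        gettimeofday(&end, NULL);                         // GETTIME(&mTimeEnd, NULL)
        long timeTaken = (end.tv_sec - start.tv_sec) * 1000000
                       + (end.tv_usec - start.tv_usec);   // TIME_DIFF(start, end, timeTaken)
        printf("decode took %ld us\n", timeTaken);
        return 0;
    }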
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
index a3d5779..1d232c6 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
@@ -46,6 +46,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -72,4 +74,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_mpeg4dec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index fb2a430..ede645c 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -21,6 +21,7 @@
#include "SoftMPEG4.h"
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/IOMX.h>
@@ -70,7 +71,7 @@ SoftMPEG4::SoftMPEG4(
mPvTime(0) {
initPorts(
kNumInputBuffers,
- 8192 /* inputBufferSize */,
+ 352 * 288 * 3 / 2 /* minInputBufferSize */,
kNumOutputBuffers,
(mMode == MODE_MPEG4)
? MEDIA_MIMETYPE_VIDEO_MPEG4 : MEDIA_MIMETYPE_VIDEO_H263);
@@ -91,7 +92,7 @@ status_t SoftMPEG4::initDecoder() {
return OK;
}
-void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
+void SoftMPEG4::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mOutputPortSettingsChange != NONE) {
return;
}
@@ -134,6 +135,12 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
}
uint8_t *bitstream = inHeader->pBuffer + inHeader->nOffset;
+ uint32_t *start_code = (uint32_t *)bitstream;
+ bool volHeader = *start_code == 0xB0010000;
+ if (volHeader) {
+ PVCleanUpVideoDecoder(mHandle);
+ mInitialized = false;
+ }
if (!mInitialized) {
uint8_t *vol_data[1];
@@ -141,7 +148,7 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
vol_data[0] = NULL;
- if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) || volHeader) {
vol_data[0] = bitstream;
vol_size = inHeader->nFilledLen;
}
@@ -150,7 +157,8 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
(mMode == MODE_MPEG4) ? MPEG4_MODE : H263_MODE;
Bool success = PVInitVideoDecoder(
- mHandle, vol_data, &vol_size, 1, mWidth, mHeight, mode);
+ mHandle, vol_data, &vol_size, 1,
+ outputBufferWidth(), outputBufferHeight(), mode);
if (!success) {
ALOGW("PVInitVideoDecoder failed. Unsupported content?");
@@ -169,21 +177,26 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
PVSetPostProcType((VideoDecControls *) mHandle, 0);
+ bool hasFrameData = false;
if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
inInfo->mOwnedByUs = false;
inQueue.erase(inQueue.begin());
inInfo = NULL;
notifyEmptyBufferDone(inHeader);
inHeader = NULL;
+ } else if (volHeader) {
+ hasFrameData = true;
}
mInitialized = true;
- if (mode == MPEG4_MODE && portSettingsChanged()) {
+ if (mode == MPEG4_MODE && handlePortSettingsChange()) {
return;
}
- continue;
+ if (!hasFrameData) {
+ continue;
+ }
}
if (!mFramesConfigured) {
@@ -223,7 +236,9 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
return;
}
- if (portSettingsChanged()) {
+ // H263 doesn't have a VOL header; the frame size information is in the short header, so the
+ // decoder may detect a size change after PVDecodeVideoFrame.
+ if (handlePortSettingsChange()) {
return;
}
@@ -269,7 +284,7 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
}
}
-bool SoftMPEG4::portSettingsChanged() {
+bool SoftMPEG4::handlePortSettingsChange() {
uint32_t disp_width, disp_height;
PVGetVideoDimensions(mHandle, (int32 *)&disp_width, (int32 *)&disp_height);
@@ -282,25 +297,24 @@ bool SoftMPEG4::portSettingsChanged() {
ALOGV("disp_width = %d, disp_height = %d, buf_width = %d, buf_height = %d",
disp_width, disp_height, buf_width, buf_height);
- if (mCropWidth != disp_width
- || mCropHeight != disp_height) {
- mCropLeft = 0;
- mCropTop = 0;
- mCropWidth = disp_width;
- mCropHeight = disp_height;
-
- notify(OMX_EventPortSettingsChanged,
- 1,
- OMX_IndexConfigCommonOutputCrop,
- NULL);
- }
+ CropSettingsMode cropSettingsMode = kCropUnSet;
+ if (disp_width != buf_width || disp_height != buf_height) {
+ cropSettingsMode = kCropSet;
- if (buf_width != mWidth || buf_height != mHeight) {
- mWidth = buf_width;
- mHeight = buf_height;
-
- updatePortDefinitions();
+ if (mCropWidth != disp_width || mCropHeight != disp_height) {
+ mCropLeft = 0;
+ mCropTop = 0;
+ mCropWidth = disp_width;
+ mCropHeight = disp_height;
+ cropSettingsMode = kCropChanged;
+ }
+ }
+ bool portWillReset = false;
+ const bool fakeStride = true;
+ SoftVideoDecoderOMXComponent::handlePortSettingsChange(
+ &portWillReset, buf_width, buf_height, cropSettingsMode, fakeStride);
+ if (portWillReset) {
if (mMode == MODE_H263) {
PVCleanUpVideoDecoder(mHandle);
@@ -309,7 +323,7 @@ bool SoftMPEG4::portSettingsChanged() {
vol_data[0] = NULL;
if (!PVInitVideoDecoder(
- mHandle, vol_data, &vol_size, 1, mWidth, mHeight,
+ mHandle, vol_data, &vol_size, 1, outputBufferWidth(), outputBufferHeight(),
H263_MODE)) {
notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
mSignalledError = true;
@@ -318,13 +332,9 @@ bool SoftMPEG4::portSettingsChanged() {
}
mFramesConfigured = false;
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- return true;
}
- return false;
+ return portWillReset;
}
void SoftMPEG4::onPortFlushCompleted(OMX_U32 portIndex) {
@@ -344,14 +354,14 @@ void SoftMPEG4::onReset() {
}
}
-void SoftMPEG4::updatePortDefinitions() {
- SoftVideoDecoderOMXComponent::updatePortDefinitions();
+void SoftMPEG4::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
+ SoftVideoDecoderOMXComponent::updatePortDefinitions(updateCrop, updateInputSize);
/* We have to align our width and height - this should affect stride! */
OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
- def->nBufferSize =
- (((def->format.video.nFrameWidth + 15) & -16)
- * ((def->format.video.nFrameHeight + 15) & -16) * 3) / 2;
+ def->format.video.nStride = align(def->format.video.nStride, 16);
+ def->format.video.nSliceHeight = align(def->format.video.nSliceHeight, 16);
+ def->nBufferSize = (def->format.video.nStride * def->format.video.nSliceHeight * 3) / 2;
}
} // namespace android
@@ -373,5 +383,6 @@ android::SoftOMXComponent *createSoftOMXComponent(
} else {
CHECK(!"Unknown component");
}
+ return NULL;
}
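The updatePortDefinitions() change above derives nBufferSize from a 16-aligned stride and
slice height instead of re-aligning the frame dimensions inline. A small sketch of that
computation, with alignUp() standing in for the AUtils align() helper (assumed to round up to
the next multiple of a power-of-two alignment):

    #include <cstdint>
    #include <cstdio>

    static uint32_t alignUp(uint32_t v, uint32_t a) {
        return (v + a - 1) & ~(a - 1);
    }

    int main() {
        uint32_t stride      = alignUp(176, 16);   // QCIF width, already aligned
        uint32_t sliceHeight = alignUp(144, 16);   // QCIF height, already aligned
        // YUV420: one full-size luma plane plus two quarter-size chroma planes.
        uint32_t bufferSize  = stride * sliceHeight * 3 / 2;
        printf("nBufferSize = %u\n", bufferSize);  // 38016 bytes for QCIF
        return 0;
    }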
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
index de14aaf..4114e7d 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.h
@@ -66,8 +66,8 @@ private:
status_t initDecoder();
- virtual void updatePortDefinitions();
- bool portSettingsChanged();
+ virtual void updatePortDefinitions(bool updateCrop = true, bool updateInputSize = false);
+ bool handlePortSettingsChange();
DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4);
};
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp
index a357ea6..1ac88a1 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/find_min_max.cpp
@@ -138,8 +138,8 @@ void FindMaxMin(
/*----------------------------------------------------------------------------
; Define all local variables
----------------------------------------------------------------------------*/
- register uint i, j;
- register int min, max;
+ uint i, j;
+ int min, max;
/*----------------------------------------------------------------------------
; Function body here
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
index 844bd14..90d7c6b 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/pvdec_api.cpp
@@ -247,10 +247,13 @@ OSCL_EXPORT_REF Bool PVInitVideoDecoder(VideoDecControls *decCtrl, uint8 *volbuf
video->vol[idx]->useReverseVLC = 0;
video->intra_acdcPredDisable = 1;
video->vol[idx]->scalability = 0;
- video->size = (int32)width * height;
- video->displayWidth = video->width = width;
- video->displayHeight = video->height = height;
+ video->displayWidth = width;
+ video->displayHeight = height;
+ video->width = (width + 15) & -16;
+ video->height = (height + 15) & -16;
+ video->size = (int32)video->width * video->height;
+
#ifdef PV_ANNEX_IJKT_SUPPORT
video->modified_quant = 0;
video->advanced_INTRA = 0;
@@ -289,8 +292,10 @@ Bool PVAllocVideoData(VideoDecControls *decCtrl, int width, int height, int nLay
if (video->shortVideoHeader == PV_TRUE)
{
- video->displayWidth = video->width = width;
- video->displayHeight = video->height = height;
+ video->displayWidth = width;
+ video->displayHeight = height;
+ video->width = (width + 15) & -16;
+ video->height = (height + 15) & -16;
video->nMBPerRow =
video->nMBinGOB = video->width / MB_SIZE;
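The PVInitVideoDecoder()/PVAllocVideoData() changes above keep the requested display size but
round the coded size up to whole 16x16 macroblocks, so the frame store covers every macroblock
even for content whose dimensions are not multiples of 16. An illustrative calculation
(dimensions chosen arbitrarily):

    #include <cstdio>

    int main() {
        int displayWidth = 320, displayHeight = 180;
        int width  = (displayWidth  + 15) & -16;   // 320 (already aligned)
        int height = (displayHeight + 15) & -16;   // 192 (rounded up)
        int size   = width * height;               // luma samples in the frame store
        printf("coded %dx%d, %d luma samples\n", width, height, size);
        return 0;
    }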
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
index b3c350f..60c79a6 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/vop.cpp
@@ -118,6 +118,10 @@ PV_STATUS DecodeVOLHeader(VideoDecData *video, int layer)
{
/* support SPL0-3 & SSPL0-2 */
if (tmpvar != 0x01 && tmpvar != 0x02 && tmpvar != 0x03 && tmpvar != 0x08 &&
+ /* While not technically supported, try to decode SPL4&SPL5 files as well. */
+ /* We'll fail later if the size is too large. This is to allow playback of */
+ /* some <=CIF files generated by other encoders. */
+ tmpvar != 0x04 && tmpvar != 0x05 &&
tmpvar != 0x10 && tmpvar != 0x11 && tmpvar != 0x12 &&
tmpvar != 0x21 && tmpvar != 0x22 && /* Core Profile Levels */
tmpvar != 0xA1 && tmpvar != 0xA2 && tmpvar != 0xA3 &&
@@ -1426,7 +1430,7 @@ PV_STATUS DecodeShortHeader(VideoDecData *video, Vop *currVop)
video->nBitsForMBID = CalcNumBits((uint)video->nTotalMB - 1); /* otherwise calculate above */
}
size = (int32)video->width * video->height;
- if (video->currVop->predictionType == P_VOP && size > video->videoDecControls->size)
+ if (currVop->predictionType == P_VOP && size > video->videoDecControls->size)
{
status = PV_FAIL;
goto return_point;
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
index 83a2dd2..7117692 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
@@ -25,7 +25,7 @@ LOCAL_MODULE := libstagefright_m4vh263enc
LOCAL_CFLAGS := \
-DBX_RC \
- -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
+ -DOSCL_IMPORT_REF= -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_EXPORT_REF=
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
@@ -33,6 +33,8 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -53,7 +55,7 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DBX_RC \
- -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
+ -DOSCL_IMPORT_REF= -D"OSCL_UNUSED_ARG(x)=(void)(x)" -DOSCL_EXPORT_REF=
LOCAL_STATIC_LIBRARIES := \
@@ -72,4 +74,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_mpeg4enc
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
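The build changes above redefine OSCL_UNUSED_ARG as a cast to void rather than an empty macro;
with -Werror (and unused-parameter warnings enabled) an ignored argument would otherwise break
the build. A minimal illustration, not taken from the patched sources:

    #define OSCL_UNUSED_ARG(x) (void)(x)

    static int decodeStub(int flags, void *reserved) {
        OSCL_UNUSED_ARG(reserved);   // expands to (void)(reserved); silences the warning
        return flags != 0;
    }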
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index e02af90..8240f83 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoftMPEG4Encoder"
#include <utils/Log.h>
+#include <utils/misc.h>
#include "mp4enc_api.h"
#include "OMX_Video.h"
@@ -24,6 +25,7 @@
#include <HardwareAPI.h>
#include <MetadataBufferType.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -33,6 +35,8 @@
#include "SoftMPEG4Encoder.h"
+#include <inttypes.h>
+
namespace android {
template<class T>
@@ -44,49 +48,30 @@ static void InitOMXParams(T *params) {
params->nVersion.s.nStep = 0;
}
-inline static void ConvertYUV420SemiPlanarToYUV420Planar(
- uint8_t *inyuv, uint8_t* outyuv,
- int32_t width, int32_t height) {
-
- int32_t outYsize = width * height;
- uint32_t *outy = (uint32_t *) outyuv;
- uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
- uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
- /* Y copying */
- memcpy(outy, inyuv, outYsize);
-
- /* U & V copying */
- uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
- for (int32_t i = height >> 1; i > 0; --i) {
- for (int32_t j = width >> 2; j > 0; --j) {
- uint32_t temp = *inyuv_4++;
- uint32_t tempU = temp & 0xFF;
- tempU = tempU | ((temp >> 8) & 0xFF00);
+static const CodecProfileLevel kMPEG4ProfileLevels[] = {
+ { OMX_VIDEO_MPEG4ProfileCore, OMX_VIDEO_MPEG4Level2 },
+};
- uint32_t tempV = (temp >> 8) & 0xFF;
- tempV = tempV | ((temp >> 16) & 0xFF00);
-
- // Flip U and V
- *outcb++ = tempV;
- *outcr++ = tempU;
- }
- }
-}
+static const CodecProfileLevel kH263ProfileLevels[] = {
+ { OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level45 },
+};
SoftMPEG4Encoder::SoftMPEG4Encoder(
const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const char *mime,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ : SoftVideoEncoderOMXComponent(
+ name, componentRole, codingType,
+ profileLevels, numProfileLevels,
+ 176 /* width */, 144 /* height */,
+ callbacks, appData, component),
mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
- mVideoWidth(176),
- mVideoHeight(144),
- mVideoFrameRate(30),
- mVideoBitRate(192000),
- mVideoColorFormat(OMX_COLOR_FormatYUV420Planar),
- mStoreMetaDataInBuffers(false),
mIDRFrameRefreshIntervalInSec(1),
mNumInputFrames(-1),
mStarted(false),
@@ -96,13 +81,15 @@ SoftMPEG4Encoder::SoftMPEG4Encoder(
mEncParams(new tagvideoEncOptions),
mInputFrameData(NULL) {
- if (!strcmp(name, "OMX.google.h263.encoder")) {
+ if (codingType == OMX_VIDEO_CodingH263) {
mEncodeMode = H263_MODE;
- } else {
- CHECK(!strcmp(name, "OMX.google.mpeg4.encoder"));
}
- initPorts();
+ // 256 * 1024 is a magic number for PV's encoder, not sure why
+ const size_t kOutputBufferSize = 256 * 1024;
+
+ initPorts(kNumBuffers, kNumBuffers, kOutputBufferSize, mime);
+
ALOGI("Construct SoftMPEG4Encoder");
}
@@ -126,9 +113,9 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
return OMX_ErrorUndefined;
}
mEncParams->encMode = mEncodeMode;
- mEncParams->encWidth[0] = mVideoWidth;
- mEncParams->encHeight[0] = mVideoHeight;
- mEncParams->encFrameRate[0] = mVideoFrameRate;
+ mEncParams->encWidth[0] = mWidth;
+ mEncParams->encHeight[0] = mHeight;
+ mEncParams->encFrameRate[0] = mFramerate >> 16; // mFramerate is in Q16 format
mEncParams->rcType = VBR_1;
mEncParams->vbvDelay = 5.0f;
@@ -139,26 +126,26 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
mEncParams->rvlcEnable = PV_OFF;
mEncParams->numLayers = 1;
mEncParams->timeIncRes = 1000;
- mEncParams->tickPerSrc = mEncParams->timeIncRes / mVideoFrameRate;
+ mEncParams->tickPerSrc = ((int64_t)mEncParams->timeIncRes << 16) / mFramerate;
- mEncParams->bitRate[0] = mVideoBitRate;
+ mEncParams->bitRate[0] = mBitrate;
mEncParams->iQuant[0] = 15;
mEncParams->pQuant[0] = 12;
mEncParams->quantType[0] = 0;
mEncParams->noFrameSkipped = PV_OFF;
- if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
// Color conversion is needed.
- CHECK(mInputFrameData == NULL);
+ free(mInputFrameData);
mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
+ (uint8_t *) malloc((mWidth * mHeight * 3 ) >> 1);
CHECK(mInputFrameData != NULL);
}
// PV's MPEG4 encoder requires the video dimension of multiple
- if (mVideoWidth % 16 != 0 || mVideoHeight % 16 != 0) {
+ if (mWidth % 16 != 0 || mHeight % 16 != 0) {
ALOGE("Video frame size %dx%d must be a multiple of 16",
- mVideoWidth, mVideoHeight);
+ mWidth, mHeight);
return OMX_ErrorBadParameter;
}
@@ -169,7 +156,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
mEncParams->intraPeriod = 1; // All I frames
} else {
mEncParams->intraPeriod =
- (mIDRFrameRefreshIntervalInSec * mVideoFrameRate);
+ (mIDRFrameRefreshIntervalInSec * mFramerate) >> 16;
}
mEncParams->numIntraMB = 0;
@@ -214,7 +201,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::releaseEncoder() {
PVCleanUpVideoEncoder(mHandle);
- delete mInputFrameData;
+ free(mInputFrameData);
mInputFrameData = NULL;
delete mEncParams;
@@ -228,81 +215,9 @@ OMX_ERRORTYPE SoftMPEG4Encoder::releaseEncoder() {
return OMX_ErrorNone;
}
-void SoftMPEG4Encoder::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
-
- const size_t kInputBufferSize = (mVideoWidth * mVideoHeight * 3) >> 1;
-
- // 256 * 1024 is a magic number for PV's encoder, not sure why
- const size_t kOutputBufferSize =
- (kInputBufferSize > 256 * 1024)
- ? kInputBufferSize: 256 * 1024;
-
- def.nPortIndex = 0;
- def.eDir = OMX_DirInput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kInputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 1;
-
- def.format.video.cMIMEType = const_cast<char *>("video/raw");
-
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
- def.format.video.xFramerate = (mVideoFrameRate << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-
- def.nPortIndex = 1;
- def.eDir = OMX_DirOutput;
- def.nBufferCountMin = kNumBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kOutputBufferSize;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
-
- def.format.video.cMIMEType =
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
- ? const_cast<char *>(MEDIA_MIMETYPE_VIDEO_MPEG4)
- : const_cast<char *>(MEDIA_MIMETYPE_VIDEO_H263);
-
- def.format.video.eCompressionFormat =
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
- ? OMX_VIDEO_CodingMPEG4
- : OMX_VIDEO_CodingH263;
-
- def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- def.format.video.xFramerate = (0 << 16); // Q16 format
- def.format.video.nBitrate = mVideoBitRate;
- def.format.video.nFrameWidth = mVideoWidth;
- def.format.video.nFrameHeight = mVideoHeight;
- def.format.video.nStride = mVideoWidth;
- def.format.video.nSliceHeight = mVideoHeight;
-
- addPort(def);
-}
-
OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
OMX_INDEXTYPE index, OMX_PTR params) {
switch (index) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -313,41 +228,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
}
bitRate->eControlRate = OMX_Video_ControlRateVariable;
- bitRate->nTargetBitrate = mVideoBitRate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- if (formatParams->nIndex == 0) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- } else if (formatParams->nIndex == 1) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- } else {
- formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
- } else {
- formatParams->eCompressionFormat =
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES)
- ? OMX_VIDEO_CodingMPEG4
- : OMX_VIDEO_CodingH263;
-
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- }
-
+ bitRate->nTargetBitrate = mBitrate;
return OMX_ErrorNone;
}
@@ -396,32 +277,8 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalGetParameter(
return OMX_ErrorNone;
}
- case OMX_IndexParamVideoProfileLevelQuerySupported:
- {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)params;
-
- if (profileLevel->nPortIndex != 1) {
- return OMX_ErrorUndefined;
- }
-
- if (profileLevel->nProfileIndex > 0) {
- return OMX_ErrorNoMore;
- }
-
- if (mEncodeMode == H263_MODE) {
- profileLevel->eProfile = OMX_VIDEO_H263ProfileBaseline;
- profileLevel->eLevel = OMX_VIDEO_H263Level45;
- } else {
- profileLevel->eProfile = OMX_VIDEO_MPEG4ProfileCore;
- profileLevel->eLevel = OMX_VIDEO_MPEG4Level2;
- }
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalGetParameter(index, params);
}
}
@@ -430,11 +287,6 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamVideoErrorCorrection:
- {
- return OMX_ErrorNotImplemented;
- }
-
case OMX_IndexParamVideoBitrate:
{
OMX_VIDEO_PARAM_BITRATETYPE *bitRate =
@@ -445,101 +297,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
return OMX_ErrorUndefined;
}
- mVideoBitRate = bitRate->nTargetBitrate;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamPortDefinition:
- {
- OMX_PARAM_PORTDEFINITIONTYPE *def =
- (OMX_PARAM_PORTDEFINITIONTYPE *)params;
- if (def->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (def->nPortIndex == 0) {
- if (def->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar &&
- def->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
- return OMX_ErrorUndefined;
- }
- } else {
- if ((mEncodeMode == COMBINE_MODE_WITH_ERR_RES &&
- def->format.video.eCompressionFormat != OMX_VIDEO_CodingMPEG4) ||
- (mEncodeMode == H263_MODE &&
- def->format.video.eCompressionFormat != OMX_VIDEO_CodingH263) ||
- (def->format.video.eColorFormat != OMX_COLOR_FormatUnused)) {
- return OMX_ErrorUndefined;
- }
- }
-
- OMX_ERRORTYPE err = SimpleSoftOMXComponent::internalSetParameter(index, params);
- if (OMX_ErrorNone != err) {
- return err;
- }
-
- if (def->nPortIndex == 0) {
- mVideoWidth = def->format.video.nFrameWidth;
- mVideoHeight = def->format.video.nFrameHeight;
- mVideoFrameRate = def->format.video.xFramerate >> 16;
- mVideoColorFormat = def->format.video.eColorFormat;
- } else {
- mVideoBitRate = def->format.video.nBitrate;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamStandardComponentRole:
- {
- const OMX_PARAM_COMPONENTROLETYPE *roleParams =
- (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
- if (strncmp((const char *)roleParams->cRole,
- (mEncodeMode == H263_MODE)
- ? "video_encoder.h263": "video_encoder.mpeg4",
- OMX_MAX_STRINGNAME_SIZE - 1)) {
- return OMX_ErrorUndefined;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- const OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex > 2) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused ||
- ((formatParams->nIndex == 0 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) ||
- (formatParams->nIndex == 1 &&
- formatParams->eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar) ||
- (formatParams->nIndex == 2 &&
- formatParams->eColorFormat != OMX_COLOR_FormatAndroidOpaque) )) {
- return OMX_ErrorUndefined;
- }
- mVideoColorFormat = formatParams->eColorFormat;
- } else {
- if ((mEncodeMode == H263_MODE &&
- formatParams->eCompressionFormat != OMX_VIDEO_CodingH263) ||
- (mEncodeMode == COMBINE_MODE_WITH_ERR_RES &&
- formatParams->eCompressionFormat != OMX_VIDEO_CodingMPEG4) ||
- formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
- return OMX_ErrorUndefined;
- }
- }
-
+ mBitrate = bitRate->nTargetBitrate;
return OMX_ErrorNone;
}
@@ -590,37 +348,12 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
return OMX_ErrorNone;
}
- case kStoreMetaDataExtensionIndex:
- {
- StoreMetaDataInBuffersParams *storeParams =
- (StoreMetaDataInBuffersParams*)params;
- if (storeParams->nPortIndex != 0) {
- ALOGE("%s: StoreMetadataInBuffersParams.nPortIndex not zero!",
- __FUNCTION__);
- return OMX_ErrorUndefined;
- }
-
- mStoreMetaDataInBuffers = storeParams->bStoreMetaData;
- ALOGV("StoreMetaDataInBuffers set to: %s",
- mStoreMetaDataInBuffers ? " true" : "false");
-
- if (mStoreMetaDataInBuffers) {
- mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
- if (mInputFrameData == NULL) {
- mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
- }
- }
-
- return OMX_ErrorNone;
- }
-
default:
- return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ return SoftVideoEncoderOMXComponent::internalSetParameter(index, params);
}
}
-void SoftMPEG4Encoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mSawInputEOS) {
return;
}
@@ -677,55 +410,48 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 portIndex) {
mSawInputEOS = true;
}
- buffer_handle_t srcBuffer; // for MetaDataMode only
if (inHeader->nFilledLen > 0) {
- uint8_t *inputData = NULL;
- if (mStoreMetaDataInBuffers) {
- if (inHeader->nFilledLen != 8) {
- ALOGE("MetaData buffer is wrong size! "
- "(got %lu bytes, expected 8)", inHeader->nFilledLen);
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
- return;
- }
+ const uint8_t *inputData = NULL;
+ if (mInputDataIsMeta) {
inputData =
- extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
- &srcBuffer);
+ extractGraphicBuffer(
+ mInputFrameData, (mWidth * mHeight * 3) >> 1,
+ inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
+ mWidth, mHeight);
if (inputData == NULL) {
ALOGE("Unable to extract gralloc buffer in metadata mode");
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
return;
}
- // TODO: Verify/convert pixel format enum
} else {
- inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ ConvertYUV420SemiPlanarToYUV420Planar(
+ inputData, mInputFrameData, mWidth, mHeight);
+ inputData = mInputFrameData;
+ }
}
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
- ConvertYUV420SemiPlanarToYUV420Planar(
- inputData, mInputFrameData, mVideoWidth, mVideoHeight);
- inputData = mInputFrameData;
- }
CHECK(inputData != NULL);
VideoEncFrameIO vin, vout;
memset(&vin, 0, sizeof(vin));
memset(&vout, 0, sizeof(vout));
- vin.height = ((mVideoHeight + 15) >> 4) << 4;
- vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
+ vin.height = align(mHeight, 16);
+ vin.pitch = align(mWidth, 16);
vin.timestamp = (inHeader->nTimeStamp + 500) / 1000; // in ms
- vin.yChan = inputData;
+ vin.yChan = (uint8_t *)inputData;
vin.uChan = vin.yChan + vin.height * vin.pitch;
vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);
- unsigned long modTimeMs = 0;
+ ULong modTimeMs = 0;
int32_t nLayer = 0;
MP4HintTrack hintTrack;
if (!PVEncodeVideoFrame(mHandle, &vin, &vout,
&modTimeMs, outPtr, &dataLength, &nLayer) ||
!PVGetHintTrack(mHandle, &hintTrack)) {
- ALOGE("Failed to encode frame or get hink track at frame %lld",
+ ALOGE("Failed to encode frame or get hink track at frame %" PRId64,
mNumInputFrames);
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
@@ -742,7 +468,6 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 portIndex) {
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
- releaseGrallocData(srcBuffer);
notifyEmptyBufferDone(inHeader);
outQueue.erase(outQueue.begin());
@@ -757,51 +482,24 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 portIndex) {
}
}
-OMX_ERRORTYPE SoftMPEG4Encoder::getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index) {
- if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *(int32_t*)index = kStoreMetaDataExtensionIndex;
- return OMX_ErrorNone;
- }
- return OMX_ErrorUndefined;
-}
-
-uint8_t *SoftMPEG4Encoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
- OMX_U32 type = *(OMX_U32*)data;
- status_t res;
- if (type != kMetadataBufferTypeGrallocSource) {
- ALOGE("Data passed in with metadata mode does not have type "
- "kMetadataBufferTypeGrallocSource (%d), has type %ld instead",
- kMetadataBufferTypeGrallocSource, type);
- return NULL;
- }
- buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
-
- const Rect rect(mVideoWidth, mVideoHeight);
- uint8_t *img;
- res = GraphicBufferMapper::get().lock(imgBuffer,
- GRALLOC_USAGE_HW_VIDEO_ENCODER,
- rect, (void**)&img);
- if (res != OK) {
- ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
- imgBuffer);
- return NULL;
- }
-
- *buffer = imgBuffer;
- return img;
-}
-
-void SoftMPEG4Encoder::releaseGrallocData(buffer_handle_t buffer) {
- if (mStoreMetaDataInBuffers) {
- GraphicBufferMapper::get().unlock(buffer);
- }
-}
-
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
const char *name, const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData, OMX_COMPONENTTYPE **component) {
- return new android::SoftMPEG4Encoder(name, callbacks, appData, component);
+ using namespace android;
+ if (!strcmp(name, "OMX.google.h263.encoder")) {
+ return new android::SoftMPEG4Encoder(
+ name, "video_encoder.h263", OMX_VIDEO_CodingH263, MEDIA_MIMETYPE_VIDEO_H263,
+ kH263ProfileLevels, NELEM(kH263ProfileLevels),
+ callbacks, appData, component);
+ } else if (!strcmp(name, "OMX.google.mpeg4.encoder")) {
+ return new android::SoftMPEG4Encoder(
+ name, "video_encoder.mpeg4", OMX_VIDEO_CodingMPEG4, MEDIA_MIMETYPE_VIDEO_MPEG4,
+ kMPEG4ProfileLevels, NELEM(kMPEG4ProfileLevels),
+ callbacks, appData, component);
+ } else {
+ CHECK(!"Unknown component");
+ }
+ return NULL;
}
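Several of the encoder changes above switch from a plain frames-per-second field to the base
class's mFramerate, which is kept in OMX Q16 fixed point (30 fps is 30 << 16). A small sketch
of the two conversions used in initEncParams(); the numbers are illustrative only:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t framerateQ16 = 30 << 16;   // xFramerate in Q16 format
        uint32_t timeIncRes   = 1000;       // encoder ticks per second

        uint32_t encFrameRate = framerateQ16 >> 16;                          // 30
        uint32_t tickPerSrc   = ((int64_t)timeIncRes << 16) / framerateQ16;  // 33

        printf("fps=%u tickPerSrc=%u\n", encFrameRate, tickPerSrc);
        return 0;
    }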
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index cc4ea8f..3389c37 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -19,17 +19,22 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/foundation/ABase.h>
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
#include "mp4enc_api.h"
namespace android {
-struct MediaBuffer;
+struct CodecProfileLevel;
-struct SoftMPEG4Encoder : public SimpleSoftOMXComponent {
+struct SoftMPEG4Encoder : public SoftVideoEncoderOMXComponent {
SoftMPEG4Encoder(
const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const char *mime,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component);
@@ -43,11 +48,6 @@ struct SoftMPEG4Encoder : public SimpleSoftOMXComponent {
virtual void onQueueFilled(OMX_U32 portIndex);
- // Override SoftOMXComponent methods
-
- virtual OMX_ERRORTYPE getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index);
-
protected:
virtual ~SoftMPEG4Encoder();
@@ -56,10 +56,6 @@ private:
kNumBuffers = 2,
};
- enum {
- kStoreMetaDataExtensionIndex = OMX_IndexVendorStartUnused + 1
- };
-
// OMX input buffer's timestamp and flags
typedef struct {
int64_t mTimeUs;
@@ -67,12 +63,6 @@ private:
} InputBufferInfo;
MP4EncodingMode mEncodeMode;
- int32_t mVideoWidth;
- int32_t mVideoHeight;
- int32_t mVideoFrameRate;
- int32_t mVideoBitRate;
- int32_t mVideoColorFormat;
- bool mStoreMetaDataInBuffers;
int32_t mIDRFrameRefreshIntervalInSec;
int64_t mNumInputFrames;
@@ -85,14 +75,10 @@ private:
uint8_t *mInputFrameData;
Vector<InputBufferInfo> mInputBufferInfoVec;
- void initPorts();
OMX_ERRORTYPE initEncParams();
OMX_ERRORTYPE initEncoder();
OMX_ERRORTYPE releaseEncoder();
- uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
- void releaseGrallocData(buffer_handle_t buffer);
-
DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4Encoder);
};
diff --git a/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h b/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
index a54fd8b..9451479 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
@@ -29,7 +29,7 @@ typedef int Int;
typedef unsigned short UShort;
typedef short Short;
typedef unsigned int Bool;
-typedef unsigned long ULong;
+typedef uint32_t ULong;
#define PV_CODEC_INIT 0
#define PV_CODEC_STOP 1
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp
index fa4ae23..8d7d9f1 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/src/dct.cpp
@@ -267,7 +267,7 @@ extern "C"
Void Block4x4DCT_AANwSub(Short *out, UChar *cur, UChar *pred, Int width)
{
Short *dst;
- register Int k0, k1, k2, k3, k4, k5, k6, k7;
+ Int k0, k1, k2, k3, k4, k5, k6, k7;
Int round;
Int k12 = 0x022A02D4;
Int k14 = 0x0188053A;
@@ -473,7 +473,7 @@ extern "C"
Void Block2x2DCT_AANwSub(Short *out, UChar *cur, UChar *pred, Int width)
{
Short *dst;
- register Int k0, k1, k2, k3, k4, k5, k6, k7;
+ Int k0, k1, k2, k3, k4, k5, k6, k7;
Int round;
Int k12 = 0x022A02D4;
Int k14 = 0x018803B2;
@@ -863,7 +863,7 @@ extern "C"
Void Block4x4DCT_AANIntra(Short *out, UChar *cur, UChar *dummy2, Int width)
{
Short *dst;
- register Int k0, k1, k2, k3, k4, k5, k6, k7;
+ Int k0, k1, k2, k3, k4, k5, k6, k7;
Int round;
Int k12 = 0x022A02D4;
Int k14 = 0x0188053A;
@@ -1050,7 +1050,7 @@ extern "C"
Void Block2x2DCT_AANIntra(Short *out, UChar *cur, UChar *dummy2, Int width)
{
Short *dst;
- register Int k0, k1, k2, k3, k4, k5, k6, k7;
+ Int k0, k1, k2, k3, k4, k5, k6, k7;
Int round;
Int k12 = 0x022A02D4;
Int k14 = 0x018803B2;
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp
index 06e8926..9a967c2 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/src/motion_comp.cpp
@@ -363,7 +363,7 @@ Int GetPredAdvBy0x0(
/* initialize offset to adjust pixel counter */
/* the next row; full-pel resolution */
- tmp = (ULong)prev & 0x3;
+ tmp = (uintptr_t)prev & 0x3;
if (tmp == 0) /* word-aligned */
{
@@ -466,7 +466,7 @@ Int GetPredAdvBy0x1(
/* Branch based on pixel location (half-pel or full-pel) for x and y */
rec -= 12; /* preset */
- tmp = (ULong)prev & 3;
+ tmp = (uintptr_t)prev & 3;
mask = 254;
mask |= (mask << 8);
mask |= (mask << 16); /* 0xFEFEFEFE */
@@ -791,7 +791,7 @@ Int GetPredAdvBy1x0(
/* Branch based on pixel location (half-pel or full-pel) for x and y */
rec -= 12; /* preset */
- tmp = (ULong)prev & 3;
+ tmp = (uintptr_t)prev & 3;
mask = 254;
mask |= (mask << 8);
mask |= (mask << 16); /* 0xFEFEFEFE */
@@ -1140,7 +1140,7 @@ Int GetPredAdvBy1x1(
mask |= (mask << 8);
mask |= (mask << 16); /* 0x3f3f3f3f */
- tmp = (ULong)prev & 3;
+ tmp = (uintptr_t)prev & 3;
rec -= 4; /* preset */
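The motion_comp.cpp changes above go together with narrowing ULong to uint32_t: casting a
pointer through a 32-bit integer would truncate the address on 64-bit builds, while uintptr_t
is guaranteed to be wide enough, and only the low two bits are needed for the word-alignment
test. A stand-alone illustration:

    #include <cstdint>
    #include <cstdio>

    int main() {
        unsigned char buf[16];
        const unsigned char *prev = buf + 3;    // offset the pointer by three bytes
        uintptr_t low = (uintptr_t)prev & 3;    // low two bits: 0 means word aligned
        printf("alignment offset = %u\n", (unsigned)low);
        return 0;
    }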
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h b/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
index 0d5a3e8..2d44482 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
@@ -60,7 +60,7 @@ typedef unsigned short UShort;
typedef short Short;
typedef short int SInt;
typedef unsigned int Bool;
-typedef unsigned long ULong;
+typedef uint32_t ULong;
typedef void Void;
#define PV_CODEC_INIT 0
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp b/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp
index 7ea5dc4..2aec815 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/src/vlc_encode.cpp
@@ -271,7 +271,7 @@ PutCBPY(Int cbpy, Char intra, BitstreamEncVideo *bitstream)
Int ind;
Int length;
- if ((intra == 0))
+ if (intra == 0)
cbpy = 15 - cbpy;
ind = cbpy;
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index 135c715..948ae29 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -48,7 +48,9 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
LOCAL_CFLAGS := \
- -DOSCL_UNUSED_ARG=
+ -D"OSCL_UNUSED_ARG(x)=(void)(x)"
+
+LOCAL_CFLAGS += -Werror
LOCAL_MODULE := libstagefright_mp3dec
@@ -69,6 +71,8 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index 7c382fb..5396022 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -49,6 +49,8 @@ SoftMP3::SoftMP3(
mNumChannels(2),
mSamplingRate(44100),
mSignalledError(false),
+ mSawInputEos(false),
+ mSignalledOutputEos(false),
mOutputPortSettingsChange(NONE) {
initPorts();
initDecoder();
@@ -144,6 +146,23 @@ OMX_ERRORTYPE SoftMP3::internalGetParameter(
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioMp3:
+ {
+ OMX_AUDIO_PARAM_MP3TYPE *mp3Params =
+ (OMX_AUDIO_PARAM_MP3TYPE *)params;
+
+ if (mp3Params->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ mp3Params->nChannels = mNumChannels;
+ mp3Params->nBitRate = 0 /* unknown */;
+ mp3Params->nSampleRate = mSamplingRate;
+ // other fields are encoder-only
+
+ return OMX_ErrorNone;
+ }
+
default:
return SimpleSoftOMXComponent::internalGetParameter(index, params);
}
@@ -186,7 +205,7 @@ OMX_ERRORTYPE SoftMP3::internalSetParameter(
}
}
-void SoftMP3::onQueueFilled(OMX_U32 portIndex) {
+void SoftMP3::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mOutputPortSettingsChange != NONE) {
return;
}
@@ -194,48 +213,36 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) {
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
- while (!inQueue.empty() && !outQueue.empty()) {
- BufferInfo *inInfo = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
+ BufferInfo *inInfo = NULL;
+ OMX_BUFFERHEADERTYPE *inHeader = NULL;
+ if (!inQueue.empty()) {
+ inInfo = *inQueue.begin();
+ inHeader = inInfo->mHeader;
+ }
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+ outHeader->nFlags = 0;
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
-
- if (!mIsFirst) {
- // pad the end of the stream with 529 samples, since that many samples
- // were trimmed off the beginning when decoding started
- outHeader->nFilledLen =
- kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t);
-
- memset(outHeader->pBuffer, 0, outHeader->nFilledLen);
- } else {
- // Since we never discarded frames from the start, we won't have
- // to add any padding at the end either.
- outHeader->nFilledLen = 0;
+ if (inHeader) {
+ if (inHeader->nOffset == 0 && inHeader->nFilledLen) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumFramesOutput = 0;
}
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ mSawInputEos = true;
+ }
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
- return;
- }
+ mConfig->pInputBuffer =
+ inHeader->pBuffer + inHeader->nOffset;
- if (inHeader->nOffset == 0) {
- mAnchorTimeUs = inHeader->nTimeStamp;
- mNumFramesOutput = 0;
+ mConfig->inputBufferCurrentLength = inHeader->nFilledLen;
+ } else {
+ mConfig->pInputBuffer = NULL;
+ mConfig->inputBufferCurrentLength = 0;
}
-
- mConfig->pInputBuffer =
- inHeader->pBuffer + inHeader->nOffset;
-
- mConfig->inputBufferCurrentLength = inHeader->nFilledLen;
mConfig->inputBufferMaxLength = 0;
mConfig->inputBufferUsedLength = 0;
@@ -262,13 +269,28 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) {
mConfig->outputFrameSize = kOutputBufferSize / sizeof(int16_t);
}
- // This is recoverable, just ignore the current frame and
- // play silence instead.
- memset(outHeader->pBuffer,
- 0,
- mConfig->outputFrameSize * sizeof(int16_t));
-
- mConfig->inputBufferUsedLength = inHeader->nFilledLen;
+ if (decoderErr == NO_ENOUGH_MAIN_DATA_ERROR && mSawInputEos) {
+ if (!mIsFirst) {
+ // pad the end of the stream with 529 samples, since that many samples
+ // were trimmed off the beginning when decoding started
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = kPVMP3DecoderDelay * mNumChannels * sizeof(int16_t);
+
+ memset(outHeader->pBuffer, 0, outHeader->nFilledLen);
+ }
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mSignalledOutputEos = true;
+ } else {
+ // This is recoverable, just ignore the current frame and
+ // play silence instead.
+ memset(outHeader->pBuffer,
+ 0,
+ mConfig->outputFrameSize * sizeof(int16_t));
+
+ if (inHeader) {
+ mConfig->inputBufferUsedLength = inHeader->nFilledLen;
+ }
+ }
} else if (mConfig->samplingRate != mSamplingRate
|| mConfig->num_channels != mNumChannels) {
mSamplingRate = mConfig->samplingRate;
@@ -289,7 +311,7 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) {
outHeader->nFilledLen =
mConfig->outputFrameSize * sizeof(int16_t) - outHeader->nOffset;
- } else {
+ } else if (!mSignalledOutputEos) {
outHeader->nOffset = 0;
outHeader->nFilledLen = mConfig->outputFrameSize * sizeof(int16_t);
}
@@ -298,23 +320,24 @@ void SoftMP3::onQueueFilled(OMX_U32 portIndex) {
mAnchorTimeUs
+ (mNumFramesOutput * 1000000ll) / mConfig->samplingRate;
- outHeader->nFlags = 0;
-
- CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength);
+ if (inHeader) {
+ CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength);
- inHeader->nOffset += mConfig->inputBufferUsedLength;
- inHeader->nFilledLen -= mConfig->inputBufferUsedLength;
+ inHeader->nOffset += mConfig->inputBufferUsedLength;
+ inHeader->nFilledLen -= mConfig->inputBufferUsedLength;
- mNumFramesOutput += mConfig->outputFrameSize / mNumChannels;
- if (inHeader->nFilledLen == 0) {
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
}
+ mNumFramesOutput += mConfig->outputFrameSize / mNumChannels;
+
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
outInfo = NULL;
@@ -329,6 +352,9 @@ void SoftMP3::onPortFlushCompleted(OMX_U32 portIndex) {
// depend on fragments from the last one decoded.
pvmp3_InitDecoder(mConfig, mDecoderBuf);
mIsFirst = true;
+ mSignalledError = false;
+ mSawInputEos = false;
+ mSignalledOutputEos = false;
}
}
@@ -362,6 +388,8 @@ void SoftMP3::onReset() {
pvmp3_InitDecoder(mConfig, mDecoderBuf);
mIsFirst = true;
mSignalledError = false;
+ mSawInputEos = false;
+ mSignalledOutputEos = false;
mOutputPortSettingsChange = NONE;
}
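The reworked SoftMP3 EOS path above keeps stamping output buffers from the anchor timestamp of
the most recent input buffer plus the number of PCM frames already produced. A stand-alone
sketch of that bookkeeping with illustrative numbers (one 1152-sample MP3 frame at 44.1 kHz):

    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t  anchorTimeUs    = 1000000;   // nTimeStamp of the input buffer, in us
        int64_t  numFramesOutput = 1152;      // PCM frames already emitted since the anchor
        uint32_t samplingRate    = 44100;

        int64_t outTimeStampUs =
                anchorTimeUs + (numFramesOutput * 1000000ll) / samplingRate;

        printf("nTimeStamp = %lld us\n", (long long)outTimeStampUs);  // 1026122
        return 0;
    }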
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h
index 4af91ea..f9e7b53 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.h
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h
@@ -61,6 +61,8 @@ private:
bool mIsFirst;
bool mSignalledError;
+ bool mSawInputEos;
+ bool mSignalledOutputEos;
enum {
NONE,
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_arm.s b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_arm.s
deleted file mode 100644
index 3a6dd4f..0000000
--- a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_dct_9_arm.s
+++ /dev/null
@@ -1,210 +0,0 @@
-; ------------------------------------------------------------------
-; Copyright (C) 1998-2009 PacketVideo
-;
-; Licensed under the Apache License, Version 2.0 (the "License");
-; you may not use this file except in compliance with the License.
-; You may obtain a copy of the License at
-;
-; http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing, software
-; distributed under the License is distributed on an "AS IS" BASIS,
-; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-; express or implied.
-; See the License for the specific language governing permissions
-; and limitations under the License.
-; -------------------------------------------------------------------
-
-;
-;
-; Filename: pvmp3_dct_9.s
-;
-;------------------------------------------------------------------------------
-; REVISION HISTORY
-;
-;
-; Who: Date: MM/DD/YYYY
-; Description:
-;
-;------------------------------------------------------------------------------
-
- AREA |.drectve|, DRECTVE
-
- DCB "-defaultlib:coredll.lib "
- DCB "-defaultlib:corelibc.lib "
-
- IMPORT pvmp3_mdct_18 ; pvmp3_mdct_18.cpp
-
-;------------------------------------------------------------------------------
-
- AREA |.rdata|, DATA, READONLY
- % 4
-
-
-;------------------------------------------------------------------------------
-
- AREA |.text|, CODE, READONLY
-
-
-;------------------------------------------------------------------------------
-
- EXPORT |pvmp3_dct_9|
-
-|pvmp3_dct_9| PROC
- stmfd sp!,{r4-r10,lr}
- ldr r2, [r0, #0x20]
- ldr r3, [r0]
- ldr r12,[r0, #4]
- add r1,r2,r3
- sub lr,r2,r3
- ldr r3,[r0, #0x1c]
- ldr r4,[r0, #0x18]
- add r2,r3,r12
- ldr r5,[r0,#8]
- sub r3,r3,r12
- add r12,r4,r5
- sub r4,r4,r5
- ldr r5,[r0, #0x14]
- ldr r7,[r0, #0xc]
- ldr r9,[r0, #0x10]
- add r6,r5,r7
- sub r5,r5,r7
- add r7,r1,r12
- add r8,r9,r2
- add r7,r7,r6
- add r10,r7,r8
- rsb r7,r8,r7,asr #1
- str r7,[r0, #0x18]
- rsb r2,r9,r2,asr #1
- str r10,[r0]
- ldr r11,|cos_2pi_9|
- rsb r7,r2,#0
-
- mov r9,r1,lsl #1
- mov r1,r9 ;;;;;; !!!!!!
- mov r8,r7
-
-; vec[4] = fxp_mac32_Q32( vec[4], tmp0<<1, cos_2pi_9);
-
- smlal r1,r8,r11,r9
- ldr r10,|cos_4pi_9|
- ldr r11,|cos_pi_9|
-
-; vec[8] = fxp_mac32_Q32( vec[8], tmp0<<1, cos_4pi_9);
-
- smlal r1,r7,r10,r9
-
-
-
-; vec[2] = fxp_mac32_Q32( vec[2], tmp0<<1, cos_pi_9);
-
- smlal r9,r2,r11,r9
- mov r1,r12,lsl #1
- rsb r9,r10,#0
- ldr r11,|cos_5pi_9|
-
- smlal r12,r2,r9,r1
-
-
-
-; vec[2] = fxp_mac32_Q32( vec[2], tmp2<<1, cos_5pi_9);
-
- ldr r9,|cos_2pi_9|
- mov r12,r1 ;;;;;; !!!!!!
- smlal r12,r8,r11,r1
-
-
-; vec[8] = fxp_mac32_Q32( vec[8], tmp2<<1, cos_2pi_9);
-
- smlal r1,r7,r9,r1
- mov r1,r6,lsl #1
- smlal r12,r7,r11,r1
- and r6,r10,r11,asr #14
- smlal r12,r8,r6,r1
- ldr r10,|cos_11pi_18|
- add r12,r11,r6
- smlal r1,r2,r12,r1
- ldr r9,|cos_8pi_9|
- str r2,[r0,#8]
- mov r1,r5,lsl #1
-
-; vec[8] = fxp_mac32_Q32( vec[8], tmp3<<1, cos_8pi_9);
-
- smull r2,r6,r9,r1
- str r7,[r0,#0x20]
- mov r2,r4,lsl #1
- ldr r7,|cos_13pi_18|
- smlal r12,r6,r10,r2
-
- mov r3,r3,lsl #1
-
-; vec[5] = fxp_mac32_Q32( vec[5], tmp8<<1, cos_13pi_18);
-
- smlal r12,r6,r7,r3
- add r4,r5,r4
- mov r12,lr,lsl #1
- sub lr,r4,lr
- ldr r7,|cos_17pi_18|
- str r8,[r0, #0x10]
- ldr r4,|cos_pi_6|
-
- mov lr,lr,lsl #1
-
-; vec[1] = fxp_mac32_Q32( vec[1], tmp8<<1, cos_17pi_18);
-
- smlal r8,r6,r7,r12
-
-; vec[3] = fxp_mul32_Q32((tmp5 + tmp6 - tmp8)<<1, cos_pi_6);
-
- smull r5,lr,r4,lr
- str r6,[r0, #4]
- str lr,[r0, #0xc]
-
-
-; vec[5] = fxp_mul32_Q32(tmp5<<1, cos_17pi_18);
- smull r5,lr,r7,r1
- rsb r6,r9,#0
-; vec[5] = fxp_mac32_Q32( vec[5], tmp6<<1, cos_7pi_18);
- smlal r5,lr,r6,r2
-; vec[5] = fxp_mac32_Q32( vec[5], tmp7<<1, cos_pi_6);
- smlal r5,lr,r4,r3
-; vec[5] = fxp_mac32_Q32( vec[5], tmp8<<1, cos_13pi_18);
- smlal r5,lr,r10,r12
- str lr,[r0, #0x14]
- rsb lr,r10,#0
-
-; vec[7] = fxp_mul32_Q32(tmp5<<1, cos_5pi_18);
- smull r5,r1,lr,r1
-; vec[7] = fxp_mac32_Q32( vec[7], tmp6<<1, cos_17pi_18);
- smlal r2,r1,r7,r2
-; vec[7] = fxp_mac32_Q32( vec[7], tmp7<<1, cos_pi_6);
- smlal r3,r1,r4,r3
-; vec[7] = fxp_mac32_Q32( vec[7], tmp8<<1, cos_11pi_18);
- smlal r12,r1,r9,r12
- str r1,[r0, #0x1c]
- ldmfd sp!,{r4-r10,pc}
-|cos_2pi_9|
- DCD 0x620dbe80
-|cos_4pi_9|
- DCD 0x163a1a80
-|cos_pi_9|
- DCD 0x7847d900
-|cos_5pi_9|
- DCD 0x87b82700
-|cos_8pi_9|
- DCD 0xd438af00
-|cos_11pi_18|
- DCD 0xadb92280
-|cos_13pi_18|
- DCD 0x91261480
-|cos_17pi_18|
- DCD 0x81f1d200
-|cos_pi_6|
- DCD 0x6ed9eb80
- ENDP
-
-
-
-
-
- END
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_arm.s b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_arm.s
deleted file mode 100644
index 9401d8c..0000000
--- a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_arm.s
+++ /dev/null
@@ -1,369 +0,0 @@
-; ------------------------------------------------------------------
-; Copyright (C) 1998-2009 PacketVideo
-;
-; Licensed under the Apache License, Version 2.0 (the "License");
-; you may not use this file except in compliance with the License.
-; You may obtain a copy of the License at
-;
-; http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing, software
-; distributed under the License is distributed on an "AS IS" BASIS,
-; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-; express or implied.
-; See the License for the specific language governing permissions
-; and limitations under the License.
-; -------------------------------------------------------------------
-
-;
-;
-; Filename: pvmp3_dct_18.s
-;
-;------------------------------------------------------------------------------
-; REVISION HISTORY
-;
-;
-; Who: Date: MM/DD/YYYY
-; Description:
-;
-;------------------------------------------------------------------------------
-
- EXPORT pvmp3_mdct_18
-
- IMPORT ||Lib$$Request$$armlib|| [WEAK]
- IMPORT ||Lib$$Request$$cpplib|| [WEAK]
- IMPORT pvmp3_dct_9
-
-
-;------------------------------------------------------------------------------
-
- AREA |.text|, CODE, READONLY, ALIGN=2
-
-
-;------------------------------------------------------------------------------
-
-|pvmp3_mdct_18| PROC
- stmfd sp!,{r4-r10,lr}
- mov r7,r2
- ldr r2,table
- mov r6,r1
- add r3,r2,#0x24
- add r12,r3,#0x44
- add r1,r0,#0x44
- mov r5,r0
-
-; for ( i=9; i!=0; i--)
-; {
-
- mov r4,#9
-Loop_1
-
-; tmp = *(pt_vec);
-; tmp1 = *(pt_vec_o);
-
- ldr lr,[r0] ;; tmp == lr
- ldr r8,[r3],#4 ;; tmp1 == r8
-
-; tmp = fxp_mul32_Q32( tmp<<1, *(pt_cos++ ));
-; tmp1 = fxp_mul32_Q27( tmp1, *(pt_cos_x--));
-
- mov lr,lr,lsl #1
- smull r10,lr,r8,lr
- ldr r8,[r12],#-4
- ldr r9,[r1]
- subs r4,r4,#1
- smull r9,r10,r8,r9
- mov r8,r9,lsr #27
- add r8,r8,r10,lsl #5
-
-; *(pt_vec++) = tmp + tmp1 ;
-; *(pt_vec_o--) = fxp_mul32_Q28( (tmp - tmp1), *(pt_cos_split++));
-
- add r9,lr,r8
- sub r8,lr,r8
- ldr lr,[r2],#4
- str r9,[r0],#4
- smull r8,r9,lr,r8
- mov lr,r8,lsr #28
- add lr,lr,r9,lsl #4
- str lr,[r1],#-4
- bne Loop_1
-
-; }
-
- mov r0,r5 ;; r0 = vec
- bl pvmp3_dct_9
- add r0,r5,#0x24 ;; r0 = &vec[9]
- bl pvmp3_dct_9
-
- ldr r0,[r5,#0x20]
- ldr r2,[r5,#0x40]
- str r0,[r5,#0x40]
- ldr r0,[r5,#0x1c]
- ldr r3,[r5,#0x38]
- str r0,[r5,#0x38]
- ldr r1,[r5,#0x18]
- ldr r0,[r5,#0x30]
- str r1,[r5,#0x30]
- ldr r12,[r5,#0x14]
- ldr r1,[r5,#0x28]
- str r12,[r5,#0x28]
- ldr r12,[r5,#0x10]
- str r12,[r5,#0x20]
- ldr r12,[r5,#0xc]
- str r12,[r5,#0x18]
- ldr r12,[r5,#8]
- str r12,[r5,#0x10]
- ldr r12,[r5,#4]
- str r12,[r5,#8]
- ldr r12,[r5,#0x24]
- sub r12,r12,r1
- str r12,[r5,#4]
- ldr r12,[r5,#0x2c]
- sub r1,r12,r1
- str r1,[r5,#0xc]
- sub r1,r12,r0
- str r1,[r5,#0x14]
- ldr r1,[r5,#0x34]
- sub r0,r1,r0
- str r0,[r5,#0x1c]
- sub r0,r1,r3
- str r0,[r5,#0x24]
- ldr r1,[r5,#0x3c]
- sub r3,r1,r3
- sub r1,r1,r2
- str r1,[r5,#0x34]
- str r3,[r5,#0x2c]
- ldr r1,[r5,#0x44]
- sub r1,r1,r2
- str r1,[r5,#0x3c]
- ldr r12,[r5,#0]
-
-Loop_2
- add r1,r5,r4,lsl #2
- ldr r2,[r1,#0x28]
- ldr r3,[r6,r4,lsl #2]
- add r0,r0,r2
- str r0,[r1,#0x28]
- ldr lr,[r7,r4,lsl #2]
- ldr r1,[r1,#4]
- smlal r0,r3,lr,r0
- mov r0,r2
- add r2,r12,r1
- rsb r2,r2,#0
- str r3,[r5,r4,lsl #2]
- str r2,[r6,r4,lsl #2]
- add r4,r4,#1
- cmp r4,#6
- mov r12,r1
-
- blt Loop_2
-
- ldr r1,[r5,#0x40]
- ldr r2,[r6,#0x18]
- add r3,r0,r1
- str r3,[r5,#0x40]
- ldr lr,[r7,r4,lsl #2]
- mov r3,r3,lsl #1
- ldr r0,[r5,#0x1c]
- smlal r3,r2,lr,r3
- add r3,r12,r0
- str r2,[r5,#0x18]
- ldr r2,[r6,#0x1c]
- rsb r3,r3,#0
- str r3,[r6,#0x18]
- ldr r3,[r5,#0x20]
- add r0,r3,r0
- rsb r0,r0,#0
- str r0,[r6,#0x1c]
- ldr r3,[r5,#0x44]
- ldr r0,[r6,#0x20]
- add r3,r3,r1
- mov r1,r2
- ldr r10,[r7,#0x1c]
- mov r2,r3,lsl #1
- smlal r12,r1,r10,r2
- str r1,[r5,#0x1c]
- ldr r1,[r5,#0x20]
- ldr r3,[r5,#0x24]
- add r1,r1,r3
- rsb r1,r1,#0
- str r1,[r6,#0x20]
- ldr r1,[r5,#0x44]
- ldr r3,[r7,#0x20]
- mov r1,r1,lsl #1
- smlal r12,r0,r3,r1
- ldr lr,[r7,#0x24]
- ldr r3,[r6,#0x24]
- str r0,[r5,#0x20]
- smlal r1,r3,lr,r1
- ldr r0,[r6,#0x40]
- ldr r12,[r6,#0x44]
- str r3,[r5,#0x24]
- ldr r1,[r5,#0x28]
- ldr r3,[r7,#0x44]
- mov r1,r1,lsl #1
- smlal r1,r12,r3,r1
- ldr r1,[r5,#0x40]
- str r12,[r5,#0x44]
- rsb r8,r1,#0
- str r8,[r5,#0x28]
- ldr r1,[r5,#0x2c]
- ldr r3,[r7,#0x40]
- mov r1,r1,lsl #1
- smlal r1,r0,r3,r1
- str r0,[r5,#0x40]
- ldr r0,[r5,#0x3c]
- ldr r1,[r6,#0x38]
- ldr r3,[r6,#0x3c]
- rsb r9,r0,#0
- str r9,[r5,#0x2c]
- ldr r0,[r5,#0x30]
- ldr r12,[r7,#0x3c]
- mov r0,r0,lsl #1
- smlal r0,r3,r12,r0
- str r3,[r5,#0x3c]
- ldr r0,[r5,#0x38]
- rsb r0,r0,#0
- str r0,[r5,#0x30]
- ldr r3,[r5,#0x34]
- ldr r12,[r7,#0x38]
- mov r3,r3,lsl #1
- smlal r3,r1,r12,r3
- mov r0,r0,lsl #1
- str r1,[r5,#0x38]
- ldr r4,[r7,#0x34]
- ldr r1,[r6,#0x34]
- ldr r3,[r6,#0x30]
- smlal r0,r1,r4,r0
- ldr r12,[r6,#0x2c]
- ldr lr,[r6,#0x28]
- str r1,[r5,#0x34]
- ldr r1,[r7,#0x30]
- mov r0,r9,lsl #1
- smlal r0,r3,r1,r0
- mov r0,r8,lsl #1
- ldr r1,[r7,#0x2c]
- str r3,[r5,#0x30]
- smlal r0,r12,r1,r0
- ldr r0,[r7,#0x28]
- str r12,[r5,#0x2c]
- smlal r2,lr,r0,r2
- str lr,[r5,#0x28]
- ldr r1,[r6,#4]
- ldr r12,[r7,#0x48]
- mov r2,r1,lsl #1
- ldr r1,[r6,#0x20]
- ldr r0,[r6]
- mov r1,r1,lsl #1
- smull r4,lr,r12,r1
- ldr r3,[r6,#0x1c]
- str lr,[r6]
- ldr r12,[r7,#0x4c]
- mov r3,r3,lsl #1
- smull r4,lr,r12,r3
- mov r0,r0,lsl #1
- ldr r12,[r7,#0x64]
- str lr,[r6,#4]
- smull r4,lr,r12,r2
- ldr r12,[r7,#0x68]
- str lr,[r6,#0x1c]
- smull r4,lr,r12,r0
- ldr r12,[r7,#0x6c]
- str lr,[r6,#0x20]
- smull lr,r0,r12,r0
- ldr r12,[r7,#0x70]
- str r0,[r6,#0x24]
- smull r0,r2,r12,r2
- ldr r0,[r7,#0x88]
- str r2,[r6,#0x28]
- smull r3,r2,r0,r3
- ldr r0,[r7,#0x8c]
- str r2,[r6,#0x40]
- smull r2,r1,r0,r1
- str r1,[r6,#0x44]
- ldr r0,[r6,#0x18]
- ldr lr,[r7,#0x50]
- mov r1,r0,lsl #1
- ldr r0,[r6,#0x14]
- smull r5,r4,lr,r1
- ldr r12,[r6,#0x10]
- mov r3,r0,lsl #1
- ldr r0,[r6,#0xc]
- mov r12,r12,lsl #1
- mov r2,r0,lsl #1
- ldr r0,[r6,#8]
- str r4,[r6,#8]
- ldr lr,[r7,#0x54]
- mov r0,r0,lsl #1
- smull r5,r4,lr,r3
- ldr lr,[r7,#0x58]
- str r4,[r6,#0xc]
- smull r5,r4,lr,r12
- ldr lr,[r7,#0x5c]
- str r4,[r6,#0x10]
- smull r5,r4,lr,r2
- ldr lr,[r7,#0x60]
- str r4,[r6,#0x14]
- smull r5,r4,lr,r0
- ldr lr,[r7,#0x74]
- str r4,[r6,#0x18]
- smull r4,r0,lr,r0
- ldr lr,[r7,#0x78]
- str r0,[r6,#0x2c]
- smull r0,r2,lr,r2
- ldr r0,[r7,#0x7c]
- str r2,[r6,#0x30]
- smull r12,r2,r0,r12
- ldr r0,[r7,#0x80]
- str r2,[r6,#0x34]
- smull r3,r2,r0,r3
- ldr r0,[r7,#0x84]
- str r2,[r6,#0x38]
- smull r2,r1,r0,r1
- str r1,[r6,#0x3c]
- ldmfd sp!,{r4-r10,pc}
-table
- DCD ||.constdata$1||
- ENDP
-
-;------------------------------------------------------------------------------
-
- AREA |.constdata|, DATA, READONLY, ALIGN=2
-
-;------------------------------------------------------------------------------
-
-||.constdata$1||
-cosTerms_dct18
- DCD 0x0807d2b0
- DCD 0x08483ee0
- DCD 0x08d3b7d0
- DCD 0x09c42570
- DCD 0x0b504f30
- DCD 0x0df29440
- DCD 0x12edfb20
- DCD 0x1ee8dd40
- DCD 0x5bca2a00
-cosTerms_1_ov_cos_phi
- DCD 0x400f9c00
- DCD 0x408d6080
- DCD 0x418dcb80
- DCD 0x431b1a00
- DCD 0x4545ea00
- DCD 0x48270680
- DCD 0x4be25480
- DCD 0x50ab9480
- DCD 0x56ce4d80
- DCD 0x05ebb630
- DCD 0x06921a98
- DCD 0x0771d3a8
- DCD 0x08a9a830
- DCD 0x0a73d750
- DCD 0x0d4d5260
- DCD 0x127b1ca0
- DCD 0x1ea52b40
- DCD 0x5bb3cc80
-
-
-
- END
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_wm.asm b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_wm.asm
deleted file mode 100644
index 5be75d4..0000000
--- a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_mdct_18_wm.asm
+++ /dev/null
@@ -1,366 +0,0 @@
-; ------------------------------------------------------------------
-; Copyright (C) 1998-2009 PacketVideo
-;
-; Licensed under the Apache License, Version 2.0 (the "License");
-; you may not use this file except in compliance with the License.
-; You may obtain a copy of the License at
-;
-; http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing, software
-; distributed under the License is distributed on an "AS IS" BASIS,
-; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-; express or implied.
-; See the License for the specific language governing permissions
-; and limitations under the License.
-; -------------------------------------------------------------------
-
-;
-;
-; Filename: pvmp3_dct_18.s
-;
-;------------------------------------------------------------------------------
-; REVISION HISTORY
-;
-;
-; Who: Date: MM/DD/YYYY
-; Description:
-;
-;------------------------------------------------------------------------------
-
- EXPORT |pvmp3_mdct_18|
-
- IMPORT pvmp3_dct_9
-
-
-;------------------------------------------------------------------------------
-
- AREA |.text|, CODE, READONLY, ALIGN=2
-
-
-;------------------------------------------------------------------------------
-
-|pvmp3_mdct_18| PROC
- stmfd sp!,{r4-r10,lr}
- mov r7,r2
- ldr r2,table
- mov r6,r1
- add r3,r2,#0x24
- add r12,r3,#0x44
- add r1,r0,#0x44
- mov r5,r0
-
-; for ( i=9; i!=0; i--)
-; {
-
- mov r4,#9
-Loop_1
-
-; tmp = *(pt_vec);
-; tmp1 = *(pt_vec_o);
-
- ldr lr,[r0] ;; tmp == lr
- ldr r8,[r3],#4 ;; tmp1 == r8
-
-; tmp = fxp_mul32_Q32( tmp<<1, *(pt_cos++ ));
-; tmp1 = fxp_mul32_Q27( tmp1, *(pt_cos_x--));
-
- mov lr,lr,lsl #1
- smull r10,lr,r8,lr
- ldr r8,[r12],#-4
- ldr r9,[r1]
- subs r4,r4,#1
- smull r9,r10,r8,r9
- mov r8,r9,lsr #27
- add r8,r8,r10,lsl #5
-
-; *(pt_vec++) = tmp + tmp1 ;
-; *(pt_vec_o--) = fxp_mul32_Q28( (tmp - tmp1), *(pt_cos_split++));
-
- add r9,lr,r8
- sub r8,lr,r8
- ldr lr,[r2],#4
- str r9,[r0],#4
- smull r8,r9,lr,r8
- mov lr,r8,lsr #28
- add lr,lr,r9,lsl #4
- str lr,[r1],#-4
- bne Loop_1
-
-; }
-
- mov r0,r5 ;; r0 = vec
- bl pvmp3_dct_9
- add r0,r5,#0x24 ;; r0 = &vec[9]
- bl pvmp3_dct_9
-
- ldr r0,[r5,#0x20]
- ldr r2,[r5,#0x40]
- str r0,[r5,#0x40]
- ldr r0,[r5,#0x1c]
- ldr r3,[r5,#0x38]
- str r0,[r5,#0x38]
- ldr r1,[r5,#0x18]
- ldr r0,[r5,#0x30]
- str r1,[r5,#0x30]
- ldr r12,[r5,#0x14]
- ldr r1,[r5,#0x28]
- str r12,[r5,#0x28]
- ldr r12,[r5,#0x10]
- str r12,[r5,#0x20]
- ldr r12,[r5,#0xc]
- str r12,[r5,#0x18]
- ldr r12,[r5,#8]
- str r12,[r5,#0x10]
- ldr r12,[r5,#4]
- str r12,[r5,#8]
- ldr r12,[r5,#0x24]
- sub r12,r12,r1
- str r12,[r5,#4]
- ldr r12,[r5,#0x2c]
- sub r1,r12,r1
- str r1,[r5,#0xc]
- sub r1,r12,r0
- str r1,[r5,#0x14]
- ldr r1,[r5,#0x34]
- sub r0,r1,r0
- str r0,[r5,#0x1c]
- sub r0,r1,r3
- str r0,[r5,#0x24]
- ldr r1,[r5,#0x3c]
- sub r3,r1,r3
- sub r1,r1,r2
- str r1,[r5,#0x34]
- str r3,[r5,#0x2c]
- ldr r1,[r5,#0x44]
- sub r1,r1,r2
- str r1,[r5,#0x3c]
- ldr r12,[r5,#0]
-
-Loop_2
- add r1,r5,r4,lsl #2
- ldr r2,[r1,#0x28]
- ldr r3,[r6,r4,lsl #2]
- add r0,r0,r2
- str r0,[r1,#0x28]
- ldr lr,[r7,r4,lsl #2]
- ldr r1,[r1,#4]
- smlal r0,r3,lr,r0
- mov r0,r2
- add r2,r12,r1
- rsb r2,r2,#0
- str r3,[r5,r4,lsl #2]
- str r2,[r6,r4,lsl #2]
- add r4,r4,#1
- cmp r4,#6
- mov r12,r1
-
- blt Loop_2
-
- ldr r1,[r5,#0x40]
- ldr r2,[r6,#0x18]
- add r3,r0,r1
- str r3,[r5,#0x40]
- ldr lr,[r7,r4,lsl #2]
- mov r3,r3,lsl #1
- ldr r0,[r5,#0x1c]
- smlal r3,r2,lr,r3
- add r3,r12,r0
- str r2,[r5,#0x18]
- ldr r2,[r6,#0x1c]
- rsb r3,r3,#0
- str r3,[r6,#0x18]
- ldr r3,[r5,#0x20]
- add r0,r3,r0
- rsb r0,r0,#0
- str r0,[r6,#0x1c]
- ldr r3,[r5,#0x44]
- ldr r0,[r6,#0x20]
- add r3,r3,r1
- mov r1,r2
- ldr r10,[r7,#0x1c]
- mov r2,r3,lsl #1
- smlal r12,r1,r10,r2
- str r1,[r5,#0x1c]
- ldr r1,[r5,#0x20]
- ldr r3,[r5,#0x24]
- add r1,r1,r3
- rsb r1,r1,#0
- str r1,[r6,#0x20]
- ldr r1,[r5,#0x44]
- ldr r3,[r7,#0x20]
- mov r1,r1,lsl #1
- smlal r12,r0,r3,r1
- ldr lr,[r7,#0x24]
- ldr r3,[r6,#0x24]
- str r0,[r5,#0x20]
- smlal r1,r3,lr,r1
- ldr r0,[r6,#0x40]
- ldr r12,[r6,#0x44]
- str r3,[r5,#0x24]
- ldr r1,[r5,#0x28]
- ldr r3,[r7,#0x44]
- mov r1,r1,lsl #1
- smlal r1,r12,r3,r1
- ldr r1,[r5,#0x40]
- str r12,[r5,#0x44]
- rsb r8,r1,#0
- str r8,[r5,#0x28]
- ldr r1,[r5,#0x2c]
- ldr r3,[r7,#0x40]
- mov r1,r1,lsl #1
- smlal r1,r0,r3,r1
- str r0,[r5,#0x40]
- ldr r0,[r5,#0x3c]
- ldr r1,[r6,#0x38]
- ldr r3,[r6,#0x3c]
- rsb r9,r0,#0
- str r9,[r5,#0x2c]
- ldr r0,[r5,#0x30]
- ldr r12,[r7,#0x3c]
- mov r0,r0,lsl #1
- smlal r0,r3,r12,r0
- str r3,[r5,#0x3c]
- ldr r0,[r5,#0x38]
- rsb r0,r0,#0
- str r0,[r5,#0x30]
- ldr r3,[r5,#0x34]
- ldr r12,[r7,#0x38]
- mov r3,r3,lsl #1
- smlal r3,r1,r12,r3
- mov r0,r0,lsl #1
- str r1,[r5,#0x38]
- ldr r4,[r7,#0x34]
- ldr r1,[r6,#0x34]
- ldr r3,[r6,#0x30]
- smlal r0,r1,r4,r0
- ldr r12,[r6,#0x2c]
- ldr lr,[r6,#0x28]
- str r1,[r5,#0x34]
- ldr r1,[r7,#0x30]
- mov r0,r9,lsl #1
- smlal r0,r3,r1,r0
- mov r0,r8,lsl #1
- ldr r1,[r7,#0x2c]
- str r3,[r5,#0x30]
- smlal r0,r12,r1,r0
- ldr r0,[r7,#0x28]
- str r12,[r5,#0x2c]
- smlal r2,lr,r0,r2
- str lr,[r5,#0x28]
- ldr r1,[r6,#4]
- ldr r12,[r7,#0x48]
- mov r2,r1,lsl #1
- ldr r1,[r6,#0x20]
- ldr r0,[r6]
- mov r1,r1,lsl #1
- smull r4,lr,r12,r1
- ldr r3,[r6,#0x1c]
- str lr,[r6]
- ldr r12,[r7,#0x4c]
- mov r3,r3,lsl #1
- smull r4,lr,r12,r3
- mov r0,r0,lsl #1
- ldr r12,[r7,#0x64]
- str lr,[r6,#4]
- smull r4,lr,r12,r2
- ldr r12,[r7,#0x68]
- str lr,[r6,#0x1c]
- smull r4,lr,r12,r0
- ldr r12,[r7,#0x6c]
- str lr,[r6,#0x20]
- smull lr,r0,r12,r0
- ldr r12,[r7,#0x70]
- str r0,[r6,#0x24]
- smull r0,r2,r12,r2
- ldr r0,[r7,#0x88]
- str r2,[r6,#0x28]
- smull r3,r2,r0,r3
- ldr r0,[r7,#0x8c]
- str r2,[r6,#0x40]
- smull r2,r1,r0,r1
- str r1,[r6,#0x44]
- ldr r0,[r6,#0x18]
- ldr lr,[r7,#0x50]
- mov r1,r0,lsl #1
- ldr r0,[r6,#0x14]
- smull r5,r4,lr,r1
- ldr r12,[r6,#0x10]
- mov r3,r0,lsl #1
- ldr r0,[r6,#0xc]
- mov r12,r12,lsl #1
- mov r2,r0,lsl #1
- ldr r0,[r6,#8]
- str r4,[r6,#8]
- ldr lr,[r7,#0x54]
- mov r0,r0,lsl #1
- smull r5,r4,lr,r3
- ldr lr,[r7,#0x58]
- str r4,[r6,#0xc]
- smull r5,r4,lr,r12
- ldr lr,[r7,#0x5c]
- str r4,[r6,#0x10]
- smull r5,r4,lr,r2
- ldr lr,[r7,#0x60]
- str r4,[r6,#0x14]
- smull r5,r4,lr,r0
- ldr lr,[r7,#0x74]
- str r4,[r6,#0x18]
- smull r4,r0,lr,r0
- ldr lr,[r7,#0x78]
- str r0,[r6,#0x2c]
- smull r0,r2,lr,r2
- ldr r0,[r7,#0x7c]
- str r2,[r6,#0x30]
- smull r12,r2,r0,r12
- ldr r0,[r7,#0x80]
- str r2,[r6,#0x34]
- smull r3,r2,r0,r3
- ldr r0,[r7,#0x84]
- str r2,[r6,#0x38]
- smull r2,r1,r0,r1
- str r1,[r6,#0x3c]
- ldmfd sp!,{r4-r10,pc}
-table
- DCD cosTerms_dct18
- ENDP
-
-;------------------------------------------------------------------------------
-
- AREA |.constdata|, DATA, READONLY, ALIGN=2
-
-;------------------------------------------------------------------------------
-
-cosTerms_dct18
- DCD 0x0807d2b0
- DCD 0x08483ee0
- DCD 0x08d3b7d0
- DCD 0x09c42570
- DCD 0x0b504f30
- DCD 0x0df29440
- DCD 0x12edfb20
- DCD 0x1ee8dd40
- DCD 0x5bca2a00
-cosTerms_1_ov_cos_phi
- DCD 0x400f9c00
- DCD 0x408d6080
- DCD 0x418dcb80
- DCD 0x431b1a00
- DCD 0x4545ea00
- DCD 0x48270680
- DCD 0x4be25480
- DCD 0x50ab9480
- DCD 0x56ce4d80
- DCD 0x05ebb630
- DCD 0x06921a98
- DCD 0x0771d3a8
- DCD 0x08a9a830
- DCD 0x0a73d750
- DCD 0x0d4d5260
- DCD 0x127b1ca0
- DCD 0x1ea52b40
- DCD 0x5bb3cc80
-
-
-
- END
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_arm.s b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_arm.s
deleted file mode 100644
index abec599..0000000
--- a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_arm.s
+++ /dev/null
@@ -1,237 +0,0 @@
-; ------------------------------------------------------------------
-; Copyright (C) 1998-2009 PacketVideo
-;
-; Licensed under the Apache License, Version 2.0 (the "License");
-; you may not use this file except in compliance with the License.
-; You may obtain a copy of the License at
-;
-; http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing, software
-; distributed under the License is distributed on an "AS IS" BASIS,
-; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-; express or implied.
-; See the License for the specific language governing permissions
-; and limitations under the License.
-; -------------------------------------------------------------------
-
-;
-;
-; Filename: pvmp3_polyphase_filter_window.s
-;
-;------------------------------------------------------------------------------
-; REVISION HISTORY
-;
-;
-; Who: Date: MM/DD/YYYY
-; Description:
-;
-;------------------------------------------------------------------------------
-
- EXPORT pvmp3_polyphase_filter_window
-
- IMPORT ||Lib$$Request$$armlib|| [WEAK]
- IMPORT ||Lib$$Request$$cpplib|| [WEAK]
- IMPORT pqmfSynthWin
-
-
-
-;------------------------------------------------------------------------------
-
- AREA |.text|, CODE, READONLY, ALIGN=2
-
-
-;------------------------------------------------------------------------------
-
-|pvmp3_polyphase_filter_window| PROC
-
- stmfd sp!,{r0-r2,r4-r11,lr}
-
- sub sp,sp,#4
- ldr r2,[sp,#0xc]
- ldr r1,PolyPh_filter_coeff
-
- sub r2,r2,#1
- mov r10,#1
- str r2,[sp]
-
-; Accumulators r9, r11::> Initialization
-
-Loop_j
- mov r9, #0x20
- mov r11, #0x20
- mov r4, #0x10
-Loop_i
- add r2,r4,r10
- add r3,r0,r2,lsl #2
- sub r2,r4,r10
- ldr r5,[r3]
- ldr lr,[r1]
- add r12,r0,r2,lsl #2
- ldr r6,[r12,#0x780]
- smlal r2,r9,lr,r5
- smlal r2,r11,lr,r6
- ldr r2,[r1,#4]
- ldr r7,[r12,#0x80]
- smlal r5,r11,r2,r5
- smull r6,r5,r2,r6
- sub r9,r9,r5
- ldr r5,[r1,#8]
- ldr r8,[r3,#0x700]
- add r4,r4,#0x200
- smlal r6,r9,r5,r7
- smull r6,r2,r5,r8
- ldr r5,[r1,#0xc]
- sub r11,r11,r2
- smlal r8,r9,r5,r8
- smlal r7,r11,r5,r7
- ldr r5,[r3,#0x100]
- ldr r2,[r1,#0x10]
- ldr r6,[r12,#0x680]
- smlal lr,r9,r2,r5
- smlal lr,r11,r2,r6
- ldr r2,[r1,#0x14]
- ldr r7,[r12,#0x180]
- smlal r5,r11,r2,r5
- smull r6,r5,r2,r6
- ldr r6,[r1,#0x18]
- ldr r8,[r3,#0x600]
- sub r9,r9,r5
- smlal r5,r9,r6,r7
- smull r2,r5,r6,r8
- ldr r6,[r1,#0x1c]
- sub r11,r11,r5
- smlal r8,r9,r6,r8
- ldr r2,[r1,#0x20]
- ldr r5,[r3,#0x200]
- smlal r7,r11,r6,r7
- ldr r6,[r12,#0x580]
- smlal lr,r9,r2,r5
- smlal lr,r11,r2,r6
- ldr r2,[r1,#0x24]
- ldr r7,[r12,#0x280]
- smlal r5,r11,r2,r5
- smull r6,r5,r2,r6
- ldr r6,[r1,#0x28]
- ldr r8,[r3,#0x500]
- sub r9,r9,r5
- smlal r5,r9,r6,r7
- smull r2,r5,r6,r8
- ldr r6,[r1,#0x2c]
- sub r11,r11,r5
-
- smlal r8,r9,r6,r8
- smlal r7,r11,r6,r7
- ldr r5,[r3,#0x300]
- ldr r8,[r1,#0x30]
- ldr r6,[r12,#0x480]
- smlal r7,r9,r8,r5
- smlal r7,r11,r8,r6
- ldr r8,[r1,#0x34]
- ldr r12,[r12,#0x380]
- smlal r5,r11,r8,r5
- smull r6,r5,r8,r6
- ldr r6,[r1,#0x38]
-
-
- ldr r3,[r3,#0x400]
- sub r9,r9,r5
- smlal r7,r9,r6,r12
- smull r8,r7,r6,r3
- cmp r4,#0x210
- sub r11,r11,r7
-
- ldr r2,[r1,#0x3c]
- add r1,r1,#0x40
- smlal r3,r9,r2,r3
- smlal r12,r11,r2,r12
-
- blt Loop_i
-
- mov r3,r9, asr #6
- mov r4,r3, asr #15
- teq r4,r3, asr #31
- ldr r12,LOW_16BITS
- ldr r2,[sp]
- eorne r3,r12,r3,asr #31
- ldr r4,[sp,#8]
- mov r2,r10,lsl r2
- add r4,r4,r2,lsl #1
- strh r3,[r4]
-
- mov r3,r11,asr #6
- mov r4,r3,asr #15
- teq r4,r3,asr #31
- eorne r3,r12,r3,asr #31
- ldr r12,[sp,#0xc]
- ldr r11,[sp,#8]
- rsb r2,r2,r12,lsl #5
- add r2,r11,r2,lsl #1
- strh r3,[r2]
-
- add r10,r10,#1
- cmp r10,#0x10
- blt Loop_j
-
-; Accumulators r4, r5 Initialization
-
- mov r4,#0x20
- mov r5,#0x20
- mov r3,#0x10
-PolyPh_filter_loop2
- add r2,r0,r3,lsl #2
- ldr r12,[r2]
- ldr r8,[r1]
- ldr r6,[r2,#0x80]
- smlal r12,r4,r8,r12
- ldr r12,[r1,#4]
- ldr r7,[r2,#0x40]
- smlal r6,r4,r12,r6
-
- ldr r12,[r1,#8]
- ldr r6,[r2,#0x180]
- smlal r7,r5,r12,r7
- ldr r12,[r2,#0x100]
- ldr r7,[r1,#0xc]
- ldr r2,[r2,#0x140]
- smlal r12,r4,r7,r12
- ldr r12,[r1,#0x10]
- add r3,r3,#0x80
- smlal r6,r4,r12,r6
- ldr r6,[r1,#0x14]
- cmp r3,#0x210
- smlal r2,r5,r6,r2
- add r1,r1,#0x18
-
- blt PolyPh_filter_loop2
- mov r0,r4,asr #6
- mov r2,r0,asr #15
- teq r2,r0,asr #31
- ldrne r12,LOW_16BITS
- ldr r1,[sp,#8]
- eorne r0,r12,r0,asr #31
- strh r0,[r1,#0]
- mov r0,r5,asr #6
- mov r2,r0,asr #15
- teq r2,r0,asr #31
- ldrne r12,LOW_16BITS
- ldr r2,[sp]
- mov r1,#0x10
- eorne r0,r12,r0,asr #31
- ldr r12,[sp,#8]
- mov r1,r1,lsl r2
- add r1,r12,r1,lsl #1
- strh r0,[r1]
- add sp,sp,#0x10
- ldmfd sp!,{r4-r11,pc}
-
-
-PolyPh_filter_coeff
- DCD pqmfSynthWin
-LOW_16BITS
- DCD 0x00007fff
-
- ENDP
-
-
- END
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
index b74c849..1140ed7 100644
--- a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
+++ b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_gcc.s
@@ -35,6 +35,7 @@
.text
.extern pqmfSynthWin
+.hidden pqmfSynthWin
diff --git a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_wm.asm b/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_wm.asm
deleted file mode 100644
index f957267..0000000
--- a/media/libstagefright/codecs/mp3dec/src/asm/pvmp3_polyphase_filter_window_wm.asm
+++ /dev/null
@@ -1,231 +0,0 @@
-; ------------------------------------------------------------------
-; Copyright (C) 1998-2009 PacketVideo
-;
-; Licensed under the Apache License, Version 2.0 (the "License");
-; you may not use this file except in compliance with the License.
-; You may obtain a copy of the License at
-;
-; http://www.apache.org/licenses/LICENSE-2.0
-;
-; Unless required by applicable law or agreed to in writing, software
-; distributed under the License is distributed on an "AS IS" BASIS,
-; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-; express or implied.
-; See the License for the specific language governing permissions
-; and limitations under the License.
-; -------------------------------------------------------------------
-
-;
-;
-; Filename: pvmp3_polyphase_filter_window.s
-;
-;------------------------------------------------------------------------------
-; REVISION HISTORY
-;
-;
-; Who: Date: MM/DD/YYYY
-; Description:
-;
-;------------------------------------------------------------------------------
-
- CODE32
-
- AREA |.drectve|, DRECTVE
-
- EXPORT |pvmp3_polyphase_filter_window|
- IMPORT |pqmfSynthWin|
-
- AREA |.pdata|, PDATA
-
- AREA |.text|, CODE, ARM
-
-|pvmp3_polyphase_filter_window| PROC
- stmfd sp!,{r0-r2,r4-r11,lr}
-
- sub sp,sp,#4
- ldr r2,[sp,#0xc]
- ldr r1,PolyPh_filter_coeff
-
- sub r2,r2,#1
- mov r10,#1
- str r2,[sp]
-
-; Accumulators r9, r11::> Initialization
-
-Loop_j
- mov r9, #0x20
- mov r11, #0x20
- mov r4, #0x10
-Loop_i
- add r2,r4,r10
- add r3,r0,r2,lsl #2
- sub r2,r4,r10
- ldr r5,[r3]
- ldr lr,[r1]
- add r12,r0,r2,lsl #2
- ldr r6,[r12,#0x780]
- smlal r2,r9,lr,r5
- smlal r2,r11,lr,r6
- ldr r2,[r1,#4]
- ldr r7,[r12,#0x80]
- smlal r5,r11,r2,r5
- smull r6,r5,r2,r6
- sub r9,r9,r5
- ldr r5,[r1,#8]
- ldr r8,[r3,#0x700]
- add r4,r4,#0x200
- smlal r6,r9,r5,r7
- smull r6,r2,r5,r8
- ldr r5,[r1,#0xc]
- sub r11,r11,r2
- smlal r8,r9,r5,r8
- smlal r7,r11,r5,r7
- ldr r5,[r3,#0x100]
- ldr r2,[r1,#0x10]
- ldr r6,[r12,#0x680]
- smlal lr,r9,r2,r5
- smlal lr,r11,r2,r6
- ldr r2,[r1,#0x14]
- ldr r7,[r12,#0x180]
- smlal r5,r11,r2,r5
- smull r6,r5,r2,r6
- ldr r6,[r1,#0x18]
- ldr r8,[r3,#0x600]
- sub r9,r9,r5
- smlal r5,r9,r6,r7
- smull r2,r5,r6,r8
- ldr r6,[r1,#0x1c]
- sub r11,r11,r5
- smlal r8,r9,r6,r8
- ldr r2,[r1,#0x20]
- ldr r5,[r3,#0x200]
- smlal r7,r11,r6,r7
- ldr r6,[r12,#0x580]
- smlal lr,r9,r2,r5
- smlal lr,r11,r2,r6
- ldr r2,[r1,#0x24]
- ldr r7,[r12,#0x280]
- smlal r5,r11,r2,r5
- smull r6,r5,r2,r6
- ldr r6,[r1,#0x28]
- ldr r8,[r3,#0x500]
- sub r9,r9,r5
- smlal r5,r9,r6,r7
- smull r2,r5,r6,r8
- ldr r6,[r1,#0x2c]
- sub r11,r11,r5
-
- smlal r8,r9,r6,r8
- smlal r7,r11,r6,r7
- ldr r5,[r3,#0x300]
- ldr r8,[r1,#0x30]
- ldr r6,[r12,#0x480]
- smlal r7,r9,r8,r5
- smlal r7,r11,r8,r6
- ldr r8,[r1,#0x34]
- ldr r12,[r12,#0x380]
- smlal r5,r11,r8,r5
- smull r6,r5,r8,r6
- ldr r6,[r1,#0x38]
-
-
- ldr r3,[r3,#0x400]
- sub r9,r9,r5
- smlal r7,r9,r6,r12
- smull r8,r7,r6,r3
- cmp r4,#0x210
- sub r11,r11,r7
-
- ldr r2,[r1,#0x3c]
- add r1,r1,#0x40
- smlal r3,r9,r2,r3
- smlal r12,r11,r2,r12
-
- blt Loop_i
-
- mov r3,r9, asr #6
- mov r4,r3, asr #15
- teq r4,r3, asr #31
- ldr r12,LOW_16BITS
- ldr r2,[sp]
- eorne r3,r12,r3,asr #31
- ldr r4,[sp,#8]
- mov r2,r10,lsl r2
- add r4,r4,r2,lsl #1
- strh r3,[r4]
-
- mov r3,r11,asr #6
- mov r4,r3,asr #15
- teq r4,r3,asr #31
- eorne r3,r12,r3,asr #31
- ldr r12,[sp,#0xc]
- ldr r11,[sp,#8]
- rsb r2,r2,r12,lsl #5
- add r2,r11,r2,lsl #1
- strh r3,[r2]
-
- add r10,r10,#1
- cmp r10,#0x10
- blt Loop_j
-
-; Accumulators r4, r5 Initialization
-
- mov r4,#0x20
- mov r5,#0x20
- mov r3,#0x10
-PolyPh_filter_loop2
- add r2,r0,r3,lsl #2
- ldr r12,[r2]
- ldr r8,[r1]
- ldr r6,[r2,#0x80]
- smlal r12,r4,r8,r12
- ldr r12,[r1,#4]
- ldr r7,[r2,#0x40]
- smlal r6,r4,r12,r6
-
- ldr r12,[r1,#8]
- ldr r6,[r2,#0x180]
- smlal r7,r5,r12,r7
- ldr r12,[r2,#0x100]
- ldr r7,[r1,#0xc]
- ldr r2,[r2,#0x140]
- smlal r12,r4,r7,r12
- ldr r12,[r1,#0x10]
- add r3,r3,#0x80
- smlal r6,r4,r12,r6
- ldr r6,[r1,#0x14]
- cmp r3,#0x210
- smlal r2,r5,r6,r2
- add r1,r1,#0x18
-
- blt PolyPh_filter_loop2
- mov r0,r4,asr #6
- mov r2,r0,asr #15
- teq r2,r0,asr #31
- ldrne r12,LOW_16BITS
- ldr r1,[sp,#8]
- eorne r0,r12,r0,asr #31
- strh r0,[r1,#0]
- mov r0,r5,asr #6
- mov r2,r0,asr #15
- teq r2,r0,asr #31
- ldrne r12,LOW_16BITS
- ldr r2,[sp]
- mov r1,#0x10
- eorne r0,r12,r0,asr #31
- ldr r12,[sp,#8]
- mov r1,r1,lsl r2
- add r1,r12,r1,lsl #1
- strh r0,[r1]
- add sp,sp,#0x10
- ldmfd sp!,{r4-r11,pc}
-
-
-PolyPh_filter_coeff
- DCD pqmfSynthWin
-LOW_16BITS
- DCD 0x00007fff
-
- ENDP ; |pvmp3_polyphase_filter_window|
- END
-
diff --git a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp b/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
index ee42dc5..499672b 100644
--- a/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
+++ b/media/libstagefright/codecs/mp3dec/src/pvmp3_mpeg2_get_scale_data.cpp
@@ -139,7 +139,7 @@ void pvmp3_mpeg2_get_scale_data(mp3SideInfo *si,
int16 blocknumber = 0;
granuleInfo *gr_info = &(si->ch[ch].gran[gr]);
- uint32 scalefac_comp, int_scalefac_comp, new_slen[4];
+ uint32 scalefac_comp, int_scalefac_comp, new_slen[4] = { 0,0,0,0 };
scalefac_comp = gr_info->scalefac_compress;
diff --git a/media/libstagefright/codecs/mpeg2dec/Android.mk b/media/libstagefright/codecs/mpeg2dec/Android.mk
new file mode 100644
index 0000000..23b126d
--- /dev/null
+++ b/media/libstagefright/codecs/mpeg2dec/Android.mk
@@ -0,0 +1,27 @@
+ifeq ($(if $(wildcard external/libmpeg2),1,0),1)
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libstagefright_soft_mpeg2dec
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := libmpeg2dec
+LOCAL_SRC_FILES := SoftMPEG2.cpp
+
+LOCAL_C_INCLUDES := $(TOP)/external/libmpeg2/decoder
+LOCAL_C_INCLUDES += $(TOP)/external/libmpeg2/common
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES += libstagefright_omx
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation
+LOCAL_SHARED_LIBRARIES += libutils
+LOCAL_SHARED_LIBRARIES += liblog
+
+LOCAL_LDFLAGS := -Wl,-Bsymbolic
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
new file mode 100644
index 0000000..fb7394b
--- /dev/null
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
@@ -0,0 +1,756 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftMPEG2"
+#include <utils/Log.h>
+
+#include "iv_datatypedef.h"
+#include "iv.h"
+#include "ivd.h"
+#include "ithread.h"
+#include "impeg2d.h"
+#include "SoftMPEG2.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <OMX_VideoExt.h>
+
+namespace android {
+
+#define componentName "video_decoder.mpeg2"
+#define codingType OMX_VIDEO_CodingMPEG2
+#define CODEC_MIME_TYPE MEDIA_MIMETYPE_VIDEO_MPEG2
+
+/** Function and structure definitions to keep code similar for each codec */
+#define ivdec_api_function impeg2d_api_function
+#define ivdext_init_ip_t impeg2d_init_ip_t
+#define ivdext_init_op_t impeg2d_init_op_t
+#define ivdext_fill_mem_rec_ip_t impeg2d_fill_mem_rec_ip_t
+#define ivdext_fill_mem_rec_op_t impeg2d_fill_mem_rec_op_t
+#define ivdext_ctl_set_num_cores_ip_t impeg2d_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t impeg2d_ctl_set_num_cores_op_t
+
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES
+
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelLL },
+ { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelML },
+ { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelH14 },
+ { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelHL },
+
+ { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelLL },
+ { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelML },
+ { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelH14 },
+ { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelHL },
+};
+
+SoftMPEG2::SoftMPEG2(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVideoDecoderOMXComponent(
+ name, componentName, codingType,
+ kProfileLevels, ARRAY_SIZE(kProfileLevels),
+ 320 /* width */, 240 /* height */, callbacks,
+ appData, component),
+ mMemRecords(NULL),
+ mFlushOutBuffer(NULL),
+ mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
+ mIvColorFormat(IV_YUV_420P),
+ mNewWidth(mWidth),
+ mNewHeight(mHeight),
+ mChangingResolution(false) {
+ initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE);
+
+ // If input dump is enabled, then create an empty file
+ GENERATE_FILE_NAMES();
+ CREATE_DUMP_FILE(mInFile);
+
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftMPEG2::~SoftMPEG2() {
+ CHECK_EQ(deInitDecoder(), (status_t)OK);
+}
+
+
+static size_t getMinTimestampIdx(OMX_S64 *pNTimeStamp, bool *pIsTimeStampValid) {
+ OMX_S64 minTimeStamp = LLONG_MAX;
+ int idx = -1;
+ for (size_t i = 0; i < MAX_TIME_STAMPS; i++) {
+ if (pIsTimeStampValid[i]) {
+ if (pNTimeStamp[i] < minTimeStamp) {
+ minTimeStamp = pNTimeStamp[i];
+ idx = i;
+ }
+ }
+ }
+ return idx;
+}
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+void SoftMPEG2::logVersion() {
+ ivd_ctl_getversioninfo_ip_t s_ctl_ip;
+ ivd_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ s_ctl_ip.pv_version_buffer = au1_buf;
+ s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version number: 0x%x",
+ s_ctl_op.u4_error_code);
+ } else {
+ ALOGV("Ittiam decoder version number: %s",
+ (char *)s_ctl_ip.pv_version_buffer);
+ }
+ return;
+}
+
+status_t SoftMPEG2::setParams(size_t stride) {
+ ivd_ctl_set_config_ip_t s_ctl_ip;
+ ivd_ctl_set_config_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+ s_ctl_ip.u4_disp_wd = (UWORD32)stride;
+ s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+
+ s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+
+ ALOGV("Set the run-time (dynamic) parameters stride = %u", stride);
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the run-time parameters: 0x%x",
+ s_ctl_op.u4_error_code);
+
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftMPEG2::resetPlugin() {
+ mIsInFlush = false;
+ mReceivedEOS = false;
+ memset(mTimeStamps, 0, sizeof(mTimeStamps));
+ memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
+
+ /* Initialize both start and end times */
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+
+ return OK;
+}
+
+status_t SoftMPEG2::resetDecoder() {
+ ivd_ctl_reset_ip_t s_ctl_ip;
+ ivd_ctl_reset_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ /* Set the run-time (dynamic) parameters */
+ setParams(outputBufferWidth());
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ return OK;
+}
+
+status_t SoftMPEG2::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_cores_op;
+ IV_API_CALL_STATUS_T status;
+ s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
+ s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip, (void *)&s_set_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in setting number of cores: 0x%x",
+ s_set_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftMPEG2::setFlushMode() {
+ IV_API_CALL_STATUS_T status;
+ ivd_ctl_flush_ip_t s_video_flush_ip;
+ ivd_ctl_flush_op_t s_video_flush_op;
+
+ s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+
+ /* Set the decoder in Flush mode, subsequent decode() calls will flush */
+ status = ivdec_api_function(
+ mCodecCtx, (void *)&s_video_flush_ip, (void *)&s_video_flush_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
+ s_video_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mIsInFlush = true;
+ return OK;
+}
+
+status_t SoftMPEG2::initDecoder() {
+ IV_API_CALL_STATUS_T status;
+
+ UWORD32 u4_num_reorder_frames;
+ UWORD32 u4_num_ref_frames;
+ UWORD32 u4_share_disp_buf;
+
+ mNumCores = GetCPUCoreCount();
+
+ /* Initialize number of ref and reorder modes (for MPEG2) */
+ u4_num_reorder_frames = 16;
+ u4_num_ref_frames = 16;
+ u4_share_disp_buf = 0;
+
+ uint32_t displayStride = outputBufferWidth();
+ uint32_t displayHeight = outputBufferHeight();
+ uint32_t displaySizeY = displayStride * displayHeight;
+
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
+ s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ status = ivdec_api_function(
+ mCodecCtx, (void *)&s_num_mem_rec_ip, (void *)&s_num_mem_rec_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in getting mem records: 0x%x",
+ s_num_mem_rec_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ mMemRecords = (iv_mem_rec_t *)ivd_aligned_malloc(
+ 128, mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (mMemRecords == NULL) {
+ ALOGE("Allocation failure");
+ return NO_MEMORY;
+ }
+
+ memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
+
+ {
+ size_t i;
+ ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
+ ivdext_fill_mem_rec_op_t s_fill_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_ip_t);
+
+ s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_fill_mem_ip.e_output_format = mIvColorFormat;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight;
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_op_t);
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
+ }
+
+ status = ivdec_api_function(
+ mCodecCtx, (void *)&s_fill_mem_ip, (void *)&s_fill_mem_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in filling mem records: 0x%x",
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mNumMemRecords =
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
+
+ ps_mem_rec = mMemRecords;
+
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ivd_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == NULL) {
+ ALOGE("Allocation failure for memory record #%zu of size %u",
+ i, ps_mem_rec->u4_mem_size);
+ status = IV_FAIL;
+ return NO_MEMORY;
+ }
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Initialize the decoder */
+ {
+ ivdext_init_ip_t s_init_ip;
+ ivdext_init_op_t s_init_op;
+
+ void *dec_fxns = (void *)ivdec_api_function;
+
+ s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
+ s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
+ s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;
+
+ s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
+
+ s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
+
+ s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
+
+ mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+ mCodecCtx->pv_fxns = dec_fxns;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in init: 0x%x",
+ s_init_op.s_ivd_init_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* Reset the plugin state */
+ resetPlugin();
+
+ /* Set the run time (dynamic) parameters */
+ setParams(displayStride);
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ /* Get codec version */
+ logVersion();
+
+ /* Allocate internal picture buffer */
+ uint32_t bufferSize = displaySizeY * 3 / 2;
+ mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
+ if (NULL == mFlushOutBuffer) {
+ ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize);
+ return NO_MEMORY;
+ }
+
+ mInitNeeded = false;
+ mFlushNeeded = false;
+ return OK;
+}
+
+status_t SoftMPEG2::deInitDecoder() {
+ size_t i;
+
+ if (mMemRecords) {
+ iv_mem_rec_t *ps_mem_rec;
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++) {
+ if (ps_mem_rec->pv_base) {
+ ivd_aligned_free(ps_mem_rec->pv_base);
+ }
+ ps_mem_rec++;
+ }
+ ivd_aligned_free(mMemRecords);
+ mMemRecords = NULL;
+ }
+
+ if (mFlushOutBuffer) {
+ ivd_aligned_free(mFlushOutBuffer);
+ mFlushOutBuffer = NULL;
+ }
+
+ mInitNeeded = true;
+ mChangingResolution = false;
+
+ return OK;
+}
+
+status_t SoftMPEG2::reInitDecoder() {
+ status_t ret;
+
+ deInitDecoder();
+
+ ret = initDecoder();
+ if (OK != ret) {
+ ALOGE("Create failure");
+ deInitDecoder();
+ return NO_MEMORY;
+ }
+ return OK;
+}
+
+void SoftMPEG2::onReset() {
+ SoftVideoDecoderOMXComponent::onReset();
+
+ resetDecoder();
+ resetPlugin();
+}
+
+OMX_ERRORTYPE SoftMPEG2::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
+ const uint32_t oldWidth = mWidth;
+ const uint32_t oldHeight = mHeight;
+ OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params);
+ if (mWidth != oldWidth || mHeight != oldHeight) {
+ reInitDecoder();
+ }
+ return ret;
+}
+
+void SoftMPEG2::setDecodeArgs(
+ ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t timeStampIx) {
+ size_t sizeY = outputBufferWidth() * outputBufferHeight();
+ size_t sizeUV;
+ uint8_t *pBuf;
+
+ ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+ ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+
+ /* When in flush and after EOS with zero byte input,
+ * inHeader is set to zero. Hence check for non-null */
+ if (inHeader) {
+ ps_dec_ip->u4_ts = timeStampIx;
+ ps_dec_ip->pv_stream_buffer = inHeader->pBuffer
+ + inHeader->nOffset;
+ ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen;
+ } else {
+ ps_dec_ip->u4_ts = 0;
+ ps_dec_ip->pv_stream_buffer = NULL;
+ ps_dec_ip->u4_num_Bytes = 0;
+ }
+
+ if (outHeader) {
+ pBuf = outHeader->pBuffer;
+ } else {
+ pBuf = mFlushOutBuffer;
+ }
+
+ sizeUV = sizeY / 4;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
+
+ ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
+ ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
+ ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
+ ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
+ return;
+}
+void SoftMPEG2::onPortFlushCompleted(OMX_U32 portIndex) {
+ /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
+ if (kOutputPortIndex == portIndex) {
+ setFlushMode();
+
+ while (true) {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ IV_API_CALL_STATUS_T status;
+ size_t sizeY, sizeUV;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ if (0 == s_dec_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+ }
+}
+
+void SoftMPEG2::onQueueFilled(OMX_U32 portIndex) {
+ UNUSED(portIndex);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+
+ /* If input EOS is seen and decoder is not in flush mode,
+ * set the decoder in flush mode.
+ * There can be a case where EOS is sent along with last picture data
+ * In that case, only after decoding that input data, decoder has to be
+ * put in flush. This case is handled here */
+
+ if (mReceivedEOS && !mIsInFlush) {
+ setFlushMode();
+ }
+
+ while (!outQueue.empty()) {
+ BufferInfo *inInfo;
+ OMX_BUFFERHEADERTYPE *inHeader;
+
+ BufferInfo *outInfo;
+ OMX_BUFFERHEADERTYPE *outHeader;
+ size_t timeStampIx;
+
+ inInfo = NULL;
+ inHeader = NULL;
+
+ if (!mIsInFlush) {
+ if (!inQueue.empty()) {
+ inInfo = *inQueue.begin();
+ inHeader = inInfo->mHeader;
+ } else {
+ break;
+ }
+ }
+
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = 0;
+ outHeader->nOffset = 0;
+
+ if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
+ mReceivedEOS = true;
+ if (inHeader->nFilledLen == 0) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ setFlushMode();
+ }
+ }
+
+ // When there is an init required and the decoder is not in flush mode,
+ // update output port's definition and reinitialize decoder.
+ if (mInitNeeded && !mIsInFlush) {
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
+
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+ return;
+ }
+
+ /* Get a free slot in timestamp array to hold input timestamp */
+ {
+ size_t i;
+ timeStampIx = 0;
+ for (i = 0; i < MAX_TIME_STAMPS; i++) {
+ if (!mTimeStampsValid[i]) {
+ timeStampIx = i;
+ break;
+ }
+ }
+ if (inHeader != NULL) {
+ mTimeStampsValid[timeStampIx] = true;
+ mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
+ }
+ }
+
+ {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ WORD32 timeDelay, timeTaken;
+ size_t sizeY, sizeUV;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
+ // If input dump is enabled, then write to file
+ DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes);
+
+ if (s_dec_ip.u4_num_Bytes > 0) {
+ char *ptr = (char *)s_dec_ip.pv_stream_buffer;
+ }
+
+ GETTIME(&mTimeStart, NULL);
+ /* Compute time elapsed between end of previous decode()
+ * to start of current decode() */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+ IV_API_CALL_STATUS_T status;
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+
+ bool unsupportedDimensions = (IMPEG2D_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code);
+ bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
+
+ GETTIME(&mTimeEnd, NULL);
+ /* Compute time taken for decode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_dec_op.u4_num_bytes_consumed);
+ if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) {
+ mFlushNeeded = true;
+ }
+
+ if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
+ /* If the input did not contain picture data, then ignore
+ * the associated timestamp */
+ mTimeStampsValid[timeStampIx] = false;
+ }
+
+ // This is needed to handle CTS DecoderTest testCodecResetsMPEG2WithoutSurface,
+ // which is not sending SPS/PPS after port reconfiguration and flush to the codec.
+ if (unsupportedDimensions && !mFlushNeeded) {
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht);
+
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
+
+ ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
+ return;
+ }
+
+ // If the decoder is in the changing resolution mode and there is no output present,
+ // that means the switching is done and it's ready to reset the decoder and the plugin.
+ if (mChangingResolution && !s_dec_op.u4_output_present) {
+ mChangingResolution = false;
+ resetDecoder();
+ resetPlugin();
+ continue;
+ }
+
+ if (unsupportedDimensions || resChanged) {
+ mChangingResolution = true;
+ if (mFlushNeeded) {
+ setFlushMode();
+ }
+
+ if (unsupportedDimensions) {
+ mNewWidth = s_dec_op.u4_pic_wd;
+ mNewHeight = s_dec_op.u4_pic_ht;
+ mInitNeeded = true;
+ }
+ continue;
+ }
+
+ if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
+ uint32_t width = s_dec_op.u4_pic_wd;
+ uint32_t height = s_dec_op.u4_pic_ht;
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, width, height);
+
+ if (portWillReset) {
+ resetDecoder();
+ return;
+ }
+ }
+
+ if (s_dec_op.u4_output_present) {
+ size_t timeStampIdx;
+ outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+
+ timeStampIdx = getMinTimestampIdx(mTimeStamps, mTimeStampsValid);
+ outHeader->nTimeStamp = mTimeStamps[timeStampIdx];
+ mTimeStampsValid[timeStampIdx] = false;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ } else {
+ /* If in flush mode and no output is returned by the codec,
+ * then come out of flush mode */
+ mIsInFlush = false;
+
+ /* If EOS was received on input port and there is no output
+ * from the codec, then signal EOS on output port */
+ if (mReceivedEOS) {
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ resetPlugin();
+ }
+ }
+ }
+
+ // TODO: Handle more than one picture data
+ if (inHeader != NULL) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
+ OMX_COMPONENTTYPE **component) {
+ return new android::SoftMPEG2(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
new file mode 100644
index 0000000..f7b1961
--- /dev/null
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_MPEG2_H_
+
+#define SOFT_MPEG2_H_
+
+#include "SoftVideoDecoderOMXComponent.h"
+#include <sys/time.h>
+
+namespace android {
+
+#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ivd_aligned_free(buf) free(buf)
+
+/** Number of entries in the time-stamp array */
+#define MAX_TIME_STAMPS 64
+
+/** Maximum number of cores supported by the codec */
+#define CODEC_MAX_NUM_CORES 4
+
+#define CODEC_MAX_WIDTH 1920
+
+#define CODEC_MAX_HEIGHT 1088
+
+/** Input buffer size */
+#define INPUT_BUF_SIZE (1024 * 1024)
+
+#define MIN(a, b) ((a) < (b)) ? (a) : (b)
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = ((end.tv_sec - start.tv_sec) * 1000000) + \
+ (end.tv_usec - start.tv_usec);
+
+struct SoftMPEG2 : public SoftVideoDecoderOMXComponent {
+ SoftMPEG2(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftMPEG2();
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
+ virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
+private:
+ // Number of input and output buffers
+ enum {
+ kNumBuffers = 8
+ };
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by the codec
+
+ size_t mNumCores; // Number of cores to be used by the codec
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+ // Internal buffer to be used to flush out the buffers from decoder
+ uint8_t *mFlushOutBuffer;
+
+ // Status of entries in the timestamp array
+ bool mTimeStampsValid[MAX_TIME_STAMPS];
+
+ // Timestamp array - Since codec does not take 64 bit timestamps,
+ // they are maintained in the plugin
+ OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
+
+#ifdef FILE_DUMP_ENABLE
+ char mInFile[200];
+#endif /* FILE_DUMP_ENABLE */
+
+ OMX_COLOR_FORMATTYPE mOmxColorFormat; // OMX Color format
+ IV_COLOR_FORMAT_T mIvColorFormat; // Ittiam Color format
+
+ bool mIsInFlush; // codec is in flush mode
+ bool mReceivedEOS; // EOS is received on input port
+ bool mInitNeeded;
+ uint32_t mNewWidth;
+ uint32_t mNewHeight;
+ // The input stream has changed to a different resolution, which is still supported by the
+ // codec. So the codec is switching to decode the new resolution.
+ bool mChangingResolution;
+ bool mFlushNeeded;
+
+ status_t initDecoder();
+ status_t deInitDecoder();
+ status_t setFlushMode();
+ status_t setParams(size_t stride);
+ void logVersion();
+ status_t setNumCores();
+ status_t resetDecoder();
+ status_t resetPlugin();
+ status_t reInitDecoder();
+
+ void setDecodeArgs(
+ ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t timeStampIx);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG2);
+};
+
+#ifdef FILE_DUMP_ENABLE
+
+#define INPUT_DUMP_PATH "/sdcard/media/mpeg2d_input"
+#define INPUT_DUMP_EXT "m2v"
+
+#define GENERATE_FILE_NAMES() { \
+ GETTIME(&mTimeStart, NULL); \
+ strcpy(mInFile, ""); \
+ sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \
+ mTimeStart.tv_sec, mTimeStart.tv_usec, \
+ INPUT_DUMP_EXT); \
+}
+
+#define CREATE_DUMP_FILE(m_filename) { \
+ FILE *fp = fopen(m_filename, "wb"); \
+ if (fp != NULL) { \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not open file %s", m_filename); \
+ } \
+}
+#define DUMP_TO_FILE(m_filename, m_buf, m_size) \
+{ \
+ FILE *fp = fopen(m_filename, "ab"); \
+ if (fp != NULL && m_buf != NULL) { \
+ int i; \
+ i = fwrite(m_buf, 1, m_size, fp); \
+ ALOGD("fwrite ret %d to write %d", i, m_size); \
+ if (i != (int)m_size) { \
+ ALOGD("Error in fwrite, returned %d", i); \
+ perror("Error in write to file"); \
+ } \
+ fclose(fp); \
+ } else { \
+ ALOGD("Could not write to file %s", m_filename);\
+ } \
+}
+#else /* FILE_DUMP_ENABLE */
+#define INPUT_DUMP_PATH
+#define INPUT_DUMP_EXT
+#define OUTPUT_DUMP_PATH
+#define OUTPUT_DUMP_EXT
+#define GENERATE_FILE_NAMES()
+#define CREATE_DUMP_FILE(m_filename)
+#define DUMP_TO_FILE(m_filename, m_buf, m_size)
+#endif /* FILE_DUMP_ENABLE */
+
+} // namespace android
+
+#endif // SOFT_MPEG2_H_
diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk
index 7f2c46d..93ff64c 100644
--- a/media/libstagefright/codecs/on2/dec/Android.mk
+++ b/media/libstagefright/codecs/on2/dec/Android.mk
@@ -20,4 +20,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_vpxdec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 476e986..8a95643 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -23,9 +23,6 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
-#include "vpx/vpx_decoder.h"
-#include "vpx/vpx_codec.h"
-#include "vpx/vp8dx.h"
namespace android {
@@ -41,11 +38,15 @@ SoftVPX::SoftVPX(
NULL /* profileLevels */, 0 /* numProfileLevels */,
320 /* width */, 240 /* height */, callbacks, appData, component),
mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
- mCtx(NULL) {
- initPorts(kNumBuffers, 768 * 1024 /* inputBufferSize */,
- kNumBuffers,
- codingType == OMX_VIDEO_CodingVP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9);
-
+ mCtx(NULL),
+ mImg(NULL) {
+ // arbitrary from avc/hevc as vpx does not specify a min compression ratio
+ const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4;
+ const char *mime = mMode == MODE_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9;
+ const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
+ initPorts(
+ kNumBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* inputBufferSize */,
+ kNumBuffers, mime, kMinCompressionRatio);
CHECK_EQ(initDecoder(), (status_t)OK);
}
@@ -85,7 +86,7 @@ status_t SoftVPX::initDecoder() {
return OK;
}
-void SoftVPX::onQueueFilled(OMX_U32 portIndex) {
+void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
if (mOutputPortSettingsChange != NONE) {
return;
}
@@ -118,35 +119,30 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) {
}
}
- if (vpx_codec_decode(
- (vpx_codec_ctx_t *)mCtx,
- inHeader->pBuffer + inHeader->nOffset,
- inHeader->nFilledLen,
- NULL,
- 0)) {
- ALOGE("on2 decoder failed to decode frame.");
+ if (mImg == NULL) {
+ if (vpx_codec_decode(
+ (vpx_codec_ctx_t *)mCtx,
+ inHeader->pBuffer + inHeader->nOffset,
+ inHeader->nFilledLen,
+ NULL,
+ 0)) {
+ ALOGE("on2 decoder failed to decode frame.");
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ vpx_codec_iter_t iter = NULL;
+ mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
}
- vpx_codec_iter_t iter = NULL;
- vpx_image_t *img = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
-
- if (img != NULL) {
- CHECK_EQ(img->fmt, IMG_FMT_I420);
+ if (mImg != NULL) {
+ CHECK_EQ(mImg->fmt, IMG_FMT_I420);
- uint32_t width = img->d_w;
- uint32_t height = img->d_h;
-
- if (width != mWidth || height != mHeight) {
- mWidth = width;
- mHeight = height;
-
- updatePortDefinitions();
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
+ uint32_t width = mImg->d_w;
+ uint32_t height = mImg->d_h;
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, width, height);
+ if (portWillReset) {
return;
}
@@ -155,31 +151,16 @@ void SoftVPX::onQueueFilled(OMX_U32 portIndex) {
outHeader->nFlags = EOSseen ? OMX_BUFFERFLAG_EOS : 0;
outHeader->nTimeStamp = inHeader->nTimeStamp;
- const uint8_t *srcLine = (const uint8_t *)img->planes[PLANE_Y];
uint8_t *dst = outHeader->pBuffer;
- for (size_t i = 0; i < img->d_h; ++i) {
- memcpy(dst, srcLine, img->d_w);
-
- srcLine += img->stride[PLANE_Y];
- dst += img->d_w;
- }
-
- srcLine = (const uint8_t *)img->planes[PLANE_U];
- for (size_t i = 0; i < img->d_h / 2; ++i) {
- memcpy(dst, srcLine, img->d_w / 2);
-
- srcLine += img->stride[PLANE_U];
- dst += img->d_w / 2;
- }
-
- srcLine = (const uint8_t *)img->planes[PLANE_V];
- for (size_t i = 0; i < img->d_h / 2; ++i) {
- memcpy(dst, srcLine, img->d_w / 2);
-
- srcLine += img->stride[PLANE_V];
- dst += img->d_w / 2;
- }
-
+ const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y];
+ const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U];
+ const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V];
+ size_t srcYStride = mImg->stride[PLANE_Y];
+ size_t srcUStride = mImg->stride[PLANE_U];
+ size_t srcVStride = mImg->stride[PLANE_V];
+ copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
+
+ mImg = NULL;
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
outInfo = NULL;
@@ -211,4 +192,5 @@ android::SoftOMXComponent *createSoftOMXComponent(
} else {
CHECK(!"Unknown component");
}
+ return NULL;
}
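copyYV12FrameToOutputBuffer() is a helper on the shared video-decoder base class and its body is not part of this hunk; conceptually it replaces the three per-plane loops deleted above. A rough, stride-aware sketch of that kind of copy (assumed shape with a hypothetical standalone signature, not the actual base-class code):

    // Sketch: pack a YUV420 planar frame into a tight width*height*3/2 buffer.
    static void copyPlanarFrame(uint8_t *dst,
            const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
            size_t srcYStride, size_t srcUStride, size_t srcVStride,
            uint32_t width, uint32_t height) {
        for (uint32_t i = 0; i < height; ++i) {        // luma rows
            memcpy(dst, srcY, width);
            srcY += srcYStride;
            dst += width;
        }
        for (uint32_t i = 0; i < height / 2; ++i) {    // Cb rows
            memcpy(dst, srcU, width / 2);
            srcU += srcUStride;
            dst += width / 2;
        }
        for (uint32_t i = 0; i < height / 2; ++i) {    // Cr rows
            memcpy(dst, srcV, width / 2);
            srcV += srcVStride;
            dst += width / 2;
        }
    }
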
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index cd5eb28..8f68693 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -20,6 +20,10 @@
#include "SoftVideoDecoderOMXComponent.h"
+#include "vpx/vpx_decoder.h"
+#include "vpx/vpx_codec.h"
+#include "vpx/vp8dx.h"
+
namespace android {
struct SoftVPX : public SoftVideoDecoderOMXComponent {
@@ -47,6 +51,8 @@ private:
void *mCtx;
+ vpx_image_t *mImg;
+
status_t initDecoder();
DISALLOW_EVIL_CONSTRUCTORS(SoftVPX);
diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk
index 4060a0a..253fa04 100644
--- a/media/libstagefright/codecs/on2/enc/Android.mk
+++ b/media/libstagefright/codecs/on2/enc/Android.mk
@@ -6,16 +6,11 @@ LOCAL_SRC_FILES := \
LOCAL_C_INCLUDES := \
$(TOP)/external/libvpx/libvpx \
- $(TOP)/external/openssl/include \
$(TOP)/external/libvpx/libvpx/vpx_codec \
$(TOP)/external/libvpx/libvpx/vpx_ports \
frameworks/av/media/libstagefright/include \
frameworks/native/include/media/openmax \
-ifeq ($(TARGET_DEVICE), manta)
- LOCAL_CFLAGS += -DSURFACE_IS_BGR32
-endif
-
LOCAL_STATIC_LIBRARIES := \
libvpx
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 5efe022..970acf3 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -19,6 +19,7 @@
#include "SoftVPXEncoder.h"
#include <utils/Log.h>
+#include <utils/misc.h>
#include <media/hardware/HardwareAPI.h>
#include <media/hardware/MetadataBufferType.h>
@@ -27,7 +28,6 @@
namespace android {
-
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -51,181 +51,55 @@ static int GetCPUCoreCount() {
return cpuCoreCount;
}
-
-// This color conversion utility is copied from SoftMPEG4Encoder.cpp
-inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv,
- uint8_t* outyuv,
- int32_t width,
- int32_t height) {
- int32_t outYsize = width * height;
- uint32_t *outy = (uint32_t *) outyuv;
- uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
- uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
- /* Y copying */
- memcpy(outy, inyuv, outYsize);
-
- /* U & V copying */
- uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
- for (int32_t i = height >> 1; i > 0; --i) {
- for (int32_t j = width >> 2; j > 0; --j) {
- uint32_t temp = *inyuv_4++;
- uint32_t tempU = temp & 0xFF;
- tempU = tempU | ((temp >> 8) & 0xFF00);
-
- uint32_t tempV = (temp >> 8) & 0xFF;
- tempV = tempV | ((temp >> 16) & 0xFF00);
-
- // Flip U and V
- *outcb++ = tempV;
- *outcr++ = tempU;
- }
- }
-}
-
-static void ConvertRGB32ToPlanar(
- const uint8_t *src, uint8_t *dstY, int32_t width, int32_t height) {
- CHECK((width & 1) == 0);
- CHECK((height & 1) == 0);
-
- uint8_t *dstU = dstY + width * height;
- uint8_t *dstV = dstU + (width / 2) * (height / 2);
-
- for (int32_t y = 0; y < height; ++y) {
- for (int32_t x = 0; x < width; ++x) {
-#ifdef SURFACE_IS_BGR32
- unsigned blue = src[4 * x];
- unsigned green = src[4 * x + 1];
- unsigned red= src[4 * x + 2];
-#else
- unsigned red= src[4 * x];
- unsigned green = src[4 * x + 1];
- unsigned blue = src[4 * x + 2];
-#endif
-
- unsigned luma =
- ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
-
- dstY[x] = luma;
-
- if ((x & 1) == 0 && (y & 1) == 0) {
- unsigned U =
- ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
-
- unsigned V =
- ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
-
- dstU[x / 2] = U;
- dstV[x / 2] = V;
- }
- }
-
- if ((y & 1) == 0) {
- dstU += width / 2;
- dstV += width / 2;
- }
-
- src += 4 * width;
- dstY += width;
- }
-}
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version0 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version1 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version2 },
+ { OMX_VIDEO_VP8ProfileMain, OMX_VIDEO_VP8Level_Version3 },
+};
SoftVPXEncoder::SoftVPXEncoder(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ : SoftVideoEncoderOMXComponent(
+ name, "video_encoder.vp8", OMX_VIDEO_CodingVP8,
+ kProfileLevels, NELEM(kProfileLevels),
+ 176 /* width */, 144 /* height */,
+ callbacks, appData, component),
mCodecContext(NULL),
mCodecConfiguration(NULL),
mCodecInterface(NULL),
- mWidth(176),
- mHeight(144),
- mBitrate(192000), // in bps
mBitrateUpdated(false),
mBitrateControlMode(VPX_VBR), // variable bitrate
- mFrameDurationUs(33333), // Defaults to 30 fps
mDCTPartitions(0),
mErrorResilience(OMX_FALSE),
- mColorFormat(OMX_COLOR_FormatYUV420Planar),
mLevel(OMX_VIDEO_VP8Level_Version0),
+ mKeyFrameInterval(0),
+ mMinQuantizer(0),
+ mMaxQuantizer(0),
+ mTemporalLayers(0),
+ mTemporalPatternType(OMX_VIDEO_VPXTemporalLayerPatternNone),
+ mTemporalPatternLength(0),
+ mTemporalPatternIdx(0),
+ mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
mConversionBuffer(NULL),
- mInputDataIsMeta(false),
- mGrallocModule(NULL),
mKeyFrameRequested(false) {
- initPorts();
-}
+ memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
+ mTemporalLayerBitrateRatio[0] = 100;
+ const size_t kMinOutputBufferSize = 1024 * 1024; // arbitrary
-SoftVPXEncoder::~SoftVPXEncoder() {
- releaseEncoder();
+ initPorts(
+ kNumBuffers, kNumBuffers, kMinOutputBufferSize,
+ MEDIA_MIMETYPE_VIDEO_VP8, 2 /* minCompressionRatio */);
}
-void SoftVPXEncoder::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE inputPort;
- OMX_PARAM_PORTDEFINITIONTYPE outputPort;
-
- InitOMXParams(&inputPort);
- InitOMXParams(&outputPort);
-
- inputPort.nBufferCountMin = kNumBuffers;
- inputPort.nBufferCountActual = inputPort.nBufferCountMin;
- inputPort.bEnabled = OMX_TRUE;
- inputPort.bPopulated = OMX_FALSE;
- inputPort.eDomain = OMX_PortDomainVideo;
- inputPort.bBuffersContiguous = OMX_FALSE;
- inputPort.format.video.pNativeRender = NULL;
- inputPort.format.video.nFrameWidth = mWidth;
- inputPort.format.video.nFrameHeight = mHeight;
- inputPort.format.video.nStride = inputPort.format.video.nFrameWidth;
- inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight;
- inputPort.format.video.nBitrate = 0;
- // frameRate is reciprocal of frameDuration, which is
- // in microseconds. It is also in Q16 format.
- inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16;
- inputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
- inputPort.nPortIndex = kInputPortIndex;
- inputPort.eDir = OMX_DirInput;
- inputPort.nBufferAlignment = kInputBufferAlignment;
- inputPort.format.video.cMIMEType =
- const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
- inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- inputPort.format.video.eColorFormat = mColorFormat;
- inputPort.format.video.pNativeWindow = NULL;
- inputPort.nBufferSize =
- (inputPort.format.video.nStride *
- inputPort.format.video.nSliceHeight * 3) / 2;
-
- addPort(inputPort);
-
- outputPort.nBufferCountMin = kNumBuffers;
- outputPort.nBufferCountActual = outputPort.nBufferCountMin;
- outputPort.bEnabled = OMX_TRUE;
- outputPort.bPopulated = OMX_FALSE;
- outputPort.eDomain = OMX_PortDomainVideo;
- outputPort.bBuffersContiguous = OMX_FALSE;
- outputPort.format.video.pNativeRender = NULL;
- outputPort.format.video.nFrameWidth = mWidth;
- outputPort.format.video.nFrameHeight = mHeight;
- outputPort.format.video.nStride = outputPort.format.video.nFrameWidth;
- outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight;
- outputPort.format.video.nBitrate = mBitrate;
- outputPort.format.video.xFramerate = 0;
- outputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
- outputPort.nPortIndex = kOutputPortIndex;
- outputPort.eDir = OMX_DirOutput;
- outputPort.nBufferAlignment = kOutputBufferAlignment;
- outputPort.format.video.cMIMEType =
- const_cast<char *>(MEDIA_MIMETYPE_VIDEO_VP8);
- outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVP8;
- outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- outputPort.format.video.pNativeWindow = NULL;
- outputPort.nBufferSize = 256 * 1024; // arbitrary
-
- addPort(outputPort);
+SoftVPXEncoder::~SoftVPXEncoder() {
+ releaseEncoder();
}
-
status_t SoftVPXEncoder::initEncoder() {
vpx_codec_err_t codec_return;
@@ -236,7 +110,9 @@ status_t SoftVPXEncoder::initEncoder() {
if (mCodecInterface == NULL) {
return UNKNOWN_ERROR;
}
-
+ ALOGD("VP8: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
+ mMinQuantizer, mMaxQuantizer);
codec_return = vpx_codec_enc_config_default(mCodecInterface,
mCodecConfiguration,
0); // Codec specific flags
@@ -277,8 +153,120 @@ status_t SoftVPXEncoder::initEncoder() {
mCodecConfiguration->g_timebase.num = 1;
mCodecConfiguration->g_timebase.den = 1000000;
// rc_target_bitrate is in kbps, mBitrate in bps
- mCodecConfiguration->rc_target_bitrate = mBitrate/1000;
+ mCodecConfiguration->rc_target_bitrate = (mBitrate + 500) / 1000;
mCodecConfiguration->rc_end_usage = mBitrateControlMode;
+ // Disable frame drop - not allowed in MediaCodec now.
+ mCodecConfiguration->rc_dropframe_thresh = 0;
+ if (mBitrateControlMode == VPX_CBR) {
+ // Disable spatial resizing.
+ mCodecConfiguration->rc_resize_allowed = 0;
+ // Single-pass mode.
+ mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
+ // Maximum amount of bits that can be subtracted from the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_undershoot_pct = 100;
+ // Maximum amount of bits that can be added to the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_overshoot_pct = 15;
+ // Initial value of the buffer level in ms.
+ mCodecConfiguration->rc_buf_initial_sz = 500;
+ // Amount of data that the encoder should try to maintain in ms.
+ mCodecConfiguration->rc_buf_optimal_sz = 600;
+ // The amount of data that may be buffered by the decoding
+ // application in ms.
+ mCodecConfiguration->rc_buf_sz = 1000;
+ // Enable error resilience - needed for packet loss.
+ mCodecConfiguration->g_error_resilient = 1;
+ // Disable lagged encoding.
+ mCodecConfiguration->g_lag_in_frames = 0;
+ // Maximum key frame interval - for CBR, boost to 3000
+ mCodecConfiguration->kf_max_dist = 3000;
+ // Encoder determines optimal key frame placement automatically.
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+
+ // Frame temporal pattern - for now only a WebRTC-like pattern is supported.
+ switch (mTemporalLayers) {
+ case 0:
+ {
+ mTemporalPatternLength = 0;
+ break;
+ }
+ case 1:
+ {
+ mCodecConfiguration->ts_number_layers = 1;
+ mCodecConfiguration->ts_rate_decimator[0] = 1;
+ mCodecConfiguration->ts_periodicity = 1;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mTemporalPattern[0] = kTemporalUpdateLastRefAll;
+ mTemporalPatternLength = 1;
+ break;
+ }
+ case 2:
+ {
+ mCodecConfiguration->ts_number_layers = 2;
+ mCodecConfiguration->ts_rate_decimator[0] = 2;
+ mCodecConfiguration->ts_rate_decimator[1] = 1;
+ mCodecConfiguration->ts_periodicity = 2;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 1;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ }
+ case 3:
+ {
+ mCodecConfiguration->ts_number_layers = 3;
+ mCodecConfiguration->ts_rate_decimator[0] = 4;
+ mCodecConfiguration->ts_rate_decimator[1] = 2;
+ mCodecConfiguration->ts_rate_decimator[2] = 1;
+ mCodecConfiguration->ts_periodicity = 4;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 2;
+ mCodecConfiguration->ts_layer_id[2] = 1;
+ mCodecConfiguration->ts_layer_id[3] = 2;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateNone;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateNone;
+ mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ }
+ default:
+ {
+ ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Set bitrate values for each layer
+ for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
+ mCodecConfiguration->ts_target_bitrate[i] =
+ mCodecConfiguration->rc_target_bitrate *
+ mTemporalLayerBitrateRatio[i] / 100;
+ }
+ if (mKeyFrameInterval > 0) {
+ mCodecConfiguration->kf_max_dist = mKeyFrameInterval;
+ mCodecConfiguration->kf_min_dist = mKeyFrameInterval;
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+ if (mMinQuantizer > 0) {
+ mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
+ }
+ if (mMaxQuantizer > 0) {
+ mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+ }
codec_return = vpx_codec_enc_init(mCodecContext,
mCodecInterface,
@@ -298,13 +286,39 @@ status_t SoftVPXEncoder::initEncoder() {
return UNKNOWN_ERROR;
}
- if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || mInputDataIsMeta) {
- if (mConversionBuffer == NULL) {
- mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
- if (mConversionBuffer == NULL) {
- ALOGE("Allocating conversion buffer failed.");
- return UNKNOWN_ERROR;
+ // Extra CBR settings
+ if (mBitrateControlMode == VPX_CBR) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_STATIC_THRESHOLD,
+ 1);
+ if (codec_return == VPX_CODEC_OK) {
+ uint32_t rc_max_intra_target =
+ mCodecConfiguration->rc_buf_optimal_sz * (mFramerate >> 17) / 10;
+ // Don't go below 3 times the per-frame bandwidth.
+ if (rc_max_intra_target < 300) {
+ rc_max_intra_target = 300;
}
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_MAX_INTRA_BITRATE_PCT,
+ rc_max_intra_target);
+ }
+ if (codec_return == VPX_CODEC_OK) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_CPUUSED,
+ -8);
+ }
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error setting cbr parameters for vpx encoder.");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
+ free(mConversionBuffer);
+ mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
+ if (mConversionBuffer == NULL) {
+ ALOGE("Allocating conversion buffer failed.");
+ return UNKNOWN_ERROR;
}
}
return OK;
@@ -324,7 +338,7 @@ status_t SoftVPXEncoder::releaseEncoder() {
}
if (mConversionBuffer != NULL) {
- delete mConversionBuffer;
+ free(mConversionBuffer);
mConversionBuffer = NULL;
}
@@ -341,40 +355,6 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
const int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamVideoPortFormat: {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
-
- if (formatParams->nPortIndex == kInputPortIndex) {
- if (formatParams->nIndex >= kNumberOfSupportedColorFormats) {
- return OMX_ErrorNoMore;
- }
-
- // Color formats, in order of preference
- if (formatParams->nIndex == 0) {
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- } else if (formatParams->nIndex == 1) {
- formatParams->eColorFormat =
- OMX_COLOR_FormatYUV420SemiPlanar;
- } else {
- formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
- }
-
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- // Converting from microseconds
- // Also converting to Q16 format
- formatParams->xFramerate = (1000000/mFrameDurationUs) << 16;
- return OMX_ErrorNone;
- } else if (formatParams->nPortIndex == kOutputPortIndex) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingVP8;
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- formatParams->xFramerate = 0;
- return OMX_ErrorNone;
- } else {
- return OMX_ErrorBadPortIndex;
- }
- }
-
case OMX_IndexParamVideoBitrate: {
OMX_VIDEO_PARAM_BITRATETYPE *bitrate =
(OMX_VIDEO_PARAM_BITRATETYPE *)param;
@@ -411,54 +391,26 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
return OMX_ErrorNone;
}
- case OMX_IndexParamVideoProfileLevelQuerySupported: {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
-
- if (profileAndLevel->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
-
- switch (profileAndLevel->nProfileIndex) {
- case 0:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0;
- break;
-
- case 1:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1;
- break;
-
- case 2:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2;
- break;
-
- case 3:
- profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3;
- break;
-
- default:
- return OMX_ErrorNoMore;
- }
-
- profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoProfileLevelCurrent: {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
+ case OMX_IndexParamVideoAndroidVp8Encoder: {
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vp8AndroidParams =
+ (OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param;
- if (profileAndLevel->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
+ if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
- profileAndLevel->eLevel = mLevel;
- profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
- return OMX_ErrorNone;
+ vp8AndroidParams->nKeyFrameInterval = mKeyFrameInterval;
+ vp8AndroidParams->eTemporalPattern = mTemporalPatternType;
+ vp8AndroidParams->nTemporalLayerCount = mTemporalLayers;
+ vp8AndroidParams->nMinQuantizer = mMinQuantizer;
+ vp8AndroidParams->nMaxQuantizer = mMaxQuantizer;
+ memcpy(vp8AndroidParams->nTemporalLayerBitrateRatio,
+ mTemporalLayerBitrateRatio, sizeof(mTemporalLayerBitrateRatio));
+ return OMX_ErrorNone;
}
default:
- return SimpleSoftOMXComponent::internalGetParameter(index, param);
+ return SoftVideoEncoderOMXComponent::internalGetParameter(index, param);
}
}
@@ -469,55 +421,20 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
const int32_t indexFull = index;
switch (indexFull) {
- case OMX_IndexParamStandardComponentRole:
- return internalSetRoleParams(
- (const OMX_PARAM_COMPONENTROLETYPE *)param);
-
case OMX_IndexParamVideoBitrate:
return internalSetBitrateParams(
(const OMX_VIDEO_PARAM_BITRATETYPE *)param);
- case OMX_IndexParamPortDefinition:
- {
- OMX_ERRORTYPE err = internalSetPortParams(
- (const OMX_PARAM_PORTDEFINITIONTYPE *)param);
-
- if (err != OMX_ErrorNone) {
- return err;
- }
-
- return SimpleSoftOMXComponent::internalSetParameter(index, param);
- }
-
- case OMX_IndexParamVideoPortFormat:
- return internalSetFormatParams(
- (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param);
-
case OMX_IndexParamVideoVp8:
return internalSetVp8Params(
(const OMX_VIDEO_PARAM_VP8TYPE *)param);
- case OMX_IndexParamVideoProfileLevelCurrent:
- return internalSetProfileLevel(
- (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param);
-
- case OMX_IndexVendorStartUnused:
- {
- // storeMetaDataInBuffers
- const StoreMetaDataInBuffersParams *storeParam =
- (const StoreMetaDataInBuffersParams *)param;
-
- if (storeParam->nPortIndex != kInputPortIndex) {
- return OMX_ErrorBadPortIndex;
- }
-
- mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
-
- return OMX_ErrorNone;
- }
+ case OMX_IndexParamVideoAndroidVp8Encoder:
+ return internalSetAndroidVp8Params(
+ (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
default:
- return SimpleSoftOMXComponent::internalSetParameter(index, param);
+ return SoftVideoEncoderOMXComponent::internalSetParameter(index, param);
}
}
@@ -558,29 +475,6 @@ OMX_ERRORTYPE SoftVPXEncoder::setConfig(
}
}
-OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel(
- const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) {
- if (profileAndLevel->nPortIndex != kOutputPortIndex) {
- return OMX_ErrorUnsupportedIndex;
- }
-
- if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) {
- return OMX_ErrorBadParameter;
- }
-
- if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 ||
- profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 ||
- profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 ||
- profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) {
- mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel;
- } else {
- return OMX_ErrorBadParameter;
- }
-
- return OMX_ErrorNone;
-}
-
-
OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
if (vp8Params->nPortIndex != kOutputPortIndex) {
@@ -610,87 +504,51 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
return OMX_ErrorNone;
}
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams(
- const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) {
- if (format->nPortIndex == kInputPortIndex) {
- if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
- format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
- format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
- mColorFormat = format->eColorFormat;
-
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
- def->format.video.eColorFormat = mColorFormat;
-
- return OMX_ErrorNone;
- } else {
- ALOGE("Unsupported color format %i", format->eColorFormat);
- return OMX_ErrorUnsupportedSetting;
- }
- } else if (format->nPortIndex == kOutputPortIndex) {
- if (format->eCompressionFormat == OMX_VIDEO_CodingVP8) {
- return OMX_ErrorNone;
- } else {
- return OMX_ErrorUnsupportedSetting;
- }
- } else {
- return OMX_ErrorBadPortIndex;
+OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVp8Params(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams) {
+ if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
}
-}
-
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams(
- const OMX_PARAM_COMPONENTROLETYPE* role) {
- const char* roleText = (const char*)role->cRole;
- const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1;
-
- if (strncmp(roleText, "video_encoder.vp8", roleTextMaxSize)) {
- ALOGE("Unsupported component role");
+ if (vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternNone &&
+ vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vp8AndroidParams->nTemporalLayerCount > OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vp8AndroidParams->nMinQuantizer > vp8AndroidParams->nMaxQuantizer) {
return OMX_ErrorBadParameter;
}
- return OMX_ErrorNone;
-}
-
-
-OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
- const OMX_PARAM_PORTDEFINITIONTYPE* port) {
- if (port->nPortIndex == kInputPortIndex) {
- mWidth = port->format.video.nFrameWidth;
- mHeight = port->format.video.nFrameHeight;
-
- // xFramerate comes in Q16 format, in frames per second unit
- const uint32_t framerate = port->format.video.xFramerate >> 16;
- // frame duration is in microseconds
- mFrameDurationUs = (1000000/framerate);
-
- if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
- port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
- port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
- mColorFormat = port->format.video.eColorFormat;
- } else {
- return OMX_ErrorUnsupportedSetting;
+ mTemporalPatternType = vp8AndroidParams->eTemporalPattern;
+ if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ mTemporalLayers = vp8AndroidParams->nTemporalLayerCount;
+ } else if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternNone) {
+ mTemporalLayers = 0;
+ }
+ // Check that the bitrate distribution between layers is in increasing order
+ if (mTemporalLayers > 1) {
+ for (size_t i = 0; i < mTemporalLayers - 1; i++) {
+ if (vp8AndroidParams->nTemporalLayerBitrateRatio[i + 1] <=
+ vp8AndroidParams->nTemporalLayerBitrateRatio[i]) {
+ ALOGE("Wrong bitrate ratio - should be in increasing order.");
+ return OMX_ErrorBadParameter;
+ }
}
-
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.xFramerate = port->format.video.xFramerate;
- def->format.video.eColorFormat = mColorFormat;
- def = &editPortInfo(kOutputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
-
- return OMX_ErrorNone;
- } else if (port->nPortIndex == kOutputPortIndex) {
- mBitrate = port->format.video.nBitrate;
- return OMX_ErrorNone;
- } else {
- return OMX_ErrorBadPortIndex;
}
+ mKeyFrameInterval = vp8AndroidParams->nKeyFrameInterval;
+ mMinQuantizer = vp8AndroidParams->nMinQuantizer;
+ mMaxQuantizer = vp8AndroidParams->nMaxQuantizer;
+ memcpy(mTemporalLayerBitrateRatio, vp8AndroidParams->nTemporalLayerBitrateRatio,
+ sizeof(mTemporalLayerBitrateRatio));
+ ALOGD("VP8: internalSetAndroidVp8Params. BRMode: %u. TS: %zu. KF: %u."
+ " QP: %u - %u BR0: %u. BR1: %u. BR2: %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
+ mMinQuantizer, mMaxQuantizer, mTemporalLayerBitrateRatio[0],
+ mTemporalLayerBitrateRatio[1], mTemporalLayerBitrateRatio[2]);
+ return OMX_ErrorNone;
}
-
OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) {
if (bitrate->nPortIndex != kOutputPortIndex) {
@@ -710,8 +568,76 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
return OMX_ErrorNone;
}
+vpx_enc_frame_flags_t SoftVPXEncoder::getEncodeFlags() {
+ vpx_enc_frame_flags_t flags = 0;
+ int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
+ mTemporalPatternIdx++;
+ switch (mTemporalPattern[patternIdx]) {
+ case kTemporalUpdateLast:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ break;
+ case kTemporalUpdateGoldenWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateGolden:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateAltrefWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateAltref:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateNoneNoRefAltref:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ // Deliberately no break here.
+ case kTemporalUpdateNone:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateNoneNoRefGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastAndGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateLastRefAll:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ break;
+ }
+ return flags;
+}
-void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftVPXEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
// Initialize encoder if not already
if (mCodecContext == NULL) {
if (OK != initEncoder()) {
@@ -749,51 +675,33 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
return;
}
- uint8_t *source =
+ const uint8_t *source =
inputBufferHeader->pBuffer + inputBufferHeader->nOffset;
if (mInputDataIsMeta) {
- CHECK_GE(inputBufferHeader->nFilledLen,
- 4 + sizeof(buffer_handle_t));
-
- uint32_t bufferType = *(uint32_t *)source;
- CHECK_EQ(bufferType, kMetadataBufferTypeGrallocSource);
-
- if (mGrallocModule == NULL) {
- CHECK_EQ(0, hw_get_module(
- GRALLOC_HARDWARE_MODULE_ID, &mGrallocModule));
+ source = extractGraphicBuffer(
+ mConversionBuffer, mWidth * mHeight * 3 / 2,
+ source, inputBufferHeader->nFilledLen,
+ mWidth, mHeight);
+ if (source == NULL) {
+ ALOGE("Unable to extract gralloc buffer in metadata mode");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return;
}
-
- const gralloc_module_t *grmodule =
- (const gralloc_module_t *)mGrallocModule;
-
- buffer_handle_t handle = *(buffer_handle_t *)(source + 4);
-
- void *bits;
- CHECK_EQ(0,
- grmodule->lock(
- grmodule, handle,
- GRALLOC_USAGE_SW_READ_OFTEN
- | GRALLOC_USAGE_SW_WRITE_NEVER,
- 0, 0, mWidth, mHeight, &bits));
-
- ConvertRGB32ToPlanar(
- (const uint8_t *)bits, mConversionBuffer, mWidth, mHeight);
-
- source = mConversionBuffer;
-
- CHECK_EQ(0, grmodule->unlock(grmodule, handle));
} else if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
- ConvertSemiPlanarToPlanar(
+ ConvertYUV420SemiPlanarToYUV420Planar(
source, mConversionBuffer, mWidth, mHeight);
source = mConversionBuffer;
}
vpx_image_t raw_frame;
vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight,
- kInputBufferAlignment, source);
+ kInputBufferAlignment, (uint8_t *)source);
vpx_enc_frame_flags_t flags = 0;
+ if (mTemporalPatternLength > 0) {
+ flags = getEncodeFlags();
+ }
if (mKeyFrameRequested) {
flags |= VPX_EFLAG_FORCE_KF;
mKeyFrameRequested = false;
@@ -814,11 +722,18 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
mBitrateUpdated = false;
}
+ uint32_t frameDuration;
+ if (inputBufferHeader->nTimeStamp > mLastTimestamp) {
+ frameDuration = (uint32_t)(inputBufferHeader->nTimeStamp - mLastTimestamp);
+ } else {
+ frameDuration = (uint32_t)(((uint64_t)1000000 << 16) / mFramerate);
+ }
+ mLastTimestamp = inputBufferHeader->nTimeStamp;
codec_return = vpx_codec_encode(
mCodecContext,
&raw_frame,
inputBufferHeader->nTimeStamp, // in timebase units
- mFrameDurationUs, // frame duration in timebase units
+ frameDuration, // frame duration in timebase units
flags, // frame flags
VPX_DL_REALTIME); // encoding deadline
if (codec_return != VPX_CODEC_OK) {
@@ -857,16 +772,6 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
-OMX_ERRORTYPE SoftVPXEncoder::getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index) {
- if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *index = OMX_IndexVendorStartUnused;
- return OMX_ErrorNone;
- }
-
- return SimpleSoftOMXComponent::getExtensionIndex(name, index);
-}
-
} // namespace android
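Two of the derived values above are easier to see with concrete numbers. mFramerate is frames per second in Q16 format, so for an assumed 30 fps input (mFramerate == 30 << 16):

    // Worked example under the assumption mFramerate == 30 << 16 (30 fps, Q16):
    //   fallback frameDuration = (1000000 << 16) / mFramerate       = 33333 us
    //   mFramerate >> 17       = fps / 2                            = 15
    //   rc_max_intra_target    = rc_buf_optimal_sz(600) * 15 / 10   = 900
    //     (percent of the average per-frame bandwidth, clamped to >= 300)
    //
    // Layer bitrate split, assuming 3 temporal layers with ratios {25, 50, 100}
    // and rc_target_bitrate = 500 kbps:
    //   ts_target_bitrate = { 125, 250, 500 } kbps
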
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 076830f..cd0a0cf 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -18,7 +18,7 @@
#define SOFT_VPX_ENCODER_H_
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
#include <OMX_VideoExt.h>
#include <OMX_IndexExt.h>
@@ -59,7 +59,7 @@ namespace android {
// - OMX timestamps are in microseconds, therefore
// encoder timebase is fixed to 1/1000000
-struct SoftVPXEncoder : public SimpleSoftOMXComponent {
+struct SoftVPXEncoder : public SoftVideoEncoderOMXComponent {
SoftVPXEncoder(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
@@ -87,10 +87,44 @@ protected:
// encoding of the frame
virtual void onQueueFilled(OMX_U32 portIndex);
- virtual OMX_ERRORTYPE getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index);
-
private:
+ enum TemporalReferences {
+ // For 1 layer case: reference all (last, golden, and alt ref), but only
+ // update last.
+ kTemporalUpdateLastRefAll = 12,
+ // First base layer frame for 3 temporal layers, which updates last and
+ // golden with alt ref dependency.
+ kTemporalUpdateLastAndGoldenRefAltRef = 11,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenRefAltRef = 10,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9,
+ // Base layer with alt ref dependency.
+ kTemporalUpdateLastRefAltRef = 8,
+ // Highest enhancement layer without dependency on golden with alt ref
+ // dependency.
+ kTemporalUpdateNoneNoRefGoldenRefAltRef = 7,
+ // Second layer and last frame in cycle, for 2 layers.
+ kTemporalUpdateNoneNoRefAltref = 6,
+ // Highest enhancement layer.
+ kTemporalUpdateNone = 5,
+ // Second enhancement layer.
+ kTemporalUpdateAltref = 4,
+ // Second enhancement layer without dependency on previous frames in
+ // the second enhancement layer.
+ kTemporalUpdateAltrefWithoutDependency = 3,
+ // First enhancement layer.
+ kTemporalUpdateGolden = 2,
+ // First enhancement layer without dependency on previous frames in
+ // the first enhancement layer.
+ kTemporalUpdateGoldenWithoutDependency = 1,
+ // Base layer.
+ kTemporalUpdateLast = 0,
+ };
+ enum {
+ kMaxTemporalPattern = 8
+ };
+
// number of buffers allocated per port
static const uint32_t kNumBuffers = 4;
@@ -121,25 +155,12 @@ private:
// that specifies algorithm interface (e.g. vp8)
vpx_codec_iface_t* mCodecInterface;
- // Width of the input frames
- int32_t mWidth;
-
- // Height of the input frames
- int32_t mHeight;
-
- // Target bitrate set for the encoder, in bits per second.
- uint32_t mBitrate;
-
// If a request for a change in bitrate has been received.
bool mBitrateUpdated;
// Bitrate control mode, either constant or variable
vpx_rc_mode mBitrateControlMode;
- // Frame duration is the reciprocal of framerate, denoted
- // in microseconds
- uint64_t mFrameDurationUs;
-
// vp8 specific configuration parameter
// that enables token partitioning of
// the stream into substreams
@@ -149,9 +170,6 @@ private:
// is enabled in encoder
OMX_BOOL mErrorResilience;
- // Color format for the input port
- OMX_COLOR_FORMATTYPE mColorFormat;
-
// Encoder profile corresponding to OMX level parameter
//
// The inconsistency in the naming is caused by
@@ -160,21 +178,44 @@ private:
// something else.
OMX_VIDEO_VP8LEVELTYPE mLevel;
+ // Key frame interval in frames
+ uint32_t mKeyFrameInterval;
+
+ // Minimum (best quality) quantizer
+ uint32_t mMinQuantizer;
+
+ // Maximum (worst quality) quantizer
+ uint32_t mMaxQuantizer;
+
+ // Number of coding temporal layers to be used.
+ size_t mTemporalLayers;
+
+ // Temporal layer bitrate ratio in percentage
+ uint32_t mTemporalLayerBitrateRatio[OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS];
+
+ // Temporal pattern type
+ OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE mTemporalPatternType;
+
+ // Temporal pattern length
+ size_t mTemporalPatternLength;
+
+ // Temporal pattern current index
+ size_t mTemporalPatternIdx;
+
+ // Frame type temporal pattern
+ TemporalReferences mTemporalPattern[kMaxTemporalPattern];
+
+ // Last input buffer timestamp
+ OMX_TICKS mLastTimestamp;
+
// Conversion buffer is needed to convert semi
// planar yuv420 to planar format
// It is only allocated if input format is
// indeed YUV420SemiPlanar.
uint8_t* mConversionBuffer;
- bool mInputDataIsMeta;
- const hw_module_t *mGrallocModule;
-
bool mKeyFrameRequested;
- // Initializes input and output OMX ports with sensible
- // default values.
- void initPorts();
-
// Initializes vpx encoder with available settings.
status_t initEncoder();
@@ -185,30 +226,20 @@ private:
// dtor.
status_t releaseEncoder();
- // Handles port changes with respect to color formats
- OMX_ERRORTYPE internalSetFormatParams(
- const OMX_VIDEO_PARAM_PORTFORMATTYPE* format);
-
- // Verifies the component role tried to be set to this OMX component is
- // strictly video_encoder.vp8
- OMX_ERRORTYPE internalSetRoleParams(
- const OMX_PARAM_COMPONENTROLETYPE* role);
+ // Get current encode flags
+ vpx_enc_frame_flags_t getEncodeFlags();
// Updates bitrate to reflect port settings.
OMX_ERRORTYPE internalSetBitrateParams(
const OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
- // Handles port definition changes.
- OMX_ERRORTYPE internalSetPortParams(
- const OMX_PARAM_PORTDEFINITIONTYPE* port);
-
// Handles vp8 specific parameters.
OMX_ERRORTYPE internalSetVp8Params(
const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
- // Updates encoder profile
- OMX_ERRORTYPE internalSetProfileLevel(
- const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel);
+ // Handles Android vp8 specific parameters.
+ OMX_ERRORTYPE internalSetAndroidVp8Params(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams);
DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder);
};
diff --git a/media/libstagefright/codecs/on2/h264dec/Android.mk b/media/libstagefright/codecs/on2/h264dec/Android.mk
index bf03ad9..e63b6b1 100644
--- a/media/libstagefright/codecs/on2/h264dec/Android.mk
+++ b/media/libstagefright/codecs/on2/h264dec/Android.mk
@@ -94,6 +94,8 @@ ifeq ($(TARGET_ARCH),arm)
LOCAL_C_INCLUDES += $(LOCAL_PATH)/./omxdl/arm_neon/api \
$(LOCAL_PATH)/./omxdl/arm_neon/vc/api \
$(LOCAL_PATH)/./omxdl/arm_neon/vc/m4p10/api
+ # h264bsdWriteMacroblock.S does not compile with Clang.
+ LOCAL_CLANG_ASFLAGS_arm += -no-integrated-as
endif
endif
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index 7ddb13c..6b8b395 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -58,16 +58,17 @@ SoftAVC::SoftAVC(
320 /* width */, 240 /* height */, callbacks, appData, component),
mHandle(NULL),
mInputBufferCount(0),
- mPictureSize(mWidth * mHeight * 3 / 2),
mFirstPicture(NULL),
mFirstPictureId(-1),
mPicId(0),
mHeadersDecoded(false),
mEOSStatus(INPUT_DATA_AVAILABLE),
mSignalledError(false) {
+ const size_t kMinCompressionRatio = 2;
+ const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
initPorts(
- kNumInputBuffers, 8192 /* inputBufferSize */,
- kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC);
+ kNumInputBuffers, kMaxOutputBufferSize / kMinCompressionRatio /* minInputBufferSize */,
+ kNumOutputBuffers, MEDIA_MIMETYPE_VIDEO_AVC, kMinCompressionRatio);
CHECK_EQ(initDecoder(), (status_t)OK);
}
@@ -98,7 +99,7 @@ status_t SoftAVC::initDecoder() {
return UNKNOWN_ERROR;
}
-void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
+void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mOutputPortSettingsChange != NONE) {
return;
}
@@ -118,7 +119,7 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
}
H264SwDecRet ret = H264SWDEC_PIC_RDY;
- bool portSettingsChanged = false;
+ bool portWillReset = false;
while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
&& outQueue.size() == kNumOutputBuffers) {
@@ -161,17 +162,14 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
H264SwDecInfo decoderInfo;
CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);
- if (handlePortSettingChangeEvent(&decoderInfo)) {
- portSettingsChanged = true;
- }
-
- if (decoderInfo.croppingFlag &&
- handleCropRectEvent(&decoderInfo.cropParams)) {
- portSettingsChanged = true;
- }
+ SoftVideoDecoderOMXComponent::CropSettingsMode cropSettingsMode =
+ handleCropParams(decoderInfo);
+ handlePortSettingsChange(
+ &portWillReset, decoderInfo.picWidth, decoderInfo.picHeight,
+ cropSettingsMode);
}
} else {
- if (portSettingsChanged) {
+ if (portWillReset) {
if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
== H264SWDEC_PIC_RDY) {
@@ -199,8 +197,7 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
inInfo->mOwnedByUs = false;
notifyEmptyBufferDone(inHeader);
- if (portSettingsChanged) {
- portSettingsChanged = false;
+ if (portWillReset) {
return;
}
@@ -215,44 +212,34 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
}
}
-bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) {
- if (mWidth != info->picWidth || mHeight != info->picHeight) {
- mWidth = info->picWidth;
- mHeight = info->picHeight;
- mPictureSize = mWidth * mHeight * 3 / 2;
- updatePortDefinitions();
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- return true;
+SoftVideoDecoderOMXComponent::CropSettingsMode SoftAVC::handleCropParams(
+ const H264SwDecInfo& decInfo) {
+ if (!decInfo.croppingFlag) {
+ return kCropUnSet;
}
- return false;
-}
-
-bool SoftAVC::handleCropRectEvent(const CropParams *crop) {
- if (mCropLeft != crop->cropLeftOffset ||
- mCropTop != crop->cropTopOffset ||
- mCropWidth != crop->cropOutWidth ||
- mCropHeight != crop->cropOutHeight) {
- mCropLeft = crop->cropLeftOffset;
- mCropTop = crop->cropTopOffset;
- mCropWidth = crop->cropOutWidth;
- mCropHeight = crop->cropOutHeight;
-
- notify(OMX_EventPortSettingsChanged, 1,
- OMX_IndexConfigCommonOutputCrop, NULL);
-
- return true;
+ const CropParams& crop = decInfo.cropParams;
+ if (mCropLeft == crop.cropLeftOffset &&
+ mCropTop == crop.cropTopOffset &&
+ mCropWidth == crop.cropOutWidth &&
+ mCropHeight == crop.cropOutHeight) {
+ return kCropSet;
}
- return false;
+
+ mCropLeft = crop.cropLeftOffset;
+ mCropTop = crop.cropTopOffset;
+ mCropWidth = crop.cropOutWidth;
+ mCropHeight = crop.cropOutHeight;
+ return kCropChanged;
}
void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
CHECK(mFirstPicture == NULL);
mFirstPictureId = picId;
- mFirstPicture = new uint8_t[mPictureSize];
- memcpy(mFirstPicture, data, mPictureSize);
+ uint32_t pictureSize = mWidth * mHeight * 3 / 2;
+ mFirstPicture = new uint8_t[pictureSize];
+ memcpy(mFirstPicture, data, pictureSize);
}
void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
@@ -263,9 +250,17 @@ void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
outHeader->nTimeStamp = header->nTimeStamp;
outHeader->nFlags = header->nFlags;
- outHeader->nFilledLen = mPictureSize;
- memcpy(outHeader->pBuffer + outHeader->nOffset,
- data, mPictureSize);
+ outHeader->nFilledLen = mWidth * mHeight * 3 / 2;
+
+ uint8_t *dst = outHeader->pBuffer + outHeader->nOffset;
+ const uint8_t *srcY = data;
+ const uint8_t *srcU = srcY + mWidth * mHeight;
+ const uint8_t *srcV = srcU + mWidth * mHeight / 4;
+ size_t srcYStride = mWidth;
+ size_t srcUStride = mWidth / 2;
+ size_t srcVStride = srcUStride;
+ copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
+
mPicToHeaderMap.removeItem(picId);
delete header;
outInfo->mOwnedByUs = false;
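The plane pointers set up in drainOneOutputBuffer() above follow the contiguous YUV420 planar layout of the decoder's output; for an assumed 320x240 picture (mWidth = 320, mHeight = 240) the arithmetic works out as:

    // Worked example, assuming a 320x240 picture:
    //   srcY = data                 luma, 320*240  = 76800 bytes, stride 320
    //   srcU = data + 76800         Cb,   76800/4  = 19200 bytes, stride 160
    //   srcV = data + 96000         Cr,             19200 bytes, stride 160
    //   outHeader->nFilledLen       = 320*240*3/2  = 115200 bytes
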
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
index ee69926..069107d 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
@@ -55,8 +55,6 @@ private:
size_t mInputBufferCount;
- uint32_t mPictureSize;
-
uint8_t *mFirstPicture;
int32_t mFirstPictureId;
@@ -75,8 +73,7 @@ private:
void drainAllOutputBuffers(bool eos);
void drainOneOutputBuffer(int32_t picId, uint8_t *data);
void saveFirstOutputBuffer(int32_t pidId, uint8_t *data);
- bool handleCropRectEvent(const CropParams* crop);
- bool handlePortSettingChangeEvent(const H264SwDecInfo *info);
+ CropSettingsMode handleCropParams(const H264SwDecInfo& decInfo);
DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
};
diff --git a/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h b/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h
index fe112bc..fe112bc 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h
+++ b/media/libstagefright/codecs/on2/h264dec/inc/H264SwDecApi.h
diff --git a/media/libstagefright/codecs/on2/h264dec/inc/basetype.h b/media/libstagefright/codecs/on2/h264dec/inc/basetype.h
index 63d5653..63d5653 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/inc/basetype.h
+++ b/media/libstagefright/codecs/on2/h264dec/inc/basetype.h
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM.h
index 2ed86a4..fbb97e2 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_BitDec_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_BitDec_s.h
index abb98fc..d5866fa 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_BitDec_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_BitDec_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armCOMM_BitDec_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -667,4 +681,4 @@ BitCount SETS "$RBitCount"
MEND
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_Bitstream.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_Bitstream.h
index 4f9bc3b..576b66d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_Bitstream.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_Bitstream.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_Bitstream.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCTTable.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCTTable.h
index d5db32f..223684e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCTTable.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCTTable.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCT_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCT_s.h
index 03f7137..6a7d24f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCT_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_IDCT_s.h
@@ -1,11 +1,19 @@
;//
-;// This confidential and proprietary software may be used only as
-;// authorised by a licensing agreement from ARM Limited
-;// (C) COPYRIGHT 2004 ARM Limited
-;// ALL RIGHTS RESERVED
-;// The entire notice above must be reproduced on all authorised
-;// copies and copies may only be made to the extent permitted
-;// by a licensing agreement from ARM Limited.
+;// Copyright (C) 2004 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// IDCT_s.s
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_MaskTable.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_MaskTable.h
index b5da9dc..5246f15 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_MaskTable.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_MaskTable.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_MaskTable.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_s.h
index 2df1fc8..04735a9 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armCOMM_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armCOMM_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armOMX.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armOMX.h
index f629f72..e7c0c26 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armOMX.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/armOMX.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/*
*
* File Name: armOMX_ReleaseVersion.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes.h
index 8b295a6..912cb0d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes.h
@@ -32,6 +32,7 @@
#define _OMXTYPES_H_
#include <limits.h>
+#include <stdint.h>
#define OMX_IN
#define OMX_OUT
@@ -75,64 +76,22 @@ typedef enum {
/* OMX_U8 */
-#if UCHAR_MAX == 0xff
-typedef unsigned char OMX_U8;
-#elif USHRT_MAX == 0xff
-typedef unsigned short int OMX_U8;
-#else
-#error OMX_U8 undefined
-#endif
-
+typedef uint8_t OMX_U8;
/* OMX_S8 */
-#if SCHAR_MAX == 0x7f
-typedef signed char OMX_S8;
-#elif SHRT_MAX == 0x7f
-typedef signed short int OMX_S8;
-#else
-#error OMX_S8 undefined
-#endif
-
+typedef int8_t OMX_S8;
/* OMX_U16 */
-#if USHRT_MAX == 0xffff
-typedef unsigned short int OMX_U16;
-#elif UINT_MAX == 0xffff
-typedef unsigned int OMX_U16;
-#else
-#error OMX_U16 undefined
-#endif
-
+typedef uint16_t OMX_U16;
/* OMX_S16 */
-#if SHRT_MAX == 0x7fff
-typedef signed short int OMX_S16;
-#elif INT_MAX == 0x7fff
-typedef signed int OMX_S16;
-#else
-#error OMX_S16 undefined
-#endif
-
+typedef int16_t OMX_S16;
/* OMX_U32 */
-#if UINT_MAX == 0xffffffff
-typedef unsigned int OMX_U32;
-#elif LONG_MAX == 0xffffffff
-typedef unsigned long int OMX_U32;
-#else
-#error OMX_U32 undefined
-#endif
-
+typedef uint32_t OMX_U32;
/* OMX_S32 */
-#if INT_MAX == 0x7fffffff
-typedef signed int OMX_S32;
-#elif LONG_MAX == 0x7fffffff
-typedef long signed int OMX_S32;
-#else
-#error OMX_S32 undefined
-#endif
-
+typedef int32_t OMX_S32;
/* OMX_U64 & OMX_S64 */
#if defined( _WIN32 ) || defined ( _WIN64 )
@@ -143,15 +102,14 @@ typedef long signed int OMX_S32;
#define OMX_MAX_S64 (0x7FFFFFFFFFFFFFFFi64)
#define OMX_MAX_U64 (0xFFFFFFFFFFFFFFFFi64)
#else
- typedef long long OMX_S64; /** Signed 64-bit integer */
- typedef unsigned long long OMX_U64; /** Unsigned 64-bit integer */
+ typedef int64_t OMX_S64; /** Signed 64-bit integer */
+ typedef uint64_t OMX_U64; /** Unsigned 64-bit integer */
#define OMX_MIN_S64 (0x8000000000000000LL)
#define OMX_MIN_U64 (0x0000000000000000LL)
#define OMX_MAX_S64 (0x7FFFFFFFFFFFFFFFLL)
#define OMX_MAX_U64 (0xFFFFFFFFFFFFFFFFLL)
#endif
-
/* OMX_SC8 */
typedef struct
{
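
(Illustrative aside, not part of the patch: the omxtypes.h hunk above replaces the UCHAR_MAX/USHRT_MAX/UINT_MAX preprocessor probes with the fixed-width types from <stdint.h>. A minimal C11 sketch of the resulting mapping, with compile-time width checks standing in for what the old limits.h probes guaranteed; the standalone test program and assertion messages are assumptions for illustration only.)

/* Sketch only: mirrors the post-patch OMX typedefs and checks their widths
 * at compile time (requires a C11 compiler for static_assert). */
#include <stdint.h>
#include <assert.h>

typedef uint8_t  OMX_U8;
typedef int8_t   OMX_S8;
typedef uint16_t OMX_U16;
typedef int16_t  OMX_S16;
typedef uint32_t OMX_U32;
typedef int32_t  OMX_S32;
typedef int64_t  OMX_S64;
typedef uint64_t OMX_U64;

/* The removed #if/#elif ladders existed only to guarantee these widths;
 * with <stdint.h> the width is part of the type itself. */
static_assert(sizeof(OMX_U8)  == 1, "OMX_U8 must be 8 bits");
static_assert(sizeof(OMX_S16) == 2, "OMX_S16 must be 16 bits");
static_assert(sizeof(OMX_U32) == 4, "OMX_U32 must be 32 bits");
static_assert(sizeof(OMX_S64) == 8, "OMX_S64 must be 64 bits");

int main(void) { return 0; }
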
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes_s.h
index 8d24b65..d41a037 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/api/omxtypes_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxtypes_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/build_vc.pl b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/build_vc.pl
index 1ae7005..5d672b3 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/build_vc.pl
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/build_vc.pl
@@ -1,3 +1,19 @@
+#
+# Copyright (C) 2007-2008 ARM Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
#!/usr/bin/perl
#
#
@@ -6,7 +22,6 @@
# Revision: 9641
# Date: Thursday, February 7, 2008
#
-# (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
#
#
#
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM.c
index e572a89..e8dbf41 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_Bitstream.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_Bitstream.c
index 9ef9319..99f53ca 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_Bitstream.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_Bitstream.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_Bitstream.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_IDCTTable.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_IDCTTable.c
index 9e4679c..6f0b87f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_IDCTTable.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_IDCTTable.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_IDCTTable.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_MaskTable.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_MaskTable.c
index 3241db2..906a8e5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_MaskTable.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/src/armCOMM_MaskTable.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVC.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVC.h
index 7fa7716..6dbe8b6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVC.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVC.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVCCOMM_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVCCOMM_s.h
index 7f0a9b8..a9d4644 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVCCOMM_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/api/armVCCOMM_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCCOMM_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -69,4 +83,4 @@
ENDIF ;// ARMACCOMM_S_H
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s
index 02b4b08..f5d2271 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCCOMM_ExpandFrame_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -186,4 +200,4 @@ End
ENDIF ;//ARM1136JS
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/api/armVCM4P10_CAVLCTables.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/api/armVCM4P10_CAVLCTables.h
index 4340f2a..d43d86b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/api/armVCM4P10_CAVLCTables.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/api/armVCM4P10_CAVLCTables.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s
index b2cd9d1..198f0ac 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_Average_4x_Align_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -219,4 +233,4 @@ End3
ENDIF
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_CAVLCTables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_CAVLCTables.c
index 17fe518..3b84c8d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_CAVLCTables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_CAVLCTables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s
index dcbcd00..51dcb92 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DeblockingChroma_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -17,4 +31,4 @@
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s
index 14b37fe..2085233 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DeblockingLuma_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -363,4 +377,4 @@ t11 RN 9
ENDIF
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s
index ac448a0..33638bf 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DecodeCoeffsToPair_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DequantTables_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DequantTables_s.s
index b16f188..afe07b5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DequantTables_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_DequantTables_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DequantTables_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -120,4 +134,4 @@
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s
index 82b9542..ffe123d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_Align_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
index bc0b6ec..c9a89fd 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -146,4 +160,4 @@ Copy4x4End
ENDIF
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
index 66cfe5e..98b67eb 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -175,4 +189,4 @@ End2
ENDIF
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
index 851ff6a..523eace 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
index 2f48e13..2e7c5c7 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
index 6690ced..81af75a 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
index 007cd0d..906cbf3 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -182,4 +196,4 @@ End
ENDIF
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s
index b1ad17c..35bf67c 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_Interpolate_Chroma_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_QuantTables_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_QuantTables_s.s
index f962f70..938c719 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_QuantTables_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_QuantTables_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_QuantTables_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;// Description:
@@ -71,4 +85,4 @@
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s
index 241d188..e5372e1 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_TransformResidual4x4_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -404,4 +418,4 @@ End
;// Guarding implementation by the processor name
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s
index ad16d9c..d02b4f3 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_UnpackBlock4x4_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -89,4 +103,4 @@ unpackLoop
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
index c2e6b60..34adea8 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
index 6023862..8b47dc2 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
index a19f277..2cd65ca 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
index 99bb4ce..9f9706b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
index 2b71486..3187f2b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
@@ -1,5 +1,19 @@
;//
-;// (c) Copyright 2007 ARM Limited. All Rights Reserved.
+;// Copyright (C) 2007 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// Description:
;// H.264 inverse quantize and transform module
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
index 6d960f0..d940418 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
index 00c8354..2dc9369 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
index 1b84080..e4fbfa4 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -328,4 +342,4 @@ ExitLoopY
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
index 417ddc2..6adf27b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -547,4 +561,4 @@ ExitLoopY
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
index de835bd..63d185f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InterpolateChroma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s
index cf611a3..cb3b4e2 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_InterpolateLuma_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -423,4 +437,4 @@ EndOfInterpolation
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s
index 34fedd8..09b4cf6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_PredictIntraChroma_8x8_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s
index 1557208..0c0cba7 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_PredictIntra_16x16_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s
index a90f460..112139f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_PredictIntra_4x4_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s
index 53597a8..b83d7f0 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_TransformDequantChromaDCFromPair_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s
index 73caec2..6974cd1 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_TransformDequantLumaDCFromPair_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -466,4 +480,4 @@ QPR5 RN 5
ENDIF ;//ARM1136JS
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
index 22115d3..359e752 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Huff_Tables_VLC.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
index d5f865c..286ba04 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_ZigZag_Tables.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Clip8_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Clip8_s.s
index 7801e57..241d441 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Clip8_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Clip8_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
; /**
; *
; * File Name: armVCM4P2_Clip8_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
index 9e30900..96f5bed 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
index ba4d058..04d86ed 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Huff_Tables_VLC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Lookup_Tables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Lookup_Tables.c
index 25cf8db..04739a5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Lookup_Tables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Lookup_Tables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Lookup_Tables.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_SetPredDir_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_SetPredDir_s.s
index 3f92d85..d0d13d1 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_SetPredDir_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_SetPredDir_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P2_SetPredDir_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
index ed17f9b..b647559 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Zigzag_Tables.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
index b63d295..127772a 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeBlockCoef_Inter.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
index c609a60..f24fc07 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeBlockCoef_Intra.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s
index a1861da..65a01d7 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
; **********
; *
; * File Name: omxVCM4P2_DecodePadMV_PVOP_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s
index c43b253..5ee33d8 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_DecodeVLCZigzag_Inter_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
index 166729e..9d5940c 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
index d19cb13..266a62b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s
index a4bfa71..92acd51 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P2_FindMVpred_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -191,4 +205,4 @@ BlkEnd
M_END
ENDIF ;// ARM1136JS :LOR: CortexA8
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s
index bfeb540..e4f91fb 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P2_IDCT8x8blk_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s
index 20965bf..8ac6ff9 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P2_MCReconBlock_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s
index 213444a..116c81d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
; **********
; *
; * File Name: omxVCM4P2_PredictReconCoefIntra_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s
index c9591cb..d57160f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_QuantInvInter_I_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s
index 6328e01..bd82da4 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm11/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_QuantInvIntra_I_s.s
@@ -5,7 +20,6 @@
; * Revision: 9641
; * Date: Thursday, February 7, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM.h
index 64c1958..91e38b8 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM.h
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_BitDec_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_BitDec_s.h
index c738f72..56344e3 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_BitDec_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_BitDec_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armCOMM_BitDec_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -667,4 +681,4 @@ BitCount SETS "$RBitCount"
MEND
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Bitstream.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Bitstream.h
index b699034..8c0ef37 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Bitstream.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Bitstream.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_Bitstream.h
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCTTable.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCTTable.h
index e0cfdaa..d761f61 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCTTable.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCTTable.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCT_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCT_s.h
index 0baa087..9130223 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCT_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_IDCT_s.h
@@ -1,11 +1,19 @@
;//
-;// This confidential and proprietary software may be used only as
-;// authorised by a licensing agreement from ARM Limited
-;// (C) COPYRIGHT 2004 ARM Limited
-;// ALL RIGHTS RESERVED
-;// The entire notice above must be reproduced on all authorised
-;// copies and copies may only be made to the extent permitted
-;// by a licensing agreement from ARM Limited.
+;// Copyright (C) 2004 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// IDCT_s.s
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_MaskTable.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_MaskTable.h
index 51118fd..5ffc835 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_MaskTable.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_MaskTable.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_MaskTable.h
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Version.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Version.h
index 41b3e1e..41b3e1e 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Version.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_Version.h
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_s.h
index 0956bd1..321d2d3 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armCOMM_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armCOMM_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armOMX.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armOMX.h
index 7a68d14..303abd9 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armOMX.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/armOMX.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/*
*
* File Name: armOMX_ReleaseVersion.h
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes.h
index 8b295a6..912cb0d 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes.h
@@ -32,6 +32,7 @@
#define _OMXTYPES_H_
#include <limits.h>
+#include <stdint.h>
#define OMX_IN
#define OMX_OUT
@@ -75,64 +76,22 @@ typedef enum {
/* OMX_U8 */
-#if UCHAR_MAX == 0xff
-typedef unsigned char OMX_U8;
-#elif USHRT_MAX == 0xff
-typedef unsigned short int OMX_U8;
-#else
-#error OMX_U8 undefined
-#endif
-
+typedef uint8_t OMX_U8;
/* OMX_S8 */
-#if SCHAR_MAX == 0x7f
-typedef signed char OMX_S8;
-#elif SHRT_MAX == 0x7f
-typedef signed short int OMX_S8;
-#else
-#error OMX_S8 undefined
-#endif
-
+typedef int8_t OMX_S8;
/* OMX_U16 */
-#if USHRT_MAX == 0xffff
-typedef unsigned short int OMX_U16;
-#elif UINT_MAX == 0xffff
-typedef unsigned int OMX_U16;
-#else
-#error OMX_U16 undefined
-#endif
-
+typedef uint16_t OMX_U16;
/* OMX_S16 */
-#if SHRT_MAX == 0x7fff
-typedef signed short int OMX_S16;
-#elif INT_MAX == 0x7fff
-typedef signed int OMX_S16;
-#else
-#error OMX_S16 undefined
-#endif
-
+typedef int16_t OMX_S16;
/* OMX_U32 */
-#if UINT_MAX == 0xffffffff
-typedef unsigned int OMX_U32;
-#elif LONG_MAX == 0xffffffff
-typedef unsigned long int OMX_U32;
-#else
-#error OMX_U32 undefined
-#endif
-
+typedef uint32_t OMX_U32;
/* OMX_S32 */
-#if INT_MAX == 0x7fffffff
-typedef signed int OMX_S32;
-#elif LONG_MAX == 0x7fffffff
-typedef long signed int OMX_S32;
-#else
-#error OMX_S32 undefined
-#endif
-
+typedef int32_t OMX_S32;
/* OMX_U64 & OMX_S64 */
#if defined( _WIN32 ) || defined ( _WIN64 )
@@ -143,15 +102,14 @@ typedef long signed int OMX_S32;
#define OMX_MAX_S64 (0x7FFFFFFFFFFFFFFFi64)
#define OMX_MAX_U64 (0xFFFFFFFFFFFFFFFFi64)
#else
- typedef long long OMX_S64; /** Signed 64-bit integer */
- typedef unsigned long long OMX_U64; /** Unsigned 64-bit integer */
+ typedef int64_t OMX_S64; /** Signed 64-bit integer */
+ typedef uint64_t OMX_U64; /** Unsigned 64-bit integer */
#define OMX_MIN_S64 (0x8000000000000000LL)
#define OMX_MIN_U64 (0x0000000000000000LL)
#define OMX_MAX_S64 (0x7FFFFFFFFFFFFFFFLL)
#define OMX_MAX_U64 (0xFFFFFFFFFFFFFFFFLL)
#endif
-
/* OMX_SC8 */
typedef struct
{
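For reference, the omxtypes.h hunk above amounts to mapping the OMX integer types directly onto the exact-width types that C99's <stdint.h> guarantees, instead of probing UCHAR_MAX/SHRT_MAX/INT_MAX at preprocessing time. A minimal sketch of the resulting mapping follows; the _Static_assert width checks are illustrative only (C11) and are not part of the header:

    /* Sketch of the <stdint.h>-based OMX type mapping introduced by the hunk above. */
    #include <stdint.h>

    typedef uint8_t  OMX_U8;    /* previously chosen via UCHAR_MAX  */
    typedef int8_t   OMX_S8;    /* previously chosen via SCHAR_MAX  */
    typedef uint16_t OMX_U16;   /* previously chosen via USHRT_MAX  */
    typedef int16_t  OMX_S16;   /* previously chosen via SHRT_MAX   */
    typedef uint32_t OMX_U32;   /* previously chosen via UINT_MAX   */
    typedef int32_t  OMX_S32;   /* previously chosen via INT_MAX    */
    typedef int64_t  OMX_S64;   /* Signed 64-bit integer            */
    typedef uint64_t OMX_U64;   /* Unsigned 64-bit integer          */

    /* Illustrative compile-time checks (not in the header): confirm the widths
     * the OpenMAX DL API expects on the target toolchain. */
    _Static_assert(sizeof(OMX_U8)  == 1, "OMX_U8 must be 8 bits");
    _Static_assert(sizeof(OMX_U16) == 2, "OMX_U16 must be 16 bits");
    _Static_assert(sizeof(OMX_U32) == 4, "OMX_U32 must be 32 bits");
    _Static_assert(sizeof(OMX_S64) == 8, "OMX_S64 must be 64 bits");
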
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes_s.h
index 48703d1..6e742c7 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/api/omxtypes_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxtypes_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/build_vc.pl b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/build_vc.pl
index 649e74c..6a206c0 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/build_vc.pl
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/build_vc.pl
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
#!/usr/bin/perl
#
#
@@ -6,7 +22,6 @@
# Revision: 12290
# Date: Wednesday, April 9, 2008
#
-# (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
#
#
#
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM.c
index e572a89..e8dbf41 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_Bitstream.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_Bitstream.c
index 9ef9319..99f53ca 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_Bitstream.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_Bitstream.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_Bitstream.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_IDCTTable.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_IDCTTable.c
index 3f5e279..85d4c67 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_IDCTTable.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_IDCTTable.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_IDCTTable.c
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_MaskTable.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_MaskTable.c
index 09f88c3..f169a16 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_MaskTable.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/src/armCOMM_MaskTable.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVC.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVC.h
index 35b510b..1d37a5d 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVC.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVC.h
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVCCOMM_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVCCOMM_s.h
index 32a0166..cfc2a3b 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVCCOMM_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/armVCCOMM_s.h
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCCOMM_s.h
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -69,4 +83,4 @@
ENDIF ;// ARMACCOMM_S_H
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC.h
index 7b3cc72..7b3cc72 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC.h
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC_s.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC_s.h
index 89f3040..89f3040 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC_s.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/api/omxVC_s.h
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s
index 5c5b7d8..3d6b669 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/comm/src/omxVCCOMM_ExpandFrame_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCCOMM_ExpandFrame_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -233,4 +247,4 @@ End
- END \ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/api/armVCM4P10_CAVLCTables.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/api/armVCM4P10_CAVLCTables.h
index 547a2d9..7dde9a7 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/api/armVCM4P10_CAVLCTables.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/api/armVCM4P10_CAVLCTables.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s
index 4f0892d..5f3eb9b 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Average_4x_Align_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_Average_4x_Align_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -219,4 +233,4 @@ End3
ENDIF
END
- \ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_CAVLCTables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_CAVLCTables.c
index 137495d..bb4bd9e 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_CAVLCTables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_CAVLCTables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s
index 4c3a77c..e3813d3 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingChroma_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DeblockingChroma_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s
index 0afe4fd..bcc01dd 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DeblockingLuma_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DeblockingLuma_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s
index 10a89e9..6e3a0d5 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DecodeCoeffsToPair_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DequantTables_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DequantTables_s.s
index 2761600..dce8c89 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DequantTables_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_DequantTables_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_DequantTables_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -120,4 +134,4 @@
- END
\ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s
index 6e912d7..20b3e22 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Align_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_Align_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
index d275891..1415beb 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_Copy_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -146,4 +160,4 @@ Copy4x4End
ENDIF
END
-
\ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
index 4e5a39d..f5a7326 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -175,4 +189,4 @@ End2
ENDIF
END
-
\ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
index d1684cb..4d86782 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
index 7bc091f..3bc9534 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
index babe8ad..ea1c345 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
index 89c90aa..5414d47 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s
index 0f0ec78..afb9565 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_Interpolate_Chroma_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_Interpolate_Chroma_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 9641
;// Date: Thursday, February 7, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_QuantTables_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_QuantTables_s.s
index 7e2642b..8cd33a4 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_QuantTables_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_QuantTables_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_QuantTables_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;// Description:
@@ -71,4 +85,4 @@
END
-
\ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s
index ee9c339..9e16e49 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_TransformResidual4x4_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_TransformResidual4x4_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -183,4 +197,4 @@ End
ENDIF ;//CortexA8
- END
\ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s
index 4c52e22..a24c7d5 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/armVCM4P10_UnpackBlock4x4_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P10_UnpackBlock4x4_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -89,4 +103,4 @@ unpackLoop
END
-
\ No newline at end of file
+
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
index 40d4d5e..0a6448d 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
index 619365f..7b89be7 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
index 4e871bf..950f348 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
index b29e576..5e78b4c 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
index 485a488..4787982 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
index 4606197..a099dcb 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
index 18e6c1d..bf2152c 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
index 0c3f4f2..5678670 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
index e6fbb34..d2a134e 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
index 3ce41be..c6b3f41 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InterpolateChroma.c
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s
index 942ebc6..9f8f69e 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_InterpolateLuma_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_InterpolateLuma_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s
index 3a60705..1ff418f 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_PredictIntraChroma_8x8_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s
index e9c0eee..de331f4 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_PredictIntra_16x16_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s
index 39eb8a4..b5780ef 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_PredictIntra_4x4_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s
index e394339..5981795 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_TransformDequantChromaDCFromPair_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s
index 2529959..d8c2431 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P10_TransformDequantLumaDCFromPair_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -261,4 +275,4 @@ QPR5 RN 5
ENDIF ;//ARM1136JS
- END
\ No newline at end of file
+ END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Average_4x_Align_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Average_4x_Align_unsafe_s.S
index aca2df4..46e0018 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Average_4x_Align_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Average_4x_Align_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingChroma_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingChroma_unsafe_s.S
index b9ee221..ca64a02 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingChroma_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingChroma_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingLuma_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingLuma_unsafe_s.S
index 47f3d44..193bc5e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingLuma_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DeblockingLuma_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S
index 073dbba..8e0db37 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DecodeCoeffsToPair_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
@@ -10,6 +25,22 @@
.fpu neon
.text
+ .extern armVCM4P10_CAVLCCoeffTokenTables
+ .extern armVCM4P10_SuffixToLevel
+ .extern armVCM4P10_CAVLCTotalZeros2x2Tables
+ .extern armVCM4P10_CAVLCTotalZeroTables
+ .extern armVCM4P10_CAVLCRunBeforeTables
+ .extern armVCM4P10_ZigZag_2x2
+ .extern armVCM4P10_ZigZag_4x4
+
+ .hidden armVCM4P10_CAVLCCoeffTokenTables
+ .hidden armVCM4P10_SuffixToLevel
+ .hidden armVCM4P10_CAVLCTotalZeros2x2Tables
+ .hidden armVCM4P10_CAVLCTotalZeroTables
+ .hidden armVCM4P10_CAVLCRunBeforeTables
+ .hidden armVCM4P10_ZigZag_2x2
+ .hidden armVCM4P10_ZigZag_4x4
+
.global armVCM4P10_DecodeCoeffsToPair
.func armVCM4P10_DecodeCoeffsToPair
armVCM4P10_DecodeCoeffsToPair:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S
index 44eb428..6febf2f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_DequantTables_s.S
@@ -1,5 +1,19 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*/
@@ -20,6 +34,14 @@
.global armVCM4P10_QPModuloTable
.global armVCM4P10_VMatrixU16
+ .hidden armVCM4P10_QPDivTable
+ .hidden armVCM4P10_VMatrixQPModTable
+ .hidden armVCM4P10_PosToVCol4x4
+ .hidden armVCM4P10_PosToVCol2x2
+ .hidden armVCM4P10_VMatrix
+ .hidden armVCM4P10_QPModuloTable
+ .hidden armVCM4P10_VMatrixU16
+
armVCM4P10_PosToVCol4x4:
.byte 0, 2, 0, 2
.byte 2, 1, 2, 1
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Align_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Align_unsafe_s.S
index 37bc69b..7206d76 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Align_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Align_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Copy_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Copy_unsafe_s.S
index fe92201..e41d662 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Copy_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_Copy_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.S
index 544abe8..c8f5cda 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_DiagCopy_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.S
index a330972..f5868c0 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagHorVer4x4_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.S
index 991c33f..065995d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfDiagVerHor4x4_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.S
index 40e141b..1e2d16b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfHor4x4_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.S
index 955846f..c7def2a 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_InterpolateLuma_HalfVer4x4_unsafe_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Interpolate_Chroma_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Interpolate_Chroma_s.S
index 8599cab..2f4293f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Interpolate_Chroma_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_Interpolate_Chroma_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_QuantTables_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_QuantTables_s.S
index f5d6d1f..f4e6010 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_QuantTables_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_QuantTables_s.S
@@ -1,5 +1,19 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_TransformResidual4x4_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_TransformResidual4x4_s.S
index c24d717..d4cedb5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_TransformResidual4x4_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_TransformResidual4x4_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_UnpackBlock4x4_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_UnpackBlock4x4_s.S
index c552f8d..1652dc6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_UnpackBlock4x4_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/armVCM4P10_UnpackBlock4x4_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DeblockLuma_I.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DeblockLuma_I.S
index ba61059..90b0947 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DeblockLuma_I.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DeblockLuma_I.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.S
index bc0f7fa..4a74594 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_DequantTransformResidualFromPairAndAdd_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.S
index 79ba538..f20fb78 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_HorEdge_I_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.S
index dcdddbe..003526e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingChroma_VerEdge_I_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.S
index 9755899..7ddc42e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_HorEdge_I_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.S
index 66cc32e..f71aceb 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_FilterDeblockingLuma_VerEdge_I_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_InterpolateLuma_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_InterpolateLuma_s.S
index 76c3d7d..000fbeb 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_InterpolateLuma_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_InterpolateLuma_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntraChroma_8x8_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntraChroma_8x8_s.S
index a896a3a..4e2cff6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntraChroma_8x8_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntraChroma_8x8_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_16x16_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_16x16_s.S
index 3944f53..c71c93b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_16x16_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_16x16_s.S
@@ -1,5 +1,19 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_4x4_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_4x4_s.S
index 6646b7f..cd5d356 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_4x4_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_PredictIntra_4x4_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantChromaDCFromPair_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantChromaDCFromPair_s.S
index 7ba3bd6..5570892 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantChromaDCFromPair_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantChromaDCFromPair_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantLumaDCFromPair_s.S b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantLumaDCFromPair_s.S
index 640f096..5b6eee0 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantLumaDCFromPair_s.S
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p10/src_gcc/omxVCM4P10_TransformDequantLumaDCFromPair_s.S
@@ -1,5 +1,20 @@
/*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+/*
*
*/
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
index 74b5505..6cbc5ff 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Huff_Tables_VLC.h
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
index e95203a..0d64a68 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_ZigZag_Tables.h
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Clip8_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Clip8_s.s
index 95fe6d2..2f830fc 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Clip8_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Clip8_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
; /**
; *
; * File Name: armVCM4P2_Clip8_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
index e4a7f33..016e65b 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: armVCM4P2_DecodeVLCZigzag_AC_unsafe_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
index 38af975..5a77832 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Huff_Tables_VLC.c
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Lookup_Tables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Lookup_Tables.c
index 6948f80..e915d3c 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Lookup_Tables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Lookup_Tables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Lookup_Tables.c
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_SetPredDir_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_SetPredDir_s.s
index 44f2460..bf3f363 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_SetPredDir_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_SetPredDir_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: armVCM4P2_SetPredDir_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
index 21fa715..719b434 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Zigzag_Tables.c
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
index 796ad6e..95346ad 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeBlockCoef_Inter.c
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
index b28657c..91ec5d2 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeBlockCoef_Intra.c
@@ -5,7 +21,6 @@
* Revision: 12290
* Date: Wednesday, April 9, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s
index cc16f5a..08e9538 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
; **********
; *
; * File Name: omxVCM4P2_DecodePadMV_PVOP_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s
index 7208c21..636dfe4 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_DecodeVLCZigzag_Inter_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
index 9a37ec9..15cc5b4 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_DecodeVLCZigzag_IntraACVLC_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
index 778aaf2..e9fed80 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_DecodeVLCZigzag_IntraDCVLC_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s
index caf7121..9344120 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_FindMVpred_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P2_FindMVpred_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
@@ -191,4 +205,4 @@ BlkEnd
M_END
ENDIF ;// ARM1136JS :LOR: CortexA8
-    END
\ No newline at end of file
+    END
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s
index b5e3d0d..01b925e 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_IDCT8x8blk_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P2_IDCT8x8blk_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s
index dd00df5..3c1aec3 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_MCReconBlock_s.s
@@ -1,11 +1,25 @@
;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
+;//
;//
;// File Name: omxVCM4P2_MCReconBlock_s.s
;// OpenMAX DL: v1.0.2
;// Revision: 12290
;// Date: Wednesday, April 9, 2008
;//
-;// (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
;//
;//
;//
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s
index a73f64a..6b4eb28 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
; **********
; *
; * File Name: omxVCM4P2_PredictReconCoefIntra_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s
index bd0ad1f..744571f 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvInter_I_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_QuantInvInter_I_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s
index e00591f..61a7fd4 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I_s.s
@@ -1,3 +1,18 @@
+;//
+;// Copyright (C) 2007-2008 ARM Limited
+;//
+;// Licensed under the Apache License, Version 2.0 (the "License");
+;// you may not use this file except in compliance with the License.
+;// You may obtain a copy of the License at
+;//
+;// http://www.apache.org/licenses/LICENSE-2.0
+;//
+;// Unless required by applicable law or agreed to in writing, software
+;// distributed under the License is distributed on an "AS IS" BASIS,
+;// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;// See the License for the specific language governing permissions and
+;// limitations under the License.
+;//
;/**
; *
; * File Name: omxVCM4P2_QuantInvIntra_I_s.s
@@ -5,7 +20,6 @@
; * Revision: 12290
; * Date: Wednesday, April 9, 2008
; *
-; * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
; *
; *
; *
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/src/armVC_Version.c b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/src/armVC_Version.c
index 5d93681..5d93681 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/src/armVC_Version.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/arm_neon/vc/src/armVC_Version.c
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM.h
index 2ed86a4..fbb97e2 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM_Bitstream.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM_Bitstream.h
index 4f9bc3b..576b66d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM_Bitstream.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armCOMM_Bitstream.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_Bitstream.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armOMX.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armOMX.h
index f629f72..e7c0c26 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armOMX.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/armOMX.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/*
*
* File Name: armOMX_ReleaseVersion.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/omxtypes.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/omxtypes.h
index 8b295a6..912cb0d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/omxtypes.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/api/omxtypes.h
@@ -32,6 +32,7 @@
#define _OMXTYPES_H_
#include <limits.h>
+#include <stdint.h>
#define OMX_IN
#define OMX_OUT
@@ -75,64 +76,22 @@ typedef enum {
/* OMX_U8 */
-#if UCHAR_MAX == 0xff
-typedef unsigned char OMX_U8;
-#elif USHRT_MAX == 0xff
-typedef unsigned short int OMX_U8;
-#else
-#error OMX_U8 undefined
-#endif
-
+typedef uint8_t OMX_U8;
/* OMX_S8 */
-#if SCHAR_MAX == 0x7f
-typedef signed char OMX_S8;
-#elif SHRT_MAX == 0x7f
-typedef signed short int OMX_S8;
-#else
-#error OMX_S8 undefined
-#endif
-
+typedef int8_t OMX_S8;
/* OMX_U16 */
-#if USHRT_MAX == 0xffff
-typedef unsigned short int OMX_U16;
-#elif UINT_MAX == 0xffff
-typedef unsigned int OMX_U16;
-#else
-#error OMX_U16 undefined
-#endif
-
+typedef uint16_t OMX_U16;
/* OMX_S16 */
-#if SHRT_MAX == 0x7fff
-typedef signed short int OMX_S16;
-#elif INT_MAX == 0x7fff
-typedef signed int OMX_S16;
-#else
-#error OMX_S16 undefined
-#endif
-
+typedef int16_t OMX_S16;
/* OMX_U32 */
-#if UINT_MAX == 0xffffffff
-typedef unsigned int OMX_U32;
-#elif LONG_MAX == 0xffffffff
-typedef unsigned long int OMX_U32;
-#else
-#error OMX_U32 undefined
-#endif
-
+typedef uint32_t OMX_U32;
/* OMX_S32 */
-#if INT_MAX == 0x7fffffff
-typedef signed int OMX_S32;
-#elif LONG_MAX == 0x7fffffff
-typedef long signed int OMX_S32;
-#else
-#error OMX_S32 undefined
-#endif
-
+typedef int32_t OMX_S32;
/* OMX_U64 & OMX_S64 */
#if defined( _WIN32 ) || defined ( _WIN64 )
@@ -143,15 +102,14 @@ typedef long signed int OMX_S32;
#define OMX_MAX_S64 (0x7FFFFFFFFFFFFFFFi64)
#define OMX_MAX_U64 (0xFFFFFFFFFFFFFFFFi64)
#else
- typedef long long OMX_S64; /** Signed 64-bit integer */
- typedef unsigned long long OMX_U64; /** Unsigned 64-bit integer */
+ typedef int64_t OMX_S64; /** Signed 64-bit integer */
+ typedef uint64_t OMX_U64; /** Unsigned 64-bit integer */
#define OMX_MIN_S64 (0x8000000000000000LL)
#define OMX_MIN_U64 (0x0000000000000000LL)
#define OMX_MAX_S64 (0x7FFFFFFFFFFFFFFFLL)
#define OMX_MAX_U64 (0xFFFFFFFFFFFFFFFFLL)
#endif
-
/* OMX_SC8 */
typedef struct
{
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/build_vc.pl b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/build_vc.pl
index f0b43e0..e59cded 100755
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/build_vc.pl
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/build_vc.pl
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
#!/usr/bin/perl
#
#
@@ -6,7 +22,6 @@
# Revision: 9641
# Date: Thursday, February 7, 2008
#
-# (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
#
#
#
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM.c
index e572a89..e8dbf41 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM_Bitstream.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM_Bitstream.c
index 9ef9319..99f53ca 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM_Bitstream.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/src/armCOMM_Bitstream.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armCOMM_Bitstream.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/api/armVC.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/api/armVC.h
index 7fa7716..6dbe8b6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/api/armVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/api/armVC.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVC.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_Average.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_Average.c
index 1e51077..b7b37bf 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_Average.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_Average.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCCOMM_Average.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_SAD.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_SAD.c
index d41ac9a..05b96dc 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_SAD.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/armVCCOMM_SAD.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCCOMM_SAD.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_16x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_16x.c
index 6d1447e..175bca8 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_16x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_16x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_Average_16x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_8x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_8x.c
index 17b1326..2c14f43 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_8x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Average_8x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_Average_8x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock.c
index e559adf..a1f5240 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_ComputeTextureErrorBlock.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock_SAD.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock_SAD.c
index c4731aa..a7f48c9 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock_SAD.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ComputeTextureErrorBlock_SAD.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_ComputeTextureErrorBlock_SAD.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy16x16.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy16x16.c
index 4857024..8e467a4 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy16x16.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy16x16.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_Copy16x16.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy8x8.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy8x8.c
index a4f9dde..3f5969b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy8x8.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_Copy8x8.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_Copy8x8.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ExpandFrame_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ExpandFrame_I.c
index 9536df7..5379fd0 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ExpandFrame_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_ExpandFrame_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_ExpandFrame_I.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_LimitMVToRect.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_LimitMVToRect.c
index af04582..9ba9093 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_LimitMVToRect.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_LimitMVToRect.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_LimitMVToRect.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_16x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_16x.c
index 0f0cedb..83dbbd0 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_16x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_16x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_SAD_16x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_8x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_8x.c
index 1421d99..7bfd1ec 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_8x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/comm/src/omxVCCOMM_SAD_8x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCCOMM_SAD_8x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/api/armVCM4P10_CAVLCTables.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/api/armVCM4P10_CAVLCTables.h
index 8d18a8f..37241ca 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/api/armVCM4P10_CAVLCTables.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/api/armVCM4P10_CAVLCTables.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CAVLCTables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CAVLCTables.c
index f4e36ad..c4a3074 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CAVLCTables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CAVLCTables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CompareMotionCostToMV.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CompareMotionCostToMV.c
index e4bedc2..6611a37 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CompareMotionCostToMV.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_CompareMotionCostToMV.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P10_CompareMotionCostToMV.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DeBlockPixel.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DeBlockPixel.c
index f4fb1d9..c6da8ab 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DeBlockPixel.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DeBlockPixel.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair.c
index 7616add..831d53b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DecodeCoeffsToPair.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DequantTables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DequantTables.c
index d9c2541..ad6cef3 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DequantTables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_DequantTables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_FwdTransformResidual4x4.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_FwdTransformResidual4x4.c
index 93d54c3..17d6c0f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_FwdTransformResidual4x4.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_FwdTransformResidual4x4.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfDiag_Luma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfDiag_Luma.c
index 8732f4f..ce9df49 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfDiag_Luma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfDiag_Luma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P10_InterpolateHalfDiag_Luma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfHor_Luma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfHor_Luma.c
index 89c0079..15462b2 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfHor_Luma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfHor_Luma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P10_InterpolateHalfHor_Luma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfVer_Luma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfVer_Luma.c
index f7ecfc5..e8adf45 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfVer_Luma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_InterpolateHalfVer_Luma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P10_InterpolateHalfVer_Luma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Chroma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Chroma.c
index 1507d23..26730f8 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Chroma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Chroma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P10_Interpolate_Chroma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Luma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Luma.c
index 89978dd..538d62e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Luma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_Interpolate_Luma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P10_Interpolate_Luma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_PredictIntraDC4x4.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_PredictIntraDC4x4.c
index b713073..a200d55 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_PredictIntraDC4x4.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_PredictIntraDC4x4.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_QuantTables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_QuantTables.c
index f0b5bb0..c01d4f6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_QuantTables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_QuantTables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_SADQuar.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_SADQuar.c
index a41e04b..6ef8af5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_SADQuar.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_SADQuar.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P10_SADQuar.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_TransformResidual4x4.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_TransformResidual4x4.c
index f9f756a..6c53731 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_TransformResidual4x4.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_TransformResidual4x4.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock2x2.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock2x2.c
index dda49f6..fb004e5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock2x2.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock2x2.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock4x4.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock4x4.c
index 3c0dcbd..b40c933 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock4x4.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/armVCM4P10_UnpackBlock4x4.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_Average_4x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_Average_4x.c
index ac0d523..638605e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_Average_4x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_Average_4x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_Average_4x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Half.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Half.c
index c490e10..6cfdb64 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Half.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Half.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_BlockMatch_Half.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Integer.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Integer.c
index f7764e1..050200f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Integer.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Integer.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_BlockMatch_Integer.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Quarter.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Quarter.c
index 513ee25..f450d2c 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Quarter.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_BlockMatch_Quarter.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_BlockMatch_Quarter.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
index a07b1bb..9aecf3f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockChroma_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
index 1f3a646..a159631 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DeblockLuma_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
index 830ddc7..f931eeb 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeChromaDcCoeffsToPairCAVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
index 7e83d1e..e8ab819 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DecodeCoeffsToPairCAVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd.c
index ed5a158..8a022ba 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_DequantTransformResidualFromPairAndAdd.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I.c
index 75edee2..4f34a96 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_HorEdge_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I.c
index 10b2592..70b0e87 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingChroma_VerEdge_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I.c
index 30a37da..19294f8 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_HorEdge_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I.c
index 8733427..53e232a 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_FilterDeblockingLuma_VerEdge_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_GetVLCInfo.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_GetVLCInfo.c
index 81c59d6..c80552a 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_GetVLCInfo.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_GetVLCInfo.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_GetVLCInfo.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
index 8824de2..18824d8 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateChroma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InterpolateChroma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfHor_Luma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfHor_Luma.c
index ef0befa..26c8208 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfHor_Luma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfHor_Luma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InterpolateHalfHor_Luma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfVer_Luma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfVer_Luma.c
index 3560ff8..96c186b 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfVer_Luma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateHalfVer_Luma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InterpolateHalfVer_Luma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateLuma.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateLuma.c
index d233735..e2a8163 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateLuma.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InterpolateLuma.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InterpolateLuma.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_ChromaDC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_ChromaDC.c
index 92ba031..869e768 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_ChromaDC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_ChromaDC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InvTransformDequant_ChromaDC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_LumaDC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_LumaDC.c
index a3b1200..75f15cf 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_LumaDC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformDequant_LumaDC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InvTransformDequant_LumaDC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformResidualAndAdd.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformResidualAndAdd.c
index 3303997..e3e4519 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformResidualAndAdd.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_InvTransformResidualAndAdd.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_InvTransformResidualAndAdd.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEGetBufSize.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEGetBufSize.c
index 8c3a5c3..7a245e1 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEGetBufSize.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEGetBufSize.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_MEGetBufSize.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEInit.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEInit.c
index 58ecc88..e463353 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEInit.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MEInit.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_MEInit.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MotionEstimationMB.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MotionEstimationMB.c
index 33dbf3f..5264394 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MotionEstimationMB.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_MotionEstimationMB.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/** x
*
* File Name: omxVCM4P10_MotionEstimationMB.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8.c
index d6ca783..e850771 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntraChroma_8x8.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16.c
index c90cb4c..ec44526 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_16x16.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4.c
index 3fa8212..44c25f6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_PredictIntra_4x4.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_16x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_16x.c
index c8114ee..140a785 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_16x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_16x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_SADQuar_16x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_4x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_4x.c
index 4b330ba..4b60d34 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_4x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_4x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_SADQuar_4x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_8x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_8x.c
index c9e9c24..6c8cdf3 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_8x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SADQuar_8x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_SADQuar_8x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SAD_4x.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SAD_4x.c
index 927c454..e22d8dd 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SAD_4x.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SAD_4x.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_SAD_4x.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SATD_4x4.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SATD_4x4.c
index a91ae66..6f74499 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SATD_4x4.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SATD_4x4.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_SATD_4x4.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SubAndTransformQDQResidual.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SubAndTransformQDQResidual.c
index 23a5662..f184d7c 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SubAndTransformQDQResidual.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_SubAndTransformQDQResidual.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_SubAndTransformQDQResidual.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair.c
index 9ad0e81..dd9f5a7 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantChromaDCFromPair.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair.c
index 16c8be1..d333d49 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformDequantLumaDCFromPair.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/* ----------------------------------------------------------------
*
*
@@ -6,7 +22,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_ChromaDC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_ChromaDC.c
index b5544dd..1b6a3d0 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_ChromaDC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_ChromaDC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_TransformQuant_ChromaDC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_LumaDC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_LumaDC.c
index 2ccf7f0..ea99a2d 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_LumaDC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p10/src/omxVCM4P10_TransformQuant_LumaDC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P10_TransformQuant_LumaDC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_DCT_Table.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_DCT_Table.h
index 3255b61..a72da13 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_DCT_Table.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_DCT_Table.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_DCT_Table.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
index 92ecc05..a88bdbc 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_Huff_Tables_VLC.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Huff_Tables_VLC.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
index c75ed89..90c163f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/api/armVCM4P2_ZigZag_Tables.h
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_ZigZag_Tables.h
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_ACDCPredict.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_ACDCPredict.c
index b6a396a..c993f73 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_ACDCPredict.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_ACDCPredict.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_ACDCPredict.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Half.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Half.c
index 1b69a33..4ffda10 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Half.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Half.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_BlockMatch_Half.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Integer.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Integer.c
index 77fe358..2b05660 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Integer.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_BlockMatch_Integer.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_BlockMatch_Integer.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CheckVLCEscapeMode.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CheckVLCEscapeMode.c
index 94e8639..5e510e7 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CheckVLCEscapeMode.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CheckVLCEscapeMode.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_CheckVLCEscapeMode.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CompareMV.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CompareMV.c
index 3b8845e..3b621a3 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CompareMV.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_CompareMV.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_CompareMV.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DCT_Table.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DCT_Table.c
index a6f713e..7d055d9 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DCT_Table.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DCT_Table.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_DCT_Table.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_intra.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_intra.c
index a2572e0..a5aa198 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_intra.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_DecodeVLCZigzag_intra.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_DecodeVLCZigzag_intra.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_EncodeVLCZigzag_intra.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_EncodeVLCZigzag_intra.c
index cd6b56d..b61c547 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_EncodeVLCZigzag_intra.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_EncodeVLCZigzag_intra.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_EncodeVLCZigzag_intra.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLCBuffer.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLCBuffer.c
index 93c9504..aeb7714 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLCBuffer.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLCBuffer.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_FillVLCBuffer.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLDBuffer.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLDBuffer.c
index 1712c3a..f09f5d5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLDBuffer.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_FillVLDBuffer.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_FillVLDBuffer.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_GetVLCBits.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_GetVLCBits.c
index 953f597..8eb1411 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_GetVLCBits.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_GetVLCBits.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_GetVLCBits.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
index cd7e9e4..b101d48 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Huff_Tables_VLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Huff_Tables_VLC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_PutVLCBits.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_PutVLCBits.c
index ca9efec..21d5494 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_PutVLCBits.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_PutVLCBits.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_PutVLCBits.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_SetPredDir.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_SetPredDir.c
index a9cd008..61d44d4 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_SetPredDir.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_SetPredDir.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_SetPredDir.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
index a247c69..bcfc0ef 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/armVCM4P2_Zigzag_Tables.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: armVCM4P2_Zigzag_Tables.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_16x16.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_16x16.c
index dcd3ce1..f23c533 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_16x16.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_16x16.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_BlockMatch_Half_16x16.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_8x8.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_8x8.c
index 6996e6d..83da79f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_8x8.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Half_8x8.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_BlockMatch_Half_8x8.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_16x16.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_16x16.c
index e714ef1..e224016 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_16x16.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_16x16.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_BlockMatch_Integer_16x16.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_8x8.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_8x8.c
index 607e64c..73a99bd 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_8x8.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_BlockMatch_Integer_8x8.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_BlockMatch_Integer_8x8.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DCT8x8blk.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DCT8x8blk.c
index a077ac8..c73e24a 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DCT8x8blk.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DCT8x8blk.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DCT8x8blk.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
index 51f7bab..9c9a7f6 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Inter.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeBlockCoef_Inter.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
index a0b2376..970da6c 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeBlockCoef_Intra.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeBlockCoef_Intra.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP.c
index 7e159b7..ae2c220 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodePadMV_PVOP.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodePadMV_PVOP.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter.c
index 88a8d04..2d3cf6e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_Inter.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeVLCZigzag_Inter.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC.c
index 96593d1..6dddaf0 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraACVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeVLCZigzag_IntraACVLC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC.c
index 95e00d7..9c76ed1 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_DecodeVLCZigzag_IntraDCVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_DecodeVLCZigzag_IntraDCVLC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeMV.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeMV.c
index def2b6d..c04a236 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeMV.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeMV.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_EncodeMV.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_Inter.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_Inter.c
index b6c73ea..2158f88 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_Inter.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_Inter.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_EncodeVLCZigzag_Inter.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraACVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraACVLC.c
index d047942..63b6d97 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraACVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraACVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_EncodeVLCZigzag_IntraACVLC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraDCVLC.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraDCVLC.c
index c57acd2..7bdda19 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraDCVLC.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_EncodeVLCZigzag_IntraDCVLC.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_EncodeVLCZigzag_IntraDCVLC.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_FindMVpred.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_FindMVpred.c
index a0cff48..054b486 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_FindMVpred.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_FindMVpred.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_FindMVpred.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_IDCT8x8blk.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_IDCT8x8blk.c
index 1886d92..c512458 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_IDCT8x8blk.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_IDCT8x8blk.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_IDCT8x8blk.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MCReconBlock.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MCReconBlock.c
index 7b3faee..33f0cf5 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MCReconBlock.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MCReconBlock.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_MCReconBlock.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
* Description:
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEGetBufSize.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEGetBufSize.c
index a8e51da..dda852e 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEGetBufSize.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEGetBufSize.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_MEGetBufSize.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEInit.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEInit.c
index 419e71a..59c57c2 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEInit.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MEInit.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_MEInit.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MotionEstimationMB.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MotionEstimationMB.c
index 9549050..f9bb297 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MotionEstimationMB.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_MotionEstimationMB.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_MotionEstimationMB.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra.c
index 1613f47..e091f31 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_PredictReconCoefIntra.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_PredictReconCoefIntra.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInter_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInter_I.c
index 5964f73..9055b66 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInter_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInter_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_QuantInter_I.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantIntra_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantIntra_I.c
index a10da68..795b802 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantIntra_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantIntra_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_QuantIntra_I.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvInter_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvInter_I.c
index 6e0de5c..189e244 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvInter_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvInter_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_QuantInvInter_I.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I.c
index a946d7b..2f24cc7 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_QuantInvIntra_I.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_QuantInvIntra_I.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_inter.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_inter.c
index 6e0c59b..9615a77 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_inter.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_inter.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_TransRecBlockCoef_inter.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_intra.c b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_intra.c
index dd444f9..4923e3f 100644
--- a/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_intra.c
+++ b/media/libstagefright/codecs/on2/h264dec/omxdl/reference/vc/m4p2/src/omxVCM4P2_TransRecBlockCoef_intra.c
@@ -1,3 +1,19 @@
+/*
+ * Copyright (C) 2007-2008 ARM Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
/**
*
* File Name: omxVCM4P2_TransRecBlockCoef_intra.c
@@ -5,7 +21,6 @@
* Revision: 9641
* Date: Thursday, February 7, 2008
*
- * (c) Copyright 2007-2008 ARM Limited. All Rights Reserved.
*
*
*
diff --git a/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c b/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c
index dcf2ef6..dcf2ef6 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/DecTestBench.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c b/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c
index aadc75f..aadc75f 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/EvaluationTestBench.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
index 2bb4c4d..a073dcb 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
@@ -36,12 +36,15 @@
1. Include headers
------------------------------------------------------------------------------*/
#include <stdlib.h>
+#include <string.h>
#include "basetype.h"
#include "h264bsd_container.h"
#include "H264SwDecApi.h"
#include "h264bsd_decoder.h"
#include "h264bsd_util.h"
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
Version Information
------------------------------------------------------------------------------*/
@@ -73,6 +76,7 @@ H264DEC_EVALUATION Compile evaluation version, restricts number of frames
#endif
void H264SwDecTrace(char *string) {
+ UNUSED(string);
}
void* H264SwDecMalloc(u32 size) {
diff --git a/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c b/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c
index 42170d3..42170d3 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/TestBenchMultipleInstance.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.c
index db77f8c..db77f8c 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.h
index 36aec76..36aec76 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_byte_stream.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.c
index 91d78bd..91d78bd 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.h
index 80353d3..80353d3 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cavlc.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cfg.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cfg.h
index 2baba5a..2baba5a 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cfg.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_cfg.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.c
index 493fb9e..7a262ed 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.c
@@ -267,7 +267,7 @@ u32 ConcealMb(mbStorage_t *pMb, image_t *currImage, u32 row, u32 col,
i32 firstPhase[16];
i32 *pTmp;
/* neighbours above, below, left and right */
- i32 a[4], b[4], l[4], r[4];
+ i32 a[4] = { 0,0,0,0 }, b[4], l[4] = { 0,0,0,0 }, r[4];
u32 A, B, L, R;
#ifdef H264DEC_OMXDL
u8 fillBuff[32*21 + 15 + 32];
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.h
index 3134670..3134670 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_conceal.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_container.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_container.h
index 99b74a0..99b74a0 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_container.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_container.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.c
index f8c1f76..f8c1f76 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.h
index 2571dda..2571dda 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_deblocking.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
index 9517d0a..9517d0a 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.h
index 0e25084..0e25084 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_dpb.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.c
index 7b92870..7b92870 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.h
index ed7c18c..ed7c18c 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_image.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.c
index 2a81c4a..2a81c4a 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.h
index 94dee25..94dee25 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_inter_prediction.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.c
index 52c85e5..52c85e5 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.h
index 4652bd5..4652bd5 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_intra_prediction.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.c
index 2b3e7f0..2b3e7f0 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.h
index 32bc340..32bc340 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_macroblock_layer.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.c
index e44c43a..e44c43a 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.h
index 38957bf..38957bf 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_nal_unit.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.c
index ce5eeff..ce5eeff 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.h
index fce0ad1..fce0ad1 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_neighbour.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.c
index fb23352..fb23352 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.h
index 19741eb..19741eb 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_order_cnt.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.c
index e04dea4..e04dea4 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.h
index 6328638..6328638 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_pic_param_set.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
index c948776..b409a06 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
@@ -42,6 +42,8 @@
#include "armVC.h"
#endif /* H264DEC_OMXDL */
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
2. External compiler flags
--------------------------------------------------------------------------------
@@ -2136,7 +2138,8 @@ static void FillRow1(
i32 center,
i32 right)
{
-
+ UNUSED(left);
+ UNUSED(right);
ASSERT(ref);
ASSERT(fill);
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.h
index 5a1a140..5a1a140 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.c
index 0756c47..0756c47 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.h
index efe543a..efe543a 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_sei.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_seq_param_set.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_seq_param_set.h
index e18df94..e18df94 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_seq_param_set.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_seq_param_set.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.c
index c288d4b..c288d4b 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.h
index f23d49e..f23d49e 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_data.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.c
index 7cbb534..7cbb534 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.h
index 4bcb6f2..4bcb6f2 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_group_map.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
index a7c6f64..23401c6 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
@@ -47,6 +47,8 @@
#include "h264bsd_nal_unit.h"
#include "h264bsd_dpb.h"
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
2. External compiler flags
--------------------------------------------------------------------------------
@@ -1407,6 +1409,7 @@ u32 h264bsdCheckPriorPicsFlag(u32 * noOutputOfPriorPicsFlag,
u32 tmp, value, i;
i32 ivalue;
strmData_t tmpStrmData[1];
+ UNUSED(nalUnitType);
/* Code */
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.h
index 198898a..198898a 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.c
index 3234754..3234754 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.h
index ba3b2da..ba3b2da 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_storage.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.c
index 20d1083..20d1083 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.h
index 4404b66..4404b66 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_stream.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.c
index 4eb6dd0..4eb6dd0 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.h
index 4f41a23..4f41a23 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_transform.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c
index cc838fd..fb97a28 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c
@@ -186,7 +186,7 @@ u32 h264bsdMoreRbspData(strmData_t *pStrmData)
return(HANTRO_FALSE);
if ( (bits > 8) ||
- ((h264bsdShowBits32(pStrmData)>>(32-bits)) != (1 << (bits-1))) )
+ ((h264bsdShowBits32(pStrmData)>>(32-bits)) != (1ul << (bits-1))) )
return(HANTRO_TRUE);
else
return(HANTRO_FALSE);
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h
index 216ad04..216ad04 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.c
index 060f35e..060f35e 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.h
index 4c16773..4c16773 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vlc.h
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.c
index 4a9335a..4a9335a 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.c
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.h b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.h
index 05d52a4..05d52a4 100755..100644
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.h
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_vui.h
diff --git a/media/libstagefright/codecs/opus/Android.mk b/media/libstagefright/codecs/opus/Android.mk
new file mode 100644
index 0000000..365b179
--- /dev/null
+++ b/media/libstagefright/codecs/opus/Android.mk
@@ -0,0 +1,4 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
\ No newline at end of file
diff --git a/media/libstagefright/codecs/opus/dec/Android.mk b/media/libstagefright/codecs/opus/dec/Android.mk
new file mode 100644
index 0000000..2379c5f
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/Android.mk
@@ -0,0 +1,19 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftOpus.cpp
+
+LOCAL_C_INCLUDES := \
+ external/libopus/include \
+ frameworks/av/media/libstagefright/include \
+ frameworks/native/include/media/openmax \
+
+LOCAL_SHARED_LIBRARIES := \
+ libopus libstagefright libstagefright_omx \
+ libstagefright_foundation libutils liblog
+
+LOCAL_MODULE := libstagefright_soft_opusdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
new file mode 100644
index 0000000..b8084ae
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -0,0 +1,540 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftOpus"
+#include <utils/Log.h>
+
+#include "SoftOpus.h"
+#include <OMX_AudioExt.h>
+#include <OMX_IndexExt.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+extern "C" {
+ #include <opus.h>
+ #include <opus_multistream.h>
+}
+
+namespace android {
+
+static const int kRate = 48000;
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftOpus::SoftOpus(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mInputBufferCount(0),
+ mDecoder(NULL),
+ mHeader(NULL),
+ mCodecDelay(0),
+ mSeekPreRoll(0),
+ mAnchorTimeUs(0),
+ mNumFramesOutput(0),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftOpus::~SoftOpus() {
+ if (mDecoder != NULL) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = NULL;
+ }
+ if (mHeader != NULL) {
+ delete mHeader;
+ mHeader = NULL;
+ }
+}
+
+void SoftOpus::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 960 * 6;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_AUDIO_OPUS);
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding =
+ (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidOPUS;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t);
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+status_t SoftOpus::initDecoder() {
+ return OK;
+}
+
+OMX_ERRORTYPE SoftOpus::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch ((int)index) {
+ case OMX_IndexParamAudioAndroidOpus:
+ {
+ OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
+ (OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
+
+ if (opusParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ opusParams->nAudioBandWidth = 0;
+ opusParams->nSampleRate = kRate;
+ opusParams->nBitRate = 0;
+
+ if (!isConfigured()) {
+ opusParams->nChannels = 1;
+ } else {
+ opusParams->nChannels = mHeader->channels;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+ pcmParams->nSamplingRate = kRate;
+
+ if (!isConfigured()) {
+ pcmParams->nChannels = 1;
+ } else {
+ pcmParams->nChannels = mHeader->channels;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftOpus::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch ((int)index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.opus",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioAndroidOpus:
+ {
+ const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
+ (const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
+
+ if (opusParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+bool SoftOpus::isConfigured() const {
+ return mInputBufferCount >= 1;
+}
+
+static uint16_t ReadLE16(const uint8_t *data, size_t data_size,
+ uint32_t read_offset) {
+ if (read_offset + 1 > data_size)
+ return 0;
+ uint16_t val;
+ val = data[read_offset];
+ val |= data[read_offset + 1] << 8;
+ return val;
+}
+
+// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
+// mappings for up to 8 channels. This information is part of the Vorbis I
+// Specification:
+// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html
+static const int kMaxChannels = 8;
+
+// Maximum packet size used in Xiph's opusdec.
+static const int kMaxOpusOutputPacketSizeSamples = 960 * 6;
+
+// Default audio output channel layout. Used to initialize |stream_map| in
+// OpusHeader, and passed to opus_multistream_decoder_create() when the header
+// does not contain mapping information. The values are valid only for mono and
+// stereo output: Opus streams with more than 2 channels require a stream map.
+static const int kMaxChannelsWithDefaultLayout = 2;
+static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 };
+
+// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header
+static bool ParseOpusHeader(const uint8_t *data, size_t data_size,
+ OpusHeader* header) {
+ // Size of the Opus header excluding optional mapping information.
+ const size_t kOpusHeaderSize = 19;
+
+ // Offset to the channel count byte in the Opus header.
+ const size_t kOpusHeaderChannelsOffset = 9;
+
+ // Offset to the pre-skip value in the Opus header.
+ const size_t kOpusHeaderSkipSamplesOffset = 10;
+
+ // Offset to the gain value in the Opus header.
+ const size_t kOpusHeaderGainOffset = 16;
+
+ // Offset to the channel mapping byte in the Opus header.
+ const size_t kOpusHeaderChannelMappingOffset = 18;
+
+ // Opus Header contains a stream map. The mapping values are in the header
+ // beyond the always present |kOpusHeaderSize| bytes of data. The mapping
+ // data contains stream count, coupling information, and per channel mapping
+ // values:
+ // - Byte 0: Number of streams.
+ // - Byte 1: Number coupled.
+ // - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping
+ // values.
+ const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize;
+ const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1;
+ const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2;
+
+ if (data_size < kOpusHeaderSize) {
+ ALOGV("Header size is too small.");
+ return false;
+ }
+ header->channels = *(data + kOpusHeaderChannelsOffset);
+
+ if (header->channels <= 0 || header->channels > kMaxChannels) {
+ ALOGV("Invalid Header, wrong channel count: %d", header->channels);
+ return false;
+ }
+ header->skip_samples = ReadLE16(data, data_size,
+ kOpusHeaderSkipSamplesOffset);
+ header->gain_db = static_cast<int16_t>(
+ ReadLE16(data, data_size,
+ kOpusHeaderGainOffset));
+ header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset);
+ if (!header->channel_mapping) {
+ if (header->channels > kMaxChannelsWithDefaultLayout) {
+ ALOGV("Invalid Header, missing stream map.");
+ return false;
+ }
+ header->num_streams = 1;
+ header->num_coupled = header->channels > 1;
+ header->stream_map[0] = 0;
+ header->stream_map[1] = 1;
+ return true;
+ }
+ if (data_size < kOpusHeaderStreamMapOffset + header->channels) {
+ ALOGV("Invalid stream map; insufficient data for current channel "
+ "count: %d", header->channels);
+ return false;
+ }
+ header->num_streams = *(data + kOpusHeaderNumStreamsOffset);
+ header->num_coupled = *(data + kOpusHeaderNumCoupledOffset);
+ if (header->num_streams + header->num_coupled != header->channels) {
+ ALOGV("Inconsistent channel mapping.");
+ return false;
+ }
+ for (int i = 0; i < header->channels; ++i)
+ header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i);
+ return true;
+}
+
+// Convert nanoseconds to number of samples.
+static uint64_t ns_to_samples(uint64_t ns, int kRate) {
+ return static_cast<double>(ns) * kRate / 1000000000;
+}
+
+void SoftOpus::onQueueFilled(OMX_U32 portIndex) {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ if (portIndex == 0 && mInputBufferCount < 3) {
+ BufferInfo *info = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *header = info->mHeader;
+
+ const uint8_t *data = header->pBuffer + header->nOffset;
+ size_t size = header->nFilledLen;
+
+ if (mInputBufferCount == 0) {
+ CHECK(mHeader == NULL);
+ mHeader = new OpusHeader();
+ memset(mHeader, 0, sizeof(*mHeader));
+ if (!ParseOpusHeader(data, size, mHeader)) {
+ ALOGV("Parsing Opus Header failed.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ uint8_t channel_mapping[kMaxChannels] = {0};
+ memcpy(&channel_mapping,
+ kDefaultOpusChannelLayout,
+ kMaxChannelsWithDefaultLayout);
+
+ int status = OPUS_INVALID_STATE;
+ mDecoder = opus_multistream_decoder_create(kRate,
+ mHeader->channels,
+ mHeader->num_streams,
+ mHeader->num_coupled,
+ channel_mapping,
+ &status);
+ if (!mDecoder || status != OPUS_OK) {
+ ALOGV("opus_multistream_decoder_create failed status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ status =
+ opus_multistream_decoder_ctl(mDecoder,
+ OPUS_SET_GAIN(mHeader->gain_db));
+ if (status != OPUS_OK) {
+ ALOGV("Failed to set OPUS header gain; status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ } else if (mInputBufferCount == 1) {
+ mCodecDelay = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(header->pBuffer +
+ header->nOffset)),
+ kRate);
+ mSamplesToDiscard = mCodecDelay;
+ } else {
+ mSeekPreRoll = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(header->pBuffer +
+ header->nOffset)),
+ kRate);
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+
+ inQueue.erase(inQueue.begin());
+ info->mOwnedByUs = false;
+ notifyEmptyBufferDone(header);
+ ++mInputBufferCount;
+ return;
+ }
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumFramesOutput = 0;
+ }
+
+ // When seeking to zero, |mCodecDelay| samples has to be discarded
+ // instead of |mSeekPreRoll| samples (as we would when seeking to any
+ // other timestamp).
+ if (inHeader->nTimeStamp == 0) {
+ mSamplesToDiscard = mCodecDelay;
+ }
+
+ const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+ const uint32_t size = inHeader->nFilledLen;
+
+ int numFrames = opus_multistream_decode(mDecoder,
+ data,
+ size,
+ (int16_t *)outHeader->pBuffer,
+ kMaxOpusOutputPacketSizeSamples,
+ 0);
+ if (numFrames < 0) {
+ ALOGE("opus_multistream_decode returned %d", numFrames);
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ outHeader->nOffset = 0;
+ if (mSamplesToDiscard > 0) {
+ if (mSamplesToDiscard > numFrames) {
+ mSamplesToDiscard -= numFrames;
+ numFrames = 0;
+ } else {
+ numFrames -= mSamplesToDiscard;
+ outHeader->nOffset = mSamplesToDiscard * sizeof(int16_t) *
+ mHeader->channels;
+ mSamplesToDiscard = 0;
+ }
+ }
+
+ outHeader->nFilledLen = numFrames * sizeof(int16_t) * mHeader->channels;
+ outHeader->nFlags = 0;
+
+ outHeader->nTimeStamp = mAnchorTimeUs +
+ (mNumFramesOutput * 1000000ll) /
+ kRate;
+
+ mNumFramesOutput += numFrames;
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+
+ ++mInputBufferCount;
+ }
+}
+
+void SoftOpus::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0 && mDecoder != NULL) {
+ // Make sure that the next buffer output does not still
+ // depend on fragments from the last one decoded.
+ mNumFramesOutput = 0;
+ opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE);
+ mAnchorTimeUs = 0;
+ mSamplesToDiscard = mSeekPreRoll;
+ }
+}
+
+void SoftOpus::onReset() {
+ mInputBufferCount = 0;
+ mNumFramesOutput = 0;
+ if (mDecoder != NULL) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = NULL;
+ }
+ if (mHeader != NULL) {
+ delete mHeader;
+ mHeader = NULL;
+ }
+
+ mOutputPortSettingsChange = NONE;
+}
+
+void SoftOpus::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftOpus(name, callbacks, appData, component);
+}
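
For reference, a minimal standalone sketch (test values assumed, not part of this change) of the 19-byte OpusHead layout that ParseOpusHeader() above walks: channel count at offset 9, pre-skip at offset 10, output gain at offset 16 and the channel mapping family at offset 18, with multi-byte fields little-endian as in ReadLE16().

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Build a minimal stereo OpusHead (mapping family 0, so no stream map) and
    // read it back with the same offsets used by ParseOpusHeader().
    int main() {
        uint8_t head[19] = {0};
        memcpy(head, "OpusHead", 8);          // magic
        head[8]  = 1;                         // version
        head[9]  = 2;                         // channel count          (offset 9)
        head[10] = 0x38; head[11] = 0x01;     // pre-skip = 312, LE     (offset 10)
        head[16] = 0;    head[17] = 0;        // output gain = 0, Q7.8  (offset 16)
        head[18] = 0;                         // channel mapping family (offset 18)

        int     channels = head[9];
        int     preSkip  = head[10] | (head[11] << 8);
        int16_t gain     = (int16_t)(head[16] | (head[17] << 8));
        printf("channels=%d pre-skip=%d gain=%d mapping=%d\n",
               channels, preSkip, gain, head[18]);
        return 0;
    }
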
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
new file mode 100644
index 0000000..97f6561
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * The Opus specification is part of IETF RFC 6716:
+ * http://tools.ietf.org/html/rfc6716
+ */
+
+#ifndef SOFT_OPUS_H_
+
+#define SOFT_OPUS_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct OpusMSDecoder;
+
+namespace android {
+
+struct OpusHeader {
+ int channels;
+ int skip_samples;
+ int channel_mapping;
+ int num_streams;
+ int num_coupled;
+ int16_t gain_db;
+ uint8_t stream_map[8];
+};
+
+struct SoftOpus : public SimpleSoftOMXComponent {
+ SoftOpus(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftOpus();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+ virtual void onReset();
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kMaxNumSamplesPerBuffer = 960 * 6
+ };
+
+ size_t mInputBufferCount;
+
+ OpusMSDecoder *mDecoder;
+ OpusHeader *mHeader;
+
+ int64_t mCodecDelay;
+ int64_t mSeekPreRoll;
+ int64_t mSamplesToDiscard;
+ int64_t mAnchorTimeUs;
+ int64_t mNumFramesOutput;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftOpus);
+};
+
+} // namespace android
+
+#endif // SOFT_OPUS_H_
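
The codec delay and seek pre-roll fields declared above are filled from side data carried in nanoseconds; ns_to_samples() in SoftOpus.cpp converts them to 48 kHz sample counts. A quick standalone check with typical values (6.5 ms pre-skip and 80 ms pre-roll are assumed here, not taken from the change):

    #include <cstdint>
    #include <cstdio>

    // Same arithmetic as ns_to_samples() in SoftOpus.cpp.
    static uint64_t ns_to_samples(uint64_t ns, int rate) {
        return static_cast<double>(ns) * rate / 1000000000;
    }

    int main() {
        printf("codec delay   = %llu samples\n",
               (unsigned long long)ns_to_samples(6500000LL, 48000));    // 312
        printf("seek pre-roll = %llu samples\n",
               (unsigned long long)ns_to_samples(80000000LL, 48000));   // 3840
        return 0;
    }
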
diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk
index fe90a03..87080e7 100644
--- a/media/libstagefright/codecs/raw/Android.mk
+++ b/media/libstagefright/codecs/raw/Android.mk
@@ -8,6 +8,8 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/include \
frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/raw/SoftRaw.cpp b/media/libstagefright/codecs/raw/SoftRaw.cpp
index 19d6f13..9d514a6 100644
--- a/media/libstagefright/codecs/raw/SoftRaw.cpp
+++ b/media/libstagefright/codecs/raw/SoftRaw.cpp
@@ -163,7 +163,7 @@ OMX_ERRORTYPE SoftRaw::internalSetParameter(
}
}
-void SoftRaw::onQueueFilled(OMX_U32 portIndex) {
+void SoftRaw::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError) {
return;
}
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk
index 2232353..217a6d2 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.mk
+++ b/media/libstagefright/codecs/vorbis/dec/Android.mk
@@ -16,4 +16,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_vorbisdec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 51bb958..8f356b6 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -54,6 +54,8 @@ SoftVorbis::SoftVorbis(
mAnchorTimeUs(0),
mNumFramesOutput(0),
mNumFramesLeftOnPage(-1),
+ mSawInputEos(false),
+ mSignalledOutputEos(false),
mOutputPortSettingsChange(NONE) {
initPorts();
CHECK_EQ(initDecoder(), (status_t)OK);
@@ -290,48 +292,47 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) {
return;
}
- while (!inQueue.empty() && !outQueue.empty()) {
- BufferInfo *inInfo = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
+ BufferInfo *inInfo = NULL;
+ OMX_BUFFERHEADERTYPE *inHeader = NULL;
+ if (!inQueue.empty()) {
+ inInfo = *inQueue.begin();
+ inHeader = inInfo->mHeader;
+ }
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
+ int32_t numPageSamples = 0;
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ if (inHeader) {
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ mSawInputEos = true;
+ }
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
- return;
- }
+ if (inHeader->nFilledLen || !mSawInputEos) {
+ CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples));
+ memcpy(&numPageSamples,
+ inHeader->pBuffer
+ + inHeader->nOffset + inHeader->nFilledLen - 4,
+ sizeof(numPageSamples));
- int32_t numPageSamples;
- CHECK_GE(inHeader->nFilledLen, sizeof(numPageSamples));
- memcpy(&numPageSamples,
- inHeader->pBuffer
- + inHeader->nOffset + inHeader->nFilledLen - 4,
- sizeof(numPageSamples));
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumFramesOutput = 0;
+ }
- if (numPageSamples >= 0) {
- mNumFramesLeftOnPage = numPageSamples;
+ inHeader->nFilledLen -= sizeof(numPageSamples);;
+ }
}
- if (inHeader->nOffset == 0) {
- mAnchorTimeUs = inHeader->nTimeStamp;
- mNumFramesOutput = 0;
+ if (numPageSamples >= 0) {
+ mNumFramesLeftOnPage = numPageSamples;
}
- inHeader->nFilledLen -= sizeof(numPageSamples);;
-
ogg_buffer buf;
- buf.data = inHeader->pBuffer + inHeader->nOffset;
- buf.size = inHeader->nFilledLen;
+ buf.data = inHeader ? inHeader->pBuffer + inHeader->nOffset : NULL;
+ buf.size = inHeader ? inHeader->nFilledLen : 0;
buf.refcount = 1;
buf.ptr.owner = NULL;
@@ -351,9 +352,15 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) {
int numFrames = 0;
+ outHeader->nFlags = 0;
int err = vorbis_dsp_synthesis(mState, &pack, 1);
if (err != 0) {
+ // FIXME temporary workaround for log spam
+#if !defined(__arm__) && !defined(__aarch64__)
+ ALOGV("vorbis_dsp_synthesis returned %d", err);
+#else
ALOGW("vorbis_dsp_synthesis returned %d", err);
+#endif
} else {
numFrames = vorbis_dsp_pcmout(
mState, (int16_t *)outHeader->pBuffer,
@@ -370,13 +377,16 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) {
ALOGV("discarding %d frames at end of page",
numFrames - mNumFramesLeftOnPage);
numFrames = mNumFramesLeftOnPage;
+ if (mSawInputEos) {
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mSignalledOutputEos = true;
+ }
}
mNumFramesLeftOnPage -= numFrames;
}
outHeader->nFilledLen = numFrames * sizeof(int16_t) * mVi->channels;
outHeader->nOffset = 0;
- outHeader->nFlags = 0;
outHeader->nTimeStamp =
mAnchorTimeUs
@@ -384,11 +394,13 @@ void SoftVorbis::onQueueFilled(OMX_U32 portIndex) {
mNumFramesOutput += numFrames;
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
+ if (inHeader) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
@@ -425,6 +437,8 @@ void SoftVorbis::onReset() {
mVi = NULL;
}
+ mSawInputEos = false;
+ mSignalledOutputEos = false;
mOutputPortSettingsChange = NONE;
}
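
The SoftVorbis rework above no longer returns an empty EOS buffer as soon as the EOS input arrives; instead mSawInputEos keeps the loop running with no input until the EOS flag has gone out on an output buffer. A deliberately simplified model of just that loop condition (queue contents assumed; the real code also waits for the current Ogg page to drain before flagging EOS):

    #include <cstdio>
    #include <deque>

    int main() {
        std::deque<int> inQueue  = {4096, 4096, 0};   // last input buffer carries EOS
        std::deque<int> outQueue = {0, 1, 2, 3};
        bool sawInputEos = false, signalledOutputEos = false;

        // Mirrors the new while condition in onQueueFilled().
        while ((!inQueue.empty() || (sawInputEos && !signalledOutputEos)) &&
               !outQueue.empty()) {
            if (!inQueue.empty()) {
                int bytes = inQueue.front();
                inQueue.pop_front();
                if (bytes == 0) sawInputEos = true;
                printf("consumed input buffer (%d bytes)\n", bytes);
            }
            outQueue.pop_front();
            if (sawInputEos && inQueue.empty()) {
                printf("sent output buffer with OMX_BUFFERFLAG_EOS\n");
                signalledOutputEos = true;
            } else {
                printf("sent output buffer\n");
            }
        }
        return 0;
    }
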
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
index cb628a0..1d00816 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -59,6 +59,8 @@ private:
int64_t mAnchorTimeUs;
int64_t mNumFramesOutput;
int32_t mNumFramesLeftOnPage;
+ bool mSawInputEos;
+ bool mSignalledOutputEos;
enum {
NONE,
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 77f21b7..4e75250 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -21,7 +21,7 @@
#include <cutils/properties.h> // for property_get
#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/AMessage.h>
#include <system/window.h>
#include <ui/GraphicBufferMapper.h>
#include <gui/IGraphicBufferProducer.h>
@@ -33,40 +33,78 @@ static bool runningInEmulator() {
return (property_get("ro.kernel.qemu", prop, NULL) > 0);
}
-SoftwareRenderer::SoftwareRenderer(
- const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
- : mConverter(NULL),
+static int ALIGN(int x, int y) {
+ // y must be a power of 2.
+ return (x + y - 1) & ~(y - 1);
+}
+
+SoftwareRenderer::SoftwareRenderer(const sp<ANativeWindow> &nativeWindow)
+ : mColorFormat(OMX_COLOR_FormatUnused),
+ mConverter(NULL),
mYUVMode(None),
- mNativeWindow(nativeWindow) {
- int32_t tmp;
- CHECK(meta->findInt32(kKeyColorFormat, &tmp));
- mColorFormat = (OMX_COLOR_FORMATTYPE)tmp;
-
- CHECK(meta->findInt32(kKeyWidth, &mWidth));
- CHECK(meta->findInt32(kKeyHeight, &mHeight));
-
- if (!meta->findRect(
- kKeyCropRect,
- &mCropLeft, &mCropTop, &mCropRight, &mCropBottom)) {
- mCropLeft = mCropTop = 0;
- mCropRight = mWidth - 1;
- mCropBottom = mHeight - 1;
+ mNativeWindow(nativeWindow),
+ mWidth(0),
+ mHeight(0),
+ mCropLeft(0),
+ mCropTop(0),
+ mCropRight(0),
+ mCropBottom(0),
+ mCropWidth(0),
+ mCropHeight(0) {
+}
+
+SoftwareRenderer::~SoftwareRenderer() {
+ delete mConverter;
+ mConverter = NULL;
+}
+
+void SoftwareRenderer::resetFormatIfChanged(const sp<AMessage> &format) {
+ CHECK(format != NULL);
+
+ int32_t colorFormatNew;
+ CHECK(format->findInt32("color-format", &colorFormatNew));
+
+ int32_t widthNew, heightNew;
+ CHECK(format->findInt32("stride", &widthNew));
+ CHECK(format->findInt32("slice-height", &heightNew));
+
+ int32_t cropLeftNew, cropTopNew, cropRightNew, cropBottomNew;
+ if (!format->findRect(
+ "crop", &cropLeftNew, &cropTopNew, &cropRightNew, &cropBottomNew)) {
+ cropLeftNew = cropTopNew = 0;
+ cropRightNew = widthNew - 1;
+ cropBottomNew = heightNew - 1;
+ }
+
+ if (static_cast<int32_t>(mColorFormat) == colorFormatNew &&
+ mWidth == widthNew &&
+ mHeight == heightNew &&
+ mCropLeft == cropLeftNew &&
+ mCropTop == cropTopNew &&
+ mCropRight == cropRightNew &&
+ mCropBottom == cropBottomNew) {
+ // Nothing changed, no need to reset renderer.
+ return;
}
+ mColorFormat = static_cast<OMX_COLOR_FORMATTYPE>(colorFormatNew);
+ mWidth = widthNew;
+ mHeight = heightNew;
+ mCropLeft = cropLeftNew;
+ mCropTop = cropTopNew;
+ mCropRight = cropRightNew;
+ mCropBottom = cropBottomNew;
+
mCropWidth = mCropRight - mCropLeft + 1;
mCropHeight = mCropBottom - mCropTop + 1;
- int32_t rotationDegrees;
- if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
- rotationDegrees = 0;
- }
-
int halFormat;
size_t bufWidth, bufHeight;
switch (mColorFormat) {
case OMX_COLOR_FormatYUV420Planar:
case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ case OMX_COLOR_FormatYUV420SemiPlanar:
{
if (!runningInEmulator()) {
halFormat = HAL_PIXEL_FORMAT_YV12;
@@ -106,12 +144,29 @@ SoftwareRenderer::SoftwareRenderer(
NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW));
// Width must be multiple of 32???
- CHECK_EQ(0, native_window_set_buffers_geometry(
+ CHECK_EQ(0, native_window_set_buffers_dimensions(
mNativeWindow.get(),
bufWidth,
- bufHeight,
+ bufHeight));
+ CHECK_EQ(0, native_window_set_buffers_format(
+ mNativeWindow.get(),
halFormat));
+ // NOTE: native window uses extended right-bottom coordinate
+ android_native_rect_t crop;
+ crop.left = mCropLeft;
+ crop.top = mCropTop;
+ crop.right = mCropRight + 1;
+ crop.bottom = mCropBottom + 1;
+ ALOGV("setting crop: [%d, %d, %d, %d] for size [%zu, %zu]",
+ crop.left, crop.top, crop.right, crop.bottom, bufWidth, bufHeight);
+
+ CHECK_EQ(0, native_window_set_crop(mNativeWindow.get(), &crop));
+
+ int32_t rotationDegrees;
+ if (!format->findInt32("rotation-degrees", &rotationDegrees)) {
+ rotationDegrees = 0;
+ }
uint32_t transform;
switch (rotationDegrees) {
case 0: transform = 0; break;
@@ -121,24 +176,15 @@ SoftwareRenderer::SoftwareRenderer(
default: transform = 0; break;
}
- if (transform) {
- CHECK_EQ(0, native_window_set_buffers_transform(
- mNativeWindow.get(), transform));
- }
-}
-
-SoftwareRenderer::~SoftwareRenderer() {
- delete mConverter;
- mConverter = NULL;
-}
-
-static int ALIGN(int x, int y) {
- // y must be a power of 2.
- return (x + y - 1) & ~(y - 1);
+ CHECK_EQ(0, native_window_set_buffers_transform(
+ mNativeWindow.get(), transform));
}
void SoftwareRenderer::render(
- const void *data, size_t size, void *platformPrivate) {
+ const void *data, size_t /*size*/, int64_t timestampNs,
+ void* /*platformPrivate*/, const sp<AMessage>& format) {
+ resetFormatIfChanged(format);
+
ANativeWindowBuffer *buf;
int err;
if ((err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(),
@@ -191,9 +237,8 @@ void SoftwareRenderer::render(
dst_u += dst_c_stride;
dst_v += dst_c_stride;
}
- } else {
- CHECK_EQ(mColorFormat, OMX_TI_COLOR_FormatYUV420PackedSemiPlanar);
-
+ } else if (mColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
+ || mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
const uint8_t *src_y =
(const uint8_t *)data;
@@ -226,10 +271,17 @@ void SoftwareRenderer::render(
dst_u += dst_c_stride;
dst_v += dst_c_stride;
}
+ } else {
+ LOG_ALWAYS_FATAL("bad color format %#x", mColorFormat);
}
CHECK_EQ(0, mapper.unlock(buf->handle));
+ if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(),
+ timestampNs)) != 0) {
+ ALOGW("Surface::set_buffers_timestamp returned error %d", err);
+ }
+
if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf,
-1)) != 0) {
ALOGW("Surface::queueBuffer returned error %d", err);
diff --git a/media/libstagefright/data/media_codecs_google_audio.xml b/media/libstagefright/data/media_codecs_google_audio.xml
new file mode 100644
index 0000000..b957b0c
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_audio.xml
@@ -0,0 +1,92 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.mp3.decoder" type="audio/mpeg">
+ <Limit name="channel-count" max="2" />
+ <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+ <Limit name="bitrate" range="8000-320000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.amrnb.decoder" type="audio/3gpp">
+ <Limit name="channel-count" max="1" />
+ <Limit name="sample-rate" ranges="8000" />
+ <Limit name="bitrate" range="4750-12200" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.amrwb.decoder" type="audio/amr-wb">
+ <Limit name="channel-count" max="1" />
+ <Limit name="sample-rate" ranges="16000" />
+ <Limit name="bitrate" range="6600-23850" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.aac.decoder" type="audio/mp4a-latm">
+ <Limit name="channel-count" max="8" />
+ <Limit name="sample-rate" ranges="7350,8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+ <Limit name="bitrate" range="8000-960000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.g711.alaw.decoder" type="audio/g711-alaw">
+ <Limit name="channel-count" max="1" />
+ <Limit name="sample-rate" ranges="8000-48000" />
+ <Limit name="bitrate" range="64000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.g711.mlaw.decoder" type="audio/g711-mlaw">
+ <Limit name="channel-count" max="1" />
+ <Limit name="sample-rate" ranges="8000-48000" />
+ <Limit name="bitrate" range="64000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.vorbis.decoder" type="audio/vorbis">
+ <Limit name="channel-count" max="8" />
+ <Limit name="sample-rate" ranges="8000-96000" />
+ <Limit name="bitrate" range="32000-500000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.opus.decoder" type="audio/opus">
+ <Limit name="channel-count" max="8" />
+ <Limit name="sample-rate" ranges="48000" />
+ <Limit name="bitrate" range="6000-510000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.raw.decoder" type="audio/raw">
+ <Limit name="channel-count" max="8" />
+ <Limit name="sample-rate" ranges="8000-96000" />
+ <Limit name="bitrate" range="1-10000000" />
+ </MediaCodec>
+ </Decoders>
+ <Encoders>
+ <MediaCodec name="OMX.google.aac.encoder" type="audio/mp4a-latm">
+ <Limit name="channel-count" max="6" />
+ <Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
+ <!-- also may support 64000, 88200 and 96000 Hz -->
+ <Limit name="bitrate" range="8000-960000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.amrnb.encoder" type="audio/3gpp">
+ <Limit name="channel-count" max="1" />
+ <Limit name="sample-rate" ranges="8000" />
+ <Limit name="bitrate" range="4750-12200" />
+ <Feature name="bitrate-modes" value="CBR" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.amrwb.encoder" type="audio/amr-wb">
+ <Limit name="channel-count" max="1" />
+ <Limit name="sample-rate" ranges="16000" />
+ <Limit name="bitrate" range="6600-23850" />
+ <Feature name="bitrate-modes" value="CBR" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.flac.encoder" type="audio/flac">
+ <Limit name="channel-count" max="2" />
+ <Limit name="sample-rate" ranges="1-655350" />
+ <Limit name="bitrate" range="1-21000000" />
+ <Limit name="complexity" range="0-8" default="5" />
+ <Feature name="bitrate-modes" value="CQ" />
+ </MediaCodec>
+ </Encoders>
+</Included>
diff --git a/media/libstagefright/data/media_codecs_google_telephony.xml b/media/libstagefright/data/media_codecs_google_telephony.xml
new file mode 100644
index 0000000..5ad90d9
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_telephony.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.gsm.decoder" type="audio/gsm">
+ <Limit name="channel-count" max="1" />
+ <Limit name="sample-rate" ranges="8000" />
+ <Limit name="bitrate" range="13000" />
+ </MediaCodec>
+ </Decoders>
+</Included>
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
new file mode 100755
index 0000000..740f96b
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.mpeg2.decoder" type="video/mpeg2">
+ <!-- profiles and levels: ProfileMain : LevelHL -->
+ <Limit name="size" min="16x16" max="1920x1088" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="1-244800" />
+ <Limit name="bitrate" range="1-20000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es">
+ <!-- profiles and levels: ProfileSimple : Level3 -->
+ <Limit name="size" min="2x2" max="352x288" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="12-11880" />
+ <Limit name="bitrate" range="1-384000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.h263.decoder" type="video/3gpp">
+ <!-- profiles and levels: ProfileBaseline : Level30, ProfileBaseline : Level45
+ ProfileISWV2 : Level30, ProfileISWV2 : Level45 -->
+ <Limit name="size" min="2x2" max="352x288" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="bitrate" range="1-384000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.h264.decoder" type="video/avc">
+ <!-- profiles and levels: ProfileHigh : Level41 -->
+ <Limit name="size" min="16x16" max="1920x1088" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="1-244800" />
+ <Limit name="bitrate" range="1-12000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.hevc.decoder" type="video/hevc">
+ <!-- profiles and levels: ProfileMain : MainTierLevel51 -->
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="8x8" />
+ <Limit name="block-count" range="1-139264" />
+ <Limit name="blocks-per-second" range="1-2000000" />
+ <Limit name="bitrate" range="1-10000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.vp8.decoder" type="video/x-vnd.on2.vp8">
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="1-1000000" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.vp9.decoder" type="video/x-vnd.on2.vp9">
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="1-500000" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ </Decoders>
+
+ <Encoders>
+ <MediaCodec name="OMX.google.h263.encoder" type="video/3gpp">
+ <!-- profiles and levels: ProfileBaseline : Level45 -->
+ <Limit name="size" min="176x144" max="176x144" />
+ <Limit name="alignment" value="16x16" />
+ <Limit name="bitrate" range="1-128000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.h264.encoder" type="video/avc">
+ <!-- profiles and levels: ProfileBaseline : Level41 -->
+ <Limit name="size" min="16x16" max="1920x1088" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="1-244800" />
+ <Limit name="bitrate" range="1-12000000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es">
+ <!-- profiles and levels: ProfileCore : Level2 -->
+ <Limit name="size" min="16x16" max="176x144" />
+ <Limit name="alignment" value="16x16" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="12-1485" />
+ <Limit name="bitrate" range="1-64000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.vp8.encoder" type="video/x-vnd.on2.vp8">
+ <!-- profiles and levels: ProfileMain : Level_Version0-3 -->
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
+ </MediaCodec>
+ </Encoders>
+</Included>
diff --git a/media/libstagefright/data/media_codecs_google_video_le.xml b/media/libstagefright/data/media_codecs_google_video_le.xml
new file mode 100644
index 0000000..034a038
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_video_le.xml
@@ -0,0 +1,108 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es">
+ <!-- profiles and levels: ProfileSimple : Level3 -->
+ <Limit name="size" min="2x2" max="352x288" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="12-11880" />
+ <Limit name="bitrate" range="1-384000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.h263.decoder" type="video/3gpp">
+ <!-- profiles and levels: ProfileBaseline : Level30, ProfileBaseline : Level45
+ ProfileISWV2 : Level30, ProfileISWV2 : Level45 -->
+ <Limit name="size" min="2x2" max="352x288" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="bitrate" range="1-384000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.h264.decoder" type="video/avc">
+ <!-- profiles and levels: ProfileBaseline : Level51 -->
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-8160" />
+ <Limit name="blocks-per-second" range="1-489600" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.hevc.decoder" type="video/hevc">
+ <!-- profiles and levels: ProfileMain : MainTierLevel51 -->
+ <Limit name="size" min="2x2" max="1280x1280" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="8x8" />
+ <Limit name="block-count" range="1-139264" />
+ <Limit name="blocks-per-second" range="1-432000" />
+ <Limit name="bitrate" range="1-5000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.vp8.decoder" type="video/x-vnd.on2.vp8">
+ <Limit name="size" min="2x2" max="2048x2048" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-8160" />
+ <Limit name="blocks-per-second" range="1-500000" />
+ <Limit name="bitrate" range="1-40000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.vp9.decoder" type="video/x-vnd.on2.vp9">
+ <Limit name="size" min="2x2" max="1280x1280" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="block-count" range="1-3600" />
+ <Limit name="blocks-per-second" range="1-108000" />
+ <Limit name="bitrate" range="1-5000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ </Decoders>
+
+ <Encoders>
+ <MediaCodec name="OMX.google.h263.encoder" type="video/3gpp">
+ <!-- profiles and levels: ProfileBaseline : Level45 -->
+ <Limit name="size" min="176x144" max="176x144" />
+ <Limit name="alignment" value="16x16" />
+ <Limit name="bitrate" range="1-128000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.h264.encoder" type="video/avc">
+ <!-- profiles and levels: ProfileBaseline : Level2 -->
+ <Limit name="size" min="16x16" max="896x896" />
+ <Limit name="alignment" value="16x16" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="1-11880" />
+ <Limit name="bitrate" range="1-2000000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es">
+ <!-- profiles and levels: ProfileCore : Level2 -->
+ <Limit name="size" min="16x16" max="176x144" />
+ <Limit name="alignment" value="16x16" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="12-1485" />
+ <Limit name="bitrate" range="1-64000" />
+ </MediaCodec>
+ <MediaCodec name="OMX.google.vp8.encoder" type="video/x-vnd.on2.vp8">
+ <!-- profiles and levels: ProfileMain : Level_Version0-3 -->
+ <Limit name="size" min="2x2" max="1280x1280" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-count" range="1-3600" />
+ <Limit name="bitrate" range="1-20000000" />
+ <Feature name="bitrate-modes" value="VBR,CBR" />
+ </MediaCodec>
+ </Encoders>
+</Included>
diff --git a/media/libstagefright/foundation/ABitReader.cpp b/media/libstagefright/foundation/ABitReader.cpp
index 5499c32..beb5cc0 100644
--- a/media/libstagefright/foundation/ABitReader.cpp
+++ b/media/libstagefright/foundation/ABitReader.cpp
@@ -27,6 +27,9 @@ ABitReader::ABitReader(const uint8_t *data, size_t size)
mNumBitsLeft(0) {
}
+ABitReader::~ABitReader() {
+}
+
void ABitReader::fillReservoir() {
CHECK_GT(mSize, 0u);
@@ -99,4 +102,69 @@ const uint8_t *ABitReader::data() const {
return mData - (mNumBitsLeft + 7) / 8;
}
+NALBitReader::NALBitReader(const uint8_t *data, size_t size)
+ : ABitReader(data, size),
+ mNumZeros(0) {
+}
+
+bool NALBitReader::atLeastNumBitsLeft(size_t n) const {
+ // check against raw size and reservoir bits first
+ size_t numBits = numBitsLeft();
+ if (n > numBits) {
+ return false;
+ }
+
+ ssize_t numBitsRemaining = n - mNumBitsLeft;
+
+ size_t size = mSize;
+ const uint8_t *data = mData;
+ int32_t numZeros = mNumZeros;
+ while (size > 0 && numBitsRemaining > 0) {
+ bool isEmulationPreventionByte = (numZeros >= 2 && *data == 3);
+
+ if (*data == 0) {
+ ++numZeros;
+ } else {
+ numZeros = 0;
+ }
+
+ if (!isEmulationPreventionByte) {
+ numBitsRemaining -= 8;
+ }
+
+ ++data;
+ --size;
+ }
+
+ return (numBitsRemaining <= 0);
+}
+
+void NALBitReader::fillReservoir() {
+ CHECK_GT(mSize, 0u);
+
+ mReservoir = 0;
+ size_t i = 0;
+ while (mSize > 0 && i < 4) {
+ bool isEmulationPreventionByte = (mNumZeros >= 2 && *mData == 3);
+
+ if (*mData == 0) {
+ ++mNumZeros;
+ } else {
+ mNumZeros = 0;
+ }
+
+ // skip emulation_prevention_three_byte
+ if (!isEmulationPreventionByte) {
+ mReservoir = (mReservoir << 8) | *mData;
+ ++i;
+ }
+
+ ++mData;
+ --mSize;
+ }
+
+ mNumBitsLeft = 8 * i;
+ mReservoir <<= 32 - mNumBitsLeft;
+}
+
} // namespace android
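
NALBitReader::fillReservoir() above skips Annex-B emulation prevention bytes, i.e. a 0x03 that directly follows two zero bytes, which encoders insert so the payload never contains a start-code prefix. A standalone sketch of the same rule applied to a whole buffer (test bytes assumed):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Drop every 0x03 byte that follows two consecutive 0x00 bytes; everything
    // else is RBSP payload. Same test as in NALBitReader::fillReservoir().
    static std::vector<uint8_t> unescapeRbsp(const uint8_t *data, size_t size) {
        std::vector<uint8_t> out;
        int numZeros = 0;
        for (size_t i = 0; i < size; ++i) {
            bool isEmulationPreventionByte = (numZeros >= 2 && data[i] == 3);
            if (data[i] == 0) {
                ++numZeros;
            } else {
                numZeros = 0;
            }
            if (!isEmulationPreventionByte) {
                out.push_back(data[i]);
            }
        }
        return out;
    }

    int main() {
        const uint8_t nal[] = {0x67, 0x00, 0x00, 0x03, 0x00, 0x01};
        for (uint8_t b : unescapeRbsp(nal, sizeof(nal))) {
            printf("%02x ", b);                 // prints: 67 00 00 00 01
        }
        printf("\n");
        return 0;
    }
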
diff --git a/media/libstagefright/foundation/ABuffer.cpp b/media/libstagefright/foundation/ABuffer.cpp
index 6173db4..b214870 100644
--- a/media/libstagefright/foundation/ABuffer.cpp
+++ b/media/libstagefright/foundation/ABuffer.cpp
@@ -19,11 +19,13 @@
#include "ADebug.h"
#include "ALooper.h"
#include "AMessage.h"
+#include "MediaBufferBase.h"
namespace android {
ABuffer::ABuffer(size_t capacity)
- : mData(malloc(capacity)),
+ : mMediaBufferBase(NULL),
+ mData(malloc(capacity)),
mCapacity(capacity),
mRangeOffset(0),
mRangeLength(capacity),
@@ -32,7 +34,8 @@ ABuffer::ABuffer(size_t capacity)
}
ABuffer::ABuffer(void *data, size_t capacity)
- : mData(data),
+ : mMediaBufferBase(NULL),
+ mData(data),
mCapacity(capacity),
mRangeOffset(0),
mRangeLength(capacity),
@@ -40,6 +43,14 @@ ABuffer::ABuffer(void *data, size_t capacity)
mOwnsData(false) {
}
+// static
+sp<ABuffer> ABuffer::CreateAsCopy(const void *data, size_t capacity)
+{
+ sp<ABuffer> res = new ABuffer(capacity);
+ memcpy(res->data(), data, capacity);
+ return res;
+}
+
ABuffer::~ABuffer() {
if (mOwnsData) {
if (mData != NULL) {
@@ -51,6 +62,8 @@ ABuffer::~ABuffer() {
if (mFarewell != NULL) {
mFarewell->post();
}
+
+ setMediaBufferBase(NULL);
}
void ABuffer::setRange(size_t offset, size_t size) {
@@ -72,5 +85,19 @@ sp<AMessage> ABuffer::meta() {
return mMeta;
}
+MediaBufferBase *ABuffer::getMediaBufferBase() {
+ if (mMediaBufferBase != NULL) {
+ mMediaBufferBase->add_ref();
+ }
+ return mMediaBufferBase;
+}
+
+void ABuffer::setMediaBufferBase(MediaBufferBase *mediaBuffer) {
+ if (mMediaBufferBase != NULL) {
+ mMediaBufferBase->release();
+ }
+ mMediaBufferBase = mediaBuffer;
+}
+
} // namespace android
diff --git a/media/libstagefright/foundation/ADebug.cpp b/media/libstagefright/foundation/ADebug.cpp
new file mode 100644
index 0000000..ec4a960
--- /dev/null
+++ b/media/libstagefright/foundation/ADebug.cpp
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <errno.h>
+#include <stdlib.h>
+#include <ctype.h>
+
+#define LOG_TAG "ADebug"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include <cutils/properties.h>
+
+#include <ADebug.h>
+#include <AStringUtils.h>
+#include <AUtils.h>
+
+namespace android {
+
+//static
+ADebug::Level ADebug::GetDebugLevelFromString(
+ const char *name, const char *value, ADebug::Level def) {
+ // split on ,
+ const char *next = value, *current;
+ const unsigned long maxLevel = (unsigned long)kDebugMax;
+ while (next != NULL) {
+ current = next;
+ next = strchr(current, ',');
+ if (next != NULL) {
+ ++next; // pass ,
+ }
+
+ while (isspace(*current)) {
+ ++current;
+ }
+ // check for :
+ char *colon = strchr(current, ':');
+
+ // get level
+ char *end;
+ errno = 0; // strtoul does not clear errno, but it can be set for any return value
+ unsigned long level = strtoul(current, &end, 10);
+ while (isspace(*end)) {
+ ++end;
+ }
+ if (errno != 0 || end == current || (end != colon && *end != '\0' && end != next)) {
+ // invalid level - skip
+ continue;
+ }
+ if (colon != NULL) {
+ // check if pattern matches
+ do { // skip colon and spaces
+ ++colon;
+ } while (isspace(*colon));
+ size_t globLen = (next == NULL ? strlen(colon) : (next - 1 - colon));
+ while (globLen > 0 && isspace(colon[globLen - 1])) {
+ --globLen; // trim glob
+ }
+
+ if (!AStringUtils::MatchesGlob(
+ colon, globLen, name, strlen(name), true /* ignoreCase */)) {
+ continue;
+ }
+ }
+
+ // update debug level
+ def = (Level)min(level, maxLevel);
+ }
+ return def;
+}
+
+//static
+ADebug::Level ADebug::GetDebugLevelFromProperty(
+ const char *name, const char *propertyName, ADebug::Level def) {
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get(propertyName, value, NULL)) {
+ return GetDebugLevelFromString(name, value, def);
+ }
+ return def;
+}
+
+//static
+char *ADebug::GetDebugName(const char *name) {
+ char *debugName = strdup(name);
+ const char *terms[] = { "omx", "video", "audio" };
+ for (size_t i = 0; i < NELEM(terms) && debugName != NULL; i++) {
+ const char *term = terms[i];
+ const size_t len = strlen(term);
+ char *match = strcasestr(debugName, term);
+ if (match != NULL && (match == debugName || match[-1] == '.'
+ || match[len] == '.' || match[len] == '\0')) {
+ char *src = match + len;
+ if (match == debugName || match[-1] == '.') {
+ src += (*src == '.'); // remove trailing or double .
+ }
+ memmove(match, src, debugName + strlen(debugName) - src + 1);
+ }
+ }
+
+ return debugName;
+}
+
+} // namespace android
+
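
GetDebugLevelFromString() above takes a comma-separated list of "<level>" or "<level>:<glob>" entries and matches the glob case-insensitively against the instance name. A small sketch of the resulting behaviour (component names and the property value are illustrative only, not from this change):

    #include <media/stagefright/foundation/ADebug.h>

    using namespace android;

    void debugLevelExample() {
        const char *value = "1:omx.google.*";   // e.g. the value of a debug property

        // The glob matches (case-insensitively), so level 1 is selected.
        ADebug::Level a = ADebug::GetDebugLevelFromString(
                "OMX.google.h264.decoder", value, (ADebug::Level)0);

        // The glob does not match, so the passed-in default is returned.
        ADebug::Level b = ADebug::GetDebugLevelFromString(
                "some.vendor.decoder", value, (ADebug::Level)0);

        (void)a; (void)b;
    }
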
diff --git a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
index f7a00d8..5f7c70d 100644
--- a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
+++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp
@@ -51,7 +51,7 @@ AHierarchicalStateMachine::AHierarchicalStateMachine() {
AHierarchicalStateMachine::~AHierarchicalStateMachine() {
}
-void AHierarchicalStateMachine::onMessageReceived(const sp<AMessage> &msg) {
+void AHierarchicalStateMachine::handleMessage(const sp<AMessage> &msg) {
sp<AState> save = mState;
sp<AState> cur = mState;
diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp
index ebf9d8d..88b1c92 100644
--- a/media/libstagefright/foundation/ALooper.cpp
+++ b/media/libstagefright/foundation/ALooper.cpp
@@ -68,14 +68,14 @@ int64_t ALooper::GetNowUs() {
ALooper::ALooper()
: mRunningLocally(false) {
+ // clean up stale AHandlers. Doing it here instead of in the destructor avoids
+ // the side effect of objects being deleted from the unregister function recursively.
+ gLooperRoster.unregisterStaleHandlers();
}
ALooper::~ALooper() {
stop();
-
- // Since this looper is "dead" (or as good as dead by now),
- // have ALooperRoster unregister any handlers still registered for it.
- gLooperRoster.unregisterStaleHandlers();
+ // stale AHandlers are now cleaned up in the constructor of the next ALooper to come along
}
void ALooper::setName(const char *name) {
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index 0c181ff..2d57aee 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ALooperRoster"
#include <utils/Log.h>
+#include <utils/String8.h>
#include "ALooperRoster.h"
@@ -26,6 +27,8 @@
namespace android {
+static bool verboseStats = false;
+
ALooperRoster::ALooperRoster()
: mNextHandlerID(1),
mNextReplyID(1) {
@@ -72,50 +75,40 @@ void ALooperRoster::unregisterHandler(ALooper::handler_id handlerID) {
}
void ALooperRoster::unregisterStaleHandlers() {
- Mutex::Autolock autoLock(mLock);
- for (size_t i = mHandlers.size(); i-- > 0;) {
- const HandlerInfo &info = mHandlers.valueAt(i);
+ Vector<sp<ALooper> > activeLoopers;
+ {
+ Mutex::Autolock autoLock(mLock);
- sp<ALooper> looper = info.mLooper.promote();
- if (looper == NULL) {
- ALOGV("Unregistering stale handler %d", mHandlers.keyAt(i));
- mHandlers.removeItemsAt(i);
+ for (size_t i = mHandlers.size(); i-- > 0;) {
+ const HandlerInfo &info = mHandlers.valueAt(i);
+
+ sp<ALooper> looper = info.mLooper.promote();
+ if (looper == NULL) {
+ ALOGV("Unregistering stale handler %d", mHandlers.keyAt(i));
+ mHandlers.removeItemsAt(i);
+ } else {
+ // At this point 'looper' might be the only sp<> keeping
+ // the object alive. To prevent it from going out of scope
+ // and having ~ALooper call this method again recursively
+ // and then deadlocking because of the Autolock above, add
+ // it to a Vector which will go out of scope after the lock
+ // has been released.
+ activeLoopers.add(looper);
+ }
}
}
}
status_t ALooperRoster::postMessage(
const sp<AMessage> &msg, int64_t delayUs) {
- Mutex::Autolock autoLock(mLock);
- return postMessage_l(msg, delayUs);
-}
-
-status_t ALooperRoster::postMessage_l(
- const sp<AMessage> &msg, int64_t delayUs) {
- ssize_t index = mHandlers.indexOfKey(msg->target());
- if (index < 0) {
- ALOGW("failed to post message '%s'. Target handler not registered.",
- msg->debugString().c_str());
- return -ENOENT;
- }
-
- const HandlerInfo &info = mHandlers.valueAt(index);
-
- sp<ALooper> looper = info.mLooper.promote();
+ sp<ALooper> looper = findLooper(msg->target());
if (looper == NULL) {
- ALOGW("failed to post message. "
- "Target handler %d still registered, but object gone.",
- msg->target());
-
- mHandlers.removeItemsAt(index);
return -ENOENT;
}
-
looper->post(msg, delayUs);
-
return OK;
}
@@ -146,6 +139,17 @@ void ALooperRoster::deliverMessage(const sp<AMessage> &msg) {
}
handler->onMessageReceived(msg);
+ handler->mMessageCounter++;
+
+ if (verboseStats) {
+ uint32_t what = msg->what();
+ ssize_t idx = handler->mMessages.indexOfKey(what);
+ if (idx < 0) {
+ handler->mMessages.add(what, 1);
+ } else {
+ handler->mMessages.editValueAt(idx)++;
+ }
+ }
}
sp<ALooper> ALooperRoster::findLooper(ALooper::handler_id handlerID) {
@@ -169,18 +173,23 @@ sp<ALooper> ALooperRoster::findLooper(ALooper::handler_id handlerID) {
status_t ALooperRoster::postAndAwaitResponse(
const sp<AMessage> &msg, sp<AMessage> *response) {
+ sp<ALooper> looper = findLooper(msg->target());
+
+ if (looper == NULL) {
+ ALOGW("failed to post message. "
+ "Target handler %d still registered, but object gone.",
+ msg->target());
+ response->clear();
+ return -ENOENT;
+ }
+
Mutex::Autolock autoLock(mLock);
uint32_t replyID = mNextReplyID++;
msg->setInt32("replyID", replyID);
- status_t err = postMessage_l(msg, 0 /* delayUs */);
-
- if (err != OK) {
- response->clear();
- return err;
- }
+ looper->post(msg, 0 /* delayUs */);
ssize_t index;
while ((index = mReplies.indexOfKey(replyID)) < 0) {
@@ -201,4 +210,72 @@ void ALooperRoster::postReply(uint32_t replyID, const sp<AMessage> &reply) {
mRepliesCondition.broadcast();
}
+static void makeFourCC(uint32_t fourcc, char *s) {
+ s[0] = (fourcc >> 24) & 0xff;
+ if (s[0]) {
+ s[1] = (fourcc >> 16) & 0xff;
+ s[2] = (fourcc >> 8) & 0xff;
+ s[3] = fourcc & 0xff;
+ s[4] = 0;
+ } else {
+ sprintf(s, "%u", fourcc);
+ }
+}
+
+void ALooperRoster::dump(int fd, const Vector<String16>& args) {
+ bool clear = false;
+ bool oldVerbose = verboseStats;
+ for (size_t i = 0;i < args.size(); i++) {
+ if (args[i] == String16("-c")) {
+ clear = true;
+ } else if (args[i] == String16("-von")) {
+ verboseStats = true;
+ } else if (args[i] == String16("-voff")) {
+ verboseStats = false;
+ }
+ }
+ String8 s;
+ if (verboseStats && !oldVerbose) {
+ s.append("(verbose stats collection enabled, stats will be cleared)\n");
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ size_t n = mHandlers.size();
+ s.appendFormat(" %zd registered handlers:\n", n);
+
+ for (size_t i = 0; i < n; i++) {
+ s.appendFormat(" %zd: ", i);
+ HandlerInfo &info = mHandlers.editValueAt(i);
+ sp<ALooper> looper = info.mLooper.promote();
+ if (looper != NULL) {
+ s.append(looper->mName.c_str());
+ sp<AHandler> handler = info.mHandler.promote();
+ if (handler != NULL) {
+ s.appendFormat(": %u messages processed", handler->mMessageCounter);
+ if (verboseStats) {
+ for (size_t j = 0; j < handler->mMessages.size(); j++) {
+ char fourcc[15];
+ makeFourCC(handler->mMessages.keyAt(j), fourcc);
+ s.appendFormat("\n %s: %d",
+ fourcc,
+ handler->mMessages.valueAt(j));
+ }
+ } else {
+ handler->mMessages.clear();
+ }
+ if (clear || (verboseStats && !oldVerbose)) {
+ handler->mMessageCounter = 0;
+ handler->mMessages.clear();
+ }
+ } else {
+ s.append(": <stale handler>");
+ }
+ } else {
+ s.append("<stale>");
+ }
+ s.append("\n");
+ }
+ write(fd, s.string(), s.size());
+}
+
} // namespace android
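
The new dump() labels per-handler message counters with makeFourCC(), since AMessage 'what' values are conventionally packed four-character codes. A tiny standalone check of that formatting (sample values assumed):

    #include <cstdint>
    #include <cstdio>

    // Mirrors makeFourCC() above: a printable code is decoded back to text,
    // anything whose leading byte is zero falls back to the decimal value.
    static void makeFourCC(uint32_t fourcc, char *s) {
        s[0] = (fourcc >> 24) & 0xff;
        if (s[0]) {
            s[1] = (fourcc >> 16) & 0xff;
            s[2] = (fourcc >> 8) & 0xff;
            s[3] = fourcc & 0xff;
            s[4] = 0;
        } else {
            sprintf(s, "%u", fourcc);
        }
    }

    int main() {
        char buf[15];
        makeFourCC(0x666c7368, buf);   // 'flsh'
        printf("%s\n", buf);           // prints: flsh
        makeFourCC(42, buf);
        printf("%s\n", buf);           // prints: 42
        return 0;
    }
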
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index dc42f91..1f46bc9 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -14,6 +14,11 @@
* limitations under the License.
*/
+#define LOG_TAG "AMessage"
+//#define LOG_NDEBUG 0
+//#define DUMP_STATS
+#include <cutils/log.h>
+
#include "AMessage.h"
#include <ctype.h>
@@ -60,12 +65,14 @@ ALooper::handler_id AMessage::target() const {
void AMessage::clear() {
for (size_t i = 0; i < mNumItems; ++i) {
Item *item = &mItems[i];
- freeItem(item);
+ delete[] item->mName;
+ item->mName = NULL;
+ freeItemValue(item);
}
mNumItems = 0;
}
-void AMessage::freeItem(Item *item) {
+void AMessage::freeItemValue(Item *item) {
switch (item->mType) {
case kTypeString:
{
@@ -88,25 +95,85 @@ void AMessage::freeItem(Item *item) {
}
}
-AMessage::Item *AMessage::allocateItem(const char *name) {
- name = AAtomizer::Atomize(name);
+#ifdef DUMP_STATS
+#include <utils/Mutex.h>
+
+Mutex gLock;
+static int32_t gFindItemCalls = 1;
+static int32_t gDupCalls = 1;
+static int32_t gAverageNumItems = 0;
+static int32_t gAverageNumChecks = 0;
+static int32_t gAverageNumMemChecks = 0;
+static int32_t gAverageDupItems = 0;
+static int32_t gLastChecked = -1;
+
+static void reportStats() {
+ int32_t time = (ALooper::GetNowUs() / 1000);
+ if (time / 1000 != gLastChecked / 1000) {
+ gLastChecked = time;
+ ALOGI("called findItemIx %zu times (for len=%.1f i=%.1f/%.1f mem) dup %zu times (for len=%.1f)",
+ gFindItemCalls,
+ gAverageNumItems / (float)gFindItemCalls,
+ gAverageNumChecks / (float)gFindItemCalls,
+ gAverageNumMemChecks / (float)gFindItemCalls,
+ gDupCalls,
+ gAverageDupItems / (float)gDupCalls);
+ gFindItemCalls = gDupCalls = 1;
+ gAverageNumItems = gAverageNumChecks = gAverageNumMemChecks = gAverageDupItems = 0;
+ gLastChecked = time;
+ }
+}
+#endif
+inline size_t AMessage::findItemIndex(const char *name, size_t len) const {
+#ifdef DUMP_STATS
+ size_t memchecks = 0;
+#endif
size_t i = 0;
- while (i < mNumItems && mItems[i].mName != name) {
- ++i;
+ for (; i < mNumItems; i++) {
+ if (len != mItems[i].mNameLength) {
+ continue;
+ }
+#ifdef DUMP_STATS
+ ++memchecks;
+#endif
+ if (!memcmp(mItems[i].mName, name, len)) {
+ break;
+ }
}
+#ifdef DUMP_STATS
+ {
+ Mutex::Autolock _l(gLock);
+ ++gFindItemCalls;
+ gAverageNumItems += mNumItems;
+ gAverageNumMemChecks += memchecks;
+ gAverageNumChecks += i;
+ reportStats();
+ }
+#endif
+ return i;
+}
+
+// assumes item's name was uninitialized or NULL
+void AMessage::Item::setName(const char *name, size_t len) {
+ mNameLength = len;
+ mName = new char[len + 1];
+ memcpy((void*)mName, name, len + 1);
+}
+AMessage::Item *AMessage::allocateItem(const char *name) {
+ size_t len = strlen(name);
+ size_t i = findItemIndex(name, len);
Item *item;
if (i < mNumItems) {
item = &mItems[i];
- freeItem(item);
+ freeItemValue(item);
} else {
CHECK(mNumItems < kMaxNumItems);
i = mNumItems++;
item = &mItems[i];
-
- item->mName = name;
+ item->setName(name, len);
}
return item;
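
The lookup above replaces the old atomized-pointer comparison with a linear scan that checks name lengths first and only calls memcmp on a length match, so the global AAtomizer lock and interned-string table are no longer touched for every item access; the scan stays cheap because a message holds only a handful of items. A standalone sketch of that strategy, with illustrative types rather than the real AMessage internals:

    #include <cstddef>
    #include <cstring>

    struct Item {
        const char *name;
        size_t nameLength;
    };

    // Returns numItems when the name is not present, mirroring findItemIndex().
    size_t findItemIndex(const Item *items, size_t numItems,
                         const char *name, size_t len) {
        size_t i = 0;
        for (; i < numItems; ++i) {
            if (len != items[i].nameLength) {
                continue;   // cheap length check avoids most memcmp calls
            }
            if (!memcmp(items[i].name, name, len)) {
                break;
            }
        }
        return i;
    }
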
@@ -114,19 +181,20 @@ AMessage::Item *AMessage::allocateItem(const char *name) {
const AMessage::Item *AMessage::findItem(
const char *name, Type type) const {
- name = AAtomizer::Atomize(name);
-
- for (size_t i = 0; i < mNumItems; ++i) {
+ size_t i = findItemIndex(name, strlen(name));
+ if (i < mNumItems) {
const Item *item = &mItems[i];
+ return item->mType == type ? item : NULL;
- if (item->mName == name) {
- return item->mType == type ? item : NULL;
- }
}
-
return NULL;
}
+bool AMessage::contains(const char *name) const {
+ size_t i = findItemIndex(name, strlen(name));
+ return i < mNumItems;
+}
+
#define BASIC_TYPE(NAME,FIELDNAME,TYPENAME) \
void AMessage::set##NAME(const char *name, TYPENAME value) { \
Item *item = allocateItem(name); \
@@ -160,6 +228,11 @@ void AMessage::setString(
item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
}
+void AMessage::setString(
+ const char *name, const AString &s) {
+ setString(name, s.c_str(), s.size());
+}
+
void AMessage::setObjectInternal(
const char *name, const sp<RefBase> &obj, Type type) {
Item *item = allocateItem(name);
@@ -278,11 +351,20 @@ sp<AMessage> AMessage::dup() const {
sp<AMessage> msg = new AMessage(mWhat, mTarget);
msg->mNumItems = mNumItems;
+#ifdef DUMP_STATS
+ {
+ Mutex::Autolock _l(gLock);
+ ++gDupCalls;
+ gAverageDupItems += mNumItems;
+ reportStats();
+ }
+#endif
+
for (size_t i = 0; i < mNumItems; ++i) {
const Item *from = &mItems[i];
Item *to = &msg->mItems[i];
- to->mName = from->mName;
+ to->setName(from->mName, from->mNameLength);
to->mType = from->mType;
switch (from->mType) {
@@ -344,19 +426,19 @@ AString AMessage::debugString(int32_t indent) const {
AString tmp;
if (isFourcc(mWhat)) {
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"'%c%c%c%c'",
(char)(mWhat >> 24),
(char)((mWhat >> 16) & 0xff),
(char)((mWhat >> 8) & 0xff),
(char)(mWhat & 0xff));
} else {
- tmp = StringPrintf("0x%08x", mWhat);
+ tmp = AStringPrintf("0x%08x", mWhat);
}
s.append(tmp);
if (mTarget != 0) {
- tmp = StringPrintf(", target = %d", mTarget);
+ tmp = AStringPrintf(", target = %d", mTarget);
s.append(tmp);
}
s.append(") = {\n");
@@ -366,56 +448,56 @@ AString AMessage::debugString(int32_t indent) const {
switch (item.mType) {
case kTypeInt32:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"int32_t %s = %d", item.mName, item.u.int32Value);
break;
case kTypeInt64:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"int64_t %s = %lld", item.mName, item.u.int64Value);
break;
case kTypeSize:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"size_t %s = %d", item.mName, item.u.sizeValue);
break;
case kTypeFloat:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"float %s = %f", item.mName, item.u.floatValue);
break;
case kTypeDouble:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"double %s = %f", item.mName, item.u.doubleValue);
break;
case kTypePointer:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"void *%s = %p", item.mName, item.u.ptrValue);
break;
case kTypeString:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"string %s = \"%s\"",
item.mName,
item.u.stringValue->c_str());
break;
case kTypeObject:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"RefBase *%s = %p", item.mName, item.u.refValue);
break;
case kTypeBuffer:
{
sp<ABuffer> buffer = static_cast<ABuffer *>(item.u.refValue);
- if (buffer != NULL && buffer->size() <= 64) {
- tmp = StringPrintf("Buffer %s = {\n", item.mName);
+ if (buffer != NULL && buffer->data() != NULL && buffer->size() <= 64) {
+ tmp = AStringPrintf("Buffer %s = {\n", item.mName);
hexdump(buffer->data(), buffer->size(), indent + 4, &tmp);
appendIndent(&tmp, indent + 2);
tmp.append("}");
} else {
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"Buffer *%s = %p", item.mName, buffer.get());
}
break;
}
case kTypeMessage:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"AMessage %s = %s",
item.mName,
static_cast<AMessage *>(
@@ -423,7 +505,7 @@ AString AMessage::debugString(int32_t indent) const {
indent + strlen(item.mName) + 14).c_str());
break;
case kTypeRect:
- tmp = StringPrintf(
+ tmp = AStringPrintf(
"Rect %s(%d, %d, %d, %d)",
item.mName,
item.u.rectValue.mLeft,
@@ -453,11 +535,11 @@ sp<AMessage> AMessage::FromParcel(const Parcel &parcel) {
sp<AMessage> msg = new AMessage(what);
msg->mNumItems = static_cast<size_t>(parcel.readInt32());
-
for (size_t i = 0; i < msg->mNumItems; ++i) {
Item *item = &msg->mItems[i];
- item->mName = AAtomizer::Atomize(parcel.readCString());
+ const char *name = parcel.readCString();
+ item->setName(name, strlen(name));
item->mType = static_cast<Type>(parcel.readInt32());
switch (item->mType) {
diff --git a/media/libstagefright/foundation/ANetworkSession.cpp b/media/libstagefright/foundation/ANetworkSession.cpp
index e629588..b230400 100644
--- a/media/libstagefright/foundation/ANetworkSession.cpp
+++ b/media/libstagefright/foundation/ANetworkSession.cpp
@@ -187,7 +187,7 @@ ANetworkSession::Session::Session(
CHECK_GE(res, 0);
in_addr_t addr = ntohl(localAddr.sin_addr.s_addr);
- AString localAddrString = StringPrintf(
+ AString localAddrString = AStringPrintf(
"%d.%d.%d.%d",
(addr >> 24),
(addr >> 16) & 0xff,
@@ -195,7 +195,7 @@ ANetworkSession::Session::Session(
addr & 0xff);
addr = ntohl(remoteAddr.sin_addr.s_addr);
- AString remoteAddrString = StringPrintf(
+ AString remoteAddrString = AStringPrintf(
"%d.%d.%d.%d",
(addr >> 24),
(addr >> 16) & 0xff,
@@ -301,7 +301,7 @@ status_t ANetworkSession::Session::readMore() {
uint32_t ip = ntohl(remoteAddr.sin_addr.s_addr);
notify->setString(
"fromAddr",
- StringPrintf(
+ AStringPrintf(
"%u.%u.%u.%u",
ip >> 24,
(ip >> 16) & 0xff,
@@ -521,7 +521,7 @@ status_t ANetworkSession::Session::readMore() {
return err;
}
-void ANetworkSession::Session::dumpFragmentStats(const Fragment &frag) {
+void ANetworkSession::Session::dumpFragmentStats(const Fragment & /* frag */) {
#if 0
int64_t nowUs = ALooper::GetNowUs();
int64_t delayMs = (nowUs - frag.mTimeUs) / 1000ll;
@@ -579,7 +579,7 @@ status_t ANetworkSession::Session::writeMore() {
if (err == -EAGAIN) {
if (!mOutFragments.empty()) {
- ALOGI("%d datagrams remain queued.", mOutFragments.size());
+ ALOGI("%zu datagrams remain queued.", mOutFragments.size());
}
err = OK;
}
@@ -623,7 +623,7 @@ status_t ANetworkSession::Session::writeMore() {
CHECK_EQ(mState, CONNECTED);
CHECK(!mOutFragments.empty());
- ssize_t n;
+ ssize_t n = -1;
while (!mOutFragments.empty()) {
const Fragment &frag = *mOutFragments.begin();
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index dee786d..b167543 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -20,6 +20,8 @@
#include <stdlib.h>
#include <string.h>
+#include <binder/Parcel.h>
+#include <utils/String8.h>
#include "ADebug.h"
#include "AString.h"
@@ -48,6 +50,13 @@ AString::AString(const char *s, size_t size)
setTo(s, size);
}
+AString::AString(const String8 &from)
+ : mData(NULL),
+ mSize(0),
+ mAllocSize(1) {
+ setTo(from.string(), from.length());
+}
+
AString::AString(const AString &from)
: mData(NULL),
mSize(0),
@@ -189,64 +198,64 @@ void AString::append(const AString &from, size_t offset, size_t n) {
void AString::append(int x) {
char s[16];
- sprintf(s, "%d", x);
-
+ int result = snprintf(s, sizeof(s), "%d", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(unsigned x) {
char s[16];
- sprintf(s, "%u", x);
-
+ int result = snprintf(s, sizeof(s), "%u", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(long x) {
- char s[16];
- sprintf(s, "%ld", x);
-
+ char s[32];
+ int result = snprintf(s, sizeof(s), "%ld", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(unsigned long x) {
- char s[16];
- sprintf(s, "%lu", x);
-
+ char s[32];
+ int result = snprintf(s, sizeof(s), "%lu", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(long long x) {
char s[32];
- sprintf(s, "%lld", x);
-
+ int result = snprintf(s, sizeof(s), "%lld", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(unsigned long long x) {
char s[32];
- sprintf(s, "%llu", x);
-
+ int result = snprintf(s, sizeof(s), "%llu", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(float x) {
char s[16];
- sprintf(s, "%f", x);
-
+ int result = snprintf(s, sizeof(s), "%f", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(double x) {
char s[16];
- sprintf(s, "%f", x);
-
+ int result = snprintf(s, sizeof(s), "%f", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
void AString::append(void *x) {
- char s[16];
- sprintf(s, "%p", x);
-
+ char s[32];
+ int result = snprintf(s, sizeof(s), "%p", x);
+ CHECK((result > 0) && ((size_t) result) < sizeof(s));
append(s);
}
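
Each numeric append() above now follows the same pattern: snprintf into a fixed scratch buffer and CHECK that the result is positive and fits. A generic sketch of that pattern, using assert in place of the CHECK macro and std::string in place of AString (names are illustrative):

    #include <cassert>
    #include <cstdio>
    #include <string>

    // Append a formatted value, asserting that it fit the scratch buffer,
    // as the patched AString::append overloads do with CHECK().
    template <typename T>
    void appendFormatted(std::string &out, const char *fmt, T value) {
        char s[32];
        int result = snprintf(s, sizeof(s), fmt, value);
        assert(result > 0 && (size_t)result < sizeof(s));
        out += s;
    }

    int main() {
        std::string s;
        appendFormatted(s, "%lld", 9223372036854775807LL);  // 19 digits, fits in 32
        appendFormatted(s, "%f", 3.5);
        return 0;
    }
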
@@ -298,6 +307,14 @@ int AString::compare(const AString &other) const {
return strcmp(mData, other.mData);
}
+int AString::compareIgnoreCase(const AString &other) const {
+ return strcasecmp(mData, other.mData);
+}
+
+bool AString::equalsIgnoreCase(const AString &other) const {
+ return compareIgnoreCase(other) == 0;
+}
+
void AString::tolower() {
makeMutable();
@@ -320,7 +337,36 @@ bool AString::endsWith(const char *suffix) const {
return !strcmp(mData + mSize - suffixLen, suffix);
}
-AString StringPrintf(const char *format, ...) {
+bool AString::startsWithIgnoreCase(const char *prefix) const {
+ return !strncasecmp(mData, prefix, strlen(prefix));
+}
+
+bool AString::endsWithIgnoreCase(const char *suffix) const {
+ size_t suffixLen = strlen(suffix);
+
+ if (mSize < suffixLen) {
+ return false;
+ }
+
+ return !strcasecmp(mData + mSize - suffixLen, suffix);
+}
+
+// static
+AString AString::FromParcel(const Parcel &parcel) {
+ size_t size = static_cast<size_t>(parcel.readInt32());
+ return AString(static_cast<const char *>(parcel.readInplace(size)), size);
+}
+
+status_t AString::writeToParcel(Parcel *parcel) const {
+ CHECK_LE(mSize, static_cast<size_t>(INT32_MAX));
+ status_t err = parcel->writeInt32(mSize);
+ if (err == OK) {
+ err = parcel->write(mData, mSize);
+ }
+ return err;
+}
+
+AString AStringPrintf(const char *format, ...) {
va_list ap;
va_start(ap, format);
diff --git a/media/libstagefright/foundation/AStringUtils.cpp b/media/libstagefright/foundation/AStringUtils.cpp
new file mode 100644
index 0000000..e5a846c
--- /dev/null
+++ b/media/libstagefright/foundation/AStringUtils.cpp
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <AStringUtils.h>
+
+namespace android {
+
+// static
+int AStringUtils::Compare(const char *a, const char *b, size_t len, bool ignoreCase) {
+ // this method relies on a trailing '\0' if a or b are shorter than len
+ return ignoreCase ? strncasecmp(a, b, len) : strncmp(a, b, len);
+}
+
+// static
+bool AStringUtils::MatchesGlob(
+ const char *glob, size_t globLen, const char *str, size_t strLen, bool ignoreCase) {
+ // this method does not assume a trailing '\0'
+ size_t ix = 0, globIx = 0;
+
+ // pattern must match until first '*'
+ while (globIx < globLen && glob[globIx] != '*') {
+ ++globIx;
+ }
+ if (strLen < globIx || Compare(str, glob, globIx /* len */, ignoreCase)) {
+ return false;
+ }
+ ix = globIx;
+
+ // process by * separated sections
+ while (globIx < globLen) {
+ ++globIx;
+ size_t start = globIx;
+ while (globIx < globLen && glob[globIx] != '*') {
+ ++globIx;
+ }
+ size_t len = globIx - start;
+ const char *pattern = glob + start;
+
+ if (globIx == globLen) {
+ // last pattern must match tail
+ if (ix + len > strLen) {
+ return false;
+ }
+ const char *tail = str + strLen - len;
+ return !Compare(tail, pattern, len, ignoreCase);
+ }
+ // progress after first occurrence of pattern
+ while (ix + len <= strLen && Compare(str + ix, pattern, len, ignoreCase)) {
+ ++ix;
+ }
+ if (ix + len > strLen) {
+ return false;
+ }
+ ix += len;
+ // we will loop around as globIx < globLen
+ }
+
+ // we only get here if there were no * in the pattern
+ return ix == strLen;
+}
+
+} // namespace android
+
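
MatchesGlob treats the pattern as literal sections separated by '*': the first section must match the head of the string, the last must match its tail, each middle section must occur in order in between, and with no '*' at all only an exact (optionally case-insensitive) match succeeds. A usage sketch, assuming the libstagefright_foundation headers are on the include path (the include path below is an assumption):

    #include <cstring>
    #include <media/stagefright/foundation/AStringUtils.h>  // path assumed

    using android::AStringUtils;

    bool demo() {
        const char *mime = "audio/mp4a-latm";
        const char *glob = "audio/*";
        // head "audio/" matches and the trailing '*' swallows the rest => true
        bool a = AStringUtils::MatchesGlob(glob, strlen(glob),
                                           mime, strlen(mime), false /* ignoreCase */);
        // no '*': only an exact, here case-insensitive, match succeeds => true
        bool b = AStringUtils::MatchesGlob("AUDIO/MP4A-LATM", 15,
                                           mime, strlen(mime), true /* ignoreCase */);
        return a && b;
    }
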
diff --git a/media/libstagefright/foundation/AWakeLock.cpp b/media/libstagefright/foundation/AWakeLock.cpp
new file mode 100644
index 0000000..d9277ac
--- /dev/null
+++ b/media/libstagefright/foundation/AWakeLock.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AWakeLock"
+#include <utils/Log.h>
+
+#include "ADebug.h"
+#include "AWakeLock.h"
+
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <powermanager/PowerManager.h>
+
+
+namespace android {
+
+AWakeLock::AWakeLock() :
+ mPowerManager(NULL),
+ mWakeLockToken(NULL),
+ mWakeLockCount(0),
+ mDeathRecipient(new PMDeathRecipient(this)) {}
+
+AWakeLock::~AWakeLock() {
+ if (mPowerManager != NULL) {
+ sp<IBinder> binder = IInterface::asBinder(mPowerManager);
+ binder->unlinkToDeath(mDeathRecipient);
+ }
+ clearPowerManager();
+}
+
+bool AWakeLock::acquire() {
+ if (mWakeLockCount == 0) {
+ CHECK(mWakeLockToken == NULL);
+ if (mPowerManager == NULL) {
+ // use checkService() to avoid blocking if power service is not up yet
+ sp<IBinder> binder =
+ defaultServiceManager()->checkService(String16("power"));
+ if (binder == NULL) {
+ ALOGW("could not get the power manager service");
+ } else {
+ mPowerManager = interface_cast<IPowerManager>(binder);
+ binder->linkToDeath(mDeathRecipient);
+ }
+ }
+ if (mPowerManager != NULL) {
+ sp<IBinder> binder = new BBinder();
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ status_t status = mPowerManager->acquireWakeLock(
+ POWERMANAGER_PARTIAL_WAKE_LOCK,
+ binder, String16("AWakeLock"), String16("media"));
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ if (status == NO_ERROR) {
+ mWakeLockToken = binder;
+ mWakeLockCount++;
+ return true;
+ }
+ }
+ } else {
+ mWakeLockCount++;
+ return true;
+ }
+ return false;
+}
+
+void AWakeLock::release(bool force) {
+ if (mWakeLockCount == 0) {
+ return;
+ }
+ if (force) {
+ // Force wakelock release below by setting reference count to 1.
+ mWakeLockCount = 1;
+ }
+ if (--mWakeLockCount == 0) {
+ CHECK(mWakeLockToken != NULL);
+ if (mPowerManager != NULL) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ mPowerManager->releaseWakeLock(mWakeLockToken, 0 /* flags */);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ }
+ mWakeLockToken.clear();
+ }
+}
+
+void AWakeLock::clearPowerManager() {
+ release(true);
+ mPowerManager.clear();
+}
+
+void AWakeLock::PMDeathRecipient::binderDied(const wp<IBinder>& who __unused) {
+ if (mWakeLock != NULL) {
+ mWakeLock->clearPowerManager();
+ }
+}
+
+} // namespace android
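
AWakeLock puts a reference count in front of the PowerManager partial wake lock: the binder call happens only on the 0 to 1 transition, release(false) drops the count, the lock is given back on the 1 to 0 transition, and release(true) forces it, which clearPowerManager() uses when the power service dies. A hedged usage sketch, assuming the AWakeLock.h header from this change and a process with binder access:

    #include <media/stagefright/foundation/AWakeLock.h>  // header location assumed

    using namespace android;

    void pauseResumeExample() {
        sp<AWakeLock> wakeLock = new AWakeLock();

        if (wakeLock->acquire()) {      // count 0 -> 1: takes the PowerManager lock
            wakeLock->acquire();        // count 1 -> 2: no binder call
            // ... work that must keep the CPU awake ...
            wakeLock->release(false);   // count 2 -> 1: lock still held
            wakeLock->release(false);   // count 1 -> 0: PowerManager lock released
        }
    }
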
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index ad2dab5..08355c7 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -5,6 +5,7 @@ LOCAL_SRC_FILES:= \
AAtomizer.cpp \
ABitReader.cpp \
ABuffer.cpp \
+ ADebug.cpp \
AHandler.cpp \
AHierarchicalStateMachine.cpp \
ALooper.cpp \
@@ -12,6 +13,8 @@ LOCAL_SRC_FILES:= \
AMessage.cpp \
ANetworkSession.cpp \
AString.cpp \
+ AStringUtils.cpp \
+ AWakeLock.cpp \
ParsedMessage.cpp \
base64.cpp \
hexdump.cpp
@@ -22,9 +25,11 @@ LOCAL_C_INCLUDES:= \
LOCAL_SHARED_LIBRARIES := \
libbinder \
libutils \
- liblog
+ libcutils \
+ liblog \
+ libpowermanager
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror
LOCAL_MODULE:= libstagefright_foundation
diff --git a/media/libstagefright/foundation/base64.cpp b/media/libstagefright/foundation/base64.cpp
index d5fb4e0..dcf5bef 100644
--- a/media/libstagefright/foundation/base64.cpp
+++ b/media/libstagefright/foundation/base64.cpp
@@ -33,6 +33,10 @@ sp<ABuffer> decodeBase64(const AString &s) {
if (n >= 2 && s.c_str()[n - 2] == '=') {
padding = 2;
+
+ if (n >= 3 && s.c_str()[n - 3] == '=') {
+ padding = 3;
+ }
}
}
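
The extra check above lets decodeBase64 count a third trailing '=' (already malformed input) instead of stopping at two, so the padding subtracted from the decoded length accounts for every trailing pad byte. A small sketch of the counting rule, mirroring the patched logic on illustrative inputs:

    #include <cstddef>
    #include <string>

    // Count trailing '=' characters the same way decodeBase64() now does
    // (0, 1, 2, or 3; more than two is already malformed base64).
    size_t countBase64Padding(const std::string &s) {
        size_t n = s.size(), padding = 0;
        if (n >= 1 && s[n - 1] == '=') {
            padding = 1;
            if (n >= 2 && s[n - 2] == '=') {
                padding = 2;
                if (n >= 3 && s[n - 3] == '=') {
                    padding = 3;
                }
            }
        }
        return padding;   // "QUJD" -> 0, "QUI=" -> 1, "QQ==" -> 2
    }
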
@@ -71,7 +75,7 @@ sp<ABuffer> decodeBase64(const AString &s) {
if (((i + 1) % 4) == 0) {
out[j++] = (accum >> 16);
- if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
+ if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
if (j < outLen) { out[j++] = accum & 0xff; }
accum = 0;
diff --git a/media/libstagefright/http/Android.mk b/media/libstagefright/http/Android.mk
new file mode 100644
index 0000000..7f3307d
--- /dev/null
+++ b/media/libstagefright/http/Android.mk
@@ -0,0 +1,28 @@
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_BUILD_PDK), true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ HTTPHelper.cpp \
+
+LOCAL_C_INCLUDES:= \
+ $(TOP)/frameworks/av/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/base/core/jni \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation \
+ libandroid_runtime \
+ libmedia
+
+LOCAL_MODULE:= libstagefright_http_support
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_CFLAGS += -Werror
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/http/HTTPHelper.cpp b/media/libstagefright/http/HTTPHelper.cpp
new file mode 100644
index 0000000..77845e2
--- /dev/null
+++ b/media/libstagefright/http/HTTPHelper.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HTTPHelper"
+#include <utils/Log.h>
+
+#include "HTTPHelper.h"
+
+#include "android_runtime/AndroidRuntime.h"
+#include "android_util_Binder.h"
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <nativehelper/ScopedLocalRef.h>
+#include "jni.h"
+
+namespace android {
+
+sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext() {
+ if (AndroidRuntime::getJavaVM() == NULL) {
+ ALOGE("CreateHTTPServiceInCurrentJavaContext called outside "
+ "JAVA environment.");
+ return NULL;
+ }
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+ ScopedLocalRef<jclass> clazz(
+ env, env->FindClass("android/media/MediaHTTPService"));
+ CHECK(clazz.get() != NULL);
+
+ jmethodID constructID = env->GetMethodID(clazz.get(), "<init>", "()V");
+ CHECK(constructID != NULL);
+
+ ScopedLocalRef<jobject> httpServiceObj(
+ env, env->NewObject(clazz.get(), constructID));
+
+ sp<IMediaHTTPService> httpService;
+ if (httpServiceObj.get() != NULL) {
+ jmethodID asBinderID =
+ env->GetMethodID(clazz.get(), "asBinder", "()Landroid/os/IBinder;");
+ CHECK(asBinderID != NULL);
+
+ ScopedLocalRef<jobject> httpServiceBinderObj(
+ env, env->CallObjectMethod(httpServiceObj.get(), asBinderID));
+ CHECK(httpServiceBinderObj.get() != NULL);
+
+ sp<IBinder> binder =
+ ibinderForJavaObject(env, httpServiceBinderObj.get());
+
+ httpService = interface_cast<IMediaHTTPService>(binder);
+ }
+
+ return httpService;
+}
+
+} // namespace android
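
CreateHTTPServiceInCurrentJavaContext reflects into the attached Java VM, constructs an android.media.MediaHTTPService, and hands back its binder as an IMediaHTTPService. A hedged caller-side sketch, assuming the calling process already hosts a Java runtime; the helper function name below is illustrative:

    #include "HTTPHelper.h"                 // from this change
    #include <media/IMediaHTTPService.h>
    #include <media/IMediaHTTPConnection.h>

    using namespace android;

    // Illustrative helper: returns NULL when no Java VM is attached.
    sp<IMediaHTTPConnection> makeConnectionFromJavaContext() {
        sp<IMediaHTTPService> service = CreateHTTPServiceInCurrentJavaContext();
        if (service == NULL) {
            return NULL;
        }
        return service->makeHTTPConnection();
    }
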
diff --git a/media/libstagefright/http/HTTPHelper.h b/media/libstagefright/http/HTTPHelper.h
new file mode 100644
index 0000000..8aef115
--- /dev/null
+++ b/media/libstagefright/http/HTTPHelper.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HTTP_HELPER_H_
+
+#define HTTP_HELPER_H_
+
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct IMediaHTTPService;
+
+sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext();
+
+} // namespace android
+
+#endif // HTTP_HELPER_H_
diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp
new file mode 100644
index 0000000..bb89567
--- /dev/null
+++ b/media/libstagefright/http/MediaHTTP.cpp
@@ -0,0 +1,205 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaHTTP"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaHTTP.h>
+
+#include <binder/IServiceManager.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/Utils.h>
+
+#include <media/IMediaHTTPConnection.h>
+
+namespace android {
+
+MediaHTTP::MediaHTTP(const sp<IMediaHTTPConnection> &conn)
+ : mInitCheck(NO_INIT),
+ mHTTPConnection(conn),
+ mCachedSizeValid(false),
+ mCachedSize(0ll),
+ mDrmManagerClient(NULL) {
+ mInitCheck = OK;
+}
+
+MediaHTTP::~MediaHTTP() {
+ clearDRMState_l();
+}
+
+status_t MediaHTTP::connect(
+ const char *uri,
+ const KeyedVector<String8, String8> *headers,
+ off64_t /* offset */) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ KeyedVector<String8, String8> extHeaders;
+ if (headers != NULL) {
+ extHeaders = *headers;
+ }
+ extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str()));
+
+ bool success = mHTTPConnection->connect(uri, &extHeaders);
+
+ mLastHeaders = extHeaders;
+ mLastURI = uri;
+
+ mCachedSizeValid = false;
+
+ return success ? OK : UNKNOWN_ERROR;
+}
+
+void MediaHTTP::disconnect() {
+ if (mInitCheck != OK) {
+ return;
+ }
+
+ mHTTPConnection->disconnect();
+}
+
+status_t MediaHTTP::initCheck() const {
+ return mInitCheck;
+}
+
+ssize_t MediaHTTP::readAt(off64_t offset, void *data, size_t size) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ int64_t startTimeUs = ALooper::GetNowUs();
+
+ size_t numBytesRead = 0;
+ while (numBytesRead < size) {
+ size_t copy = size - numBytesRead;
+
+ if (copy > 64 * 1024) {
+ // limit the buffer sizes transferred across binder boundaries
+ // to avoid spurious transaction failures.
+ copy = 64 * 1024;
+ }
+
+ ssize_t n = mHTTPConnection->readAt(
+ offset + numBytesRead, (uint8_t *)data + numBytesRead, copy);
+
+ if (n < 0) {
+ return n;
+ } else if (n == 0) {
+ break;
+ }
+
+ numBytesRead += n;
+ }
+
+ int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+
+ addBandwidthMeasurement(numBytesRead, delayUs);
+
+ return numBytesRead;
+}
+
+status_t MediaHTTP::getSize(off64_t *size) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ // Caching the returned size so that it stays valid even after a
+ // disconnect. NuCachedSource2 relies on this.
+
+ if (!mCachedSizeValid) {
+ mCachedSize = mHTTPConnection->getSize();
+ mCachedSizeValid = true;
+ }
+
+ *size = mCachedSize;
+
+ return *size < 0 ? *size : static_cast<status_t>(OK);
+}
+
+uint32_t MediaHTTP::flags() {
+ return kWantsPrefetching | kIsHTTPBasedSource;
+}
+
+status_t MediaHTTP::reconnectAtOffset(off64_t offset) {
+ return connect(mLastURI.c_str(), &mLastHeaders, offset);
+}
+
+// DRM...
+
+sp<DecryptHandle> MediaHTTP::DrmInitialization(const char* mime) {
+ if (mDrmManagerClient == NULL) {
+ mDrmManagerClient = new DrmManagerClient();
+ }
+
+ if (mDrmManagerClient == NULL) {
+ return NULL;
+ }
+
+ if (mDecryptHandle == NULL) {
+ mDecryptHandle = mDrmManagerClient->openDecryptSession(
+ String8(mLastURI.c_str()), mime);
+ }
+
+ if (mDecryptHandle == NULL) {
+ delete mDrmManagerClient;
+ mDrmManagerClient = NULL;
+ }
+
+ return mDecryptHandle;
+}
+
+void MediaHTTP::getDrmInfo(
+ sp<DecryptHandle> &handle, DrmManagerClient **client) {
+ handle = mDecryptHandle;
+ *client = mDrmManagerClient;
+}
+
+String8 MediaHTTP::getUri() {
+ String8 uri;
+ if (OK == mHTTPConnection->getUri(&uri)) {
+ return uri;
+ }
+ return String8(mLastURI.c_str());
+}
+
+String8 MediaHTTP::getMIMEType() const {
+ if (mInitCheck != OK) {
+ return String8("application/octet-stream");
+ }
+
+ String8 mimeType;
+ status_t err = mHTTPConnection->getMIMEType(&mimeType);
+
+ if (err != OK) {
+ return String8("application/octet-stream");
+ }
+
+ return mimeType;
+}
+
+void MediaHTTP::clearDRMState_l() {
+ if (mDecryptHandle != NULL) {
+ // To release mDecryptHandle
+ CHECK(mDrmManagerClient);
+ mDrmManagerClient->closeDecryptSession(mDecryptHandle);
+ mDecryptHandle = NULL;
+ }
+}
+
+} // namespace android
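
readAt() above caps each IMediaHTTPConnection transaction at 64 KiB and loops until the request is satisfied, an error comes back, or the stream ends. A standalone sketch of that chunking loop over a generic read callback (all names below are illustrative):

    #include <sys/types.h>
    #include <cstddef>
    #include <cstdint>
    #include <functional>

    // Read "size" bytes starting at "offset", never asking the transport for
    // more than 64 KiB per call, mirroring the loop in MediaHTTP::readAt().
    ssize_t chunkedReadAt(uint64_t offset, uint8_t *data, size_t size,
                          const std::function<ssize_t(uint64_t, uint8_t *, size_t)> &readOnce) {
        static const size_t kMaxChunk = 64 * 1024;  // keep binder transactions small
        size_t numBytesRead = 0;
        while (numBytesRead < size) {
            size_t copy = size - numBytesRead;
            if (copy > kMaxChunk) {
                copy = kMaxChunk;
            }
            ssize_t n = readOnce(offset + numBytesRead, data + numBytesRead, copy);
            if (n < 0) {
                return n;       // propagate transport errors
            }
            if (n == 0) {
                break;          // end of stream
            }
            numBytesRead += (size_t)n;
        }
        return (ssize_t)numBytesRead;
    }
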
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index f3529f9..93b7935 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -10,8 +10,9 @@ LOCAL_SRC_FILES:= \
LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/media/libstagefright \
- $(TOP)/frameworks/native/include/media/openmax \
- $(TOP)/external/openssl/include
+ $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_CFLAGS += -Werror
LOCAL_SHARED_LIBRARIES := \
libbinder \
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index fc1353a..d0f3bc2 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -27,6 +27,8 @@
#include "mpeg2ts/AnotherPacketSource.h"
#include <cutils/properties.h>
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -34,82 +36,193 @@
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaHTTP.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
+#include <utils/Mutex.h>
+
#include <ctype.h>
+#include <inttypes.h>
#include <openssl/aes.h>
#include <openssl/md5.h>
namespace android {
+// Number of recently-read bytes to use for bandwidth estimation
+const size_t LiveSession::kBandwidthHistoryBytes = 200 * 1024;
+
LiveSession::LiveSession(
- const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+ const sp<AMessage> &notify, uint32_t flags,
+ const sp<IMediaHTTPService> &httpService)
: mNotify(notify),
mFlags(flags),
- mUIDValid(uidValid),
- mUID(uid),
+ mHTTPService(httpService),
mInPreparationPhase(true),
- mHTTPDataSource(
- HTTPBase::Create(
- (mFlags & kFlagIncognito)
- ? HTTPBase::kFlagIncognito
- : 0)),
- mPrevBandwidthIndex(-1),
+ mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())),
+ mCurBandwidthIndex(-1),
mStreamMask(0),
+ mNewStreamMask(0),
+ mSwapMask(0),
mCheckBandwidthGeneration(0),
+ mSwitchGeneration(0),
+ mSubtitleGeneration(0),
mLastDequeuedTimeUs(0ll),
mRealTimeBaseUs(0ll),
mReconfigurationInProgress(false),
- mDisconnectReplyID(0) {
- if (mUIDValid) {
- mHTTPDataSource->setUID(mUID);
+ mSwitchInProgress(false),
+ mDisconnectReplyID(0),
+ mSeekReplyID(0),
+ mFirstTimeUsValid(false),
+ mFirstTimeUs(0),
+ mLastSeekTimeUs(0) {
+
+ mStreams[kAudioIndex] = StreamItem("audio");
+ mStreams[kVideoIndex] = StreamItem("video");
+ mStreams[kSubtitleIndex] = StreamItem("subtitles");
+
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ mDiscontinuities.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
+ mPacketSources.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
+ mPacketSources2.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));
+ mBuffering[i] = false;
}
- mPacketSources.add(
- STREAMTYPE_AUDIO, new AnotherPacketSource(NULL /* meta */));
+ size_t numHistoryItems = kBandwidthHistoryBytes /
+ PlaylistFetcher::kDownloadBlockSize + 1;
+ if (numHistoryItems < 5) {
+ numHistoryItems = 5;
+ }
+ mHTTPDataSource->setBandwidthHistorySize(numHistoryItems);
+}
- mPacketSources.add(
- STREAMTYPE_VIDEO, new AnotherPacketSource(NULL /* meta */));
+LiveSession::~LiveSession() {
+}
- mPacketSources.add(
- STREAMTYPE_SUBTITLES, new AnotherPacketSource(NULL /* meta */));
+sp<ABuffer> LiveSession::createFormatChangeBuffer(bool swap) {
+ ABuffer *discontinuity = new ABuffer(0);
+ discontinuity->meta()->setInt32("discontinuity", ATSParser::DISCONTINUITY_FORMATCHANGE);
+ discontinuity->meta()->setInt32("swapPacketSource", swap);
+ discontinuity->meta()->setInt32("switchGeneration", mSwitchGeneration);
+ discontinuity->meta()->setInt64("timeUs", -1);
+ return discontinuity;
}
-LiveSession::~LiveSession() {
+void LiveSession::swapPacketSource(StreamType stream) {
+ sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream);
+ sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream);
+ sp<AnotherPacketSource> tmp = aps;
+ aps = aps2;
+ aps2 = tmp;
+ aps2->clear();
}
status_t LiveSession::dequeueAccessUnit(
StreamType stream, sp<ABuffer> *accessUnit) {
if (!(mStreamMask & stream)) {
- return UNKNOWN_ERROR;
+ // return -EWOULDBLOCK to avoid halting the decoder
+ // when switching between audio/video and audio only.
+ return -EWOULDBLOCK;
+ }
+
+ status_t finalResult;
+ sp<AnotherPacketSource> discontinuityQueue = mDiscontinuities.valueFor(stream);
+ if (discontinuityQueue->hasBufferAvailable(&finalResult)) {
+ discontinuityQueue->dequeueAccessUnit(accessUnit);
+ // seeking, track switching
+ sp<AMessage> extra;
+ int64_t timeUs;
+ if ((*accessUnit)->meta()->findMessage("extra", &extra)
+ && extra != NULL
+ && extra->findInt64("timeUs", &timeUs)) {
+ // seeking only
+ mLastSeekTimeUs = timeUs;
+ mDiscontinuityOffsetTimesUs.clear();
+ mDiscontinuityAbsStartTimesUs.clear();
+ }
+ return INFO_DISCONTINUITY;
}
sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(stream);
- status_t finalResult;
+ ssize_t idx = typeToIndex(stream);
if (!packetSource->hasBufferAvailable(&finalResult)) {
+ if (finalResult == OK) {
+ mBuffering[idx] = true;
+ return -EAGAIN;
+ } else {
+ return finalResult;
+ }
+ }
+
+ int32_t targetDuration = 0;
+ sp<AMessage> meta = packetSource->getLatestEnqueuedMeta();
+ if (meta != NULL) {
+ meta->findInt32("targetDuration", &targetDuration);
+ }
+
+ int64_t targetDurationUs = targetDuration * 1000000ll;
+ if (targetDurationUs == 0 ||
+ targetDurationUs > PlaylistFetcher::kMinBufferedDurationUs) {
+ // Fetchers limit buffering to
+ // min(3 * targetDuration, kMinBufferedDurationUs)
+ targetDurationUs = PlaylistFetcher::kMinBufferedDurationUs;
+ }
+
+ if (mBuffering[idx]) {
+ if (mSwitchInProgress
+ || packetSource->isFinished(0)
+ || packetSource->getEstimatedDurationUs() > targetDurationUs) {
+ mBuffering[idx] = false;
+ }
+ }
+
+ if (mBuffering[idx]) {
+ return -EAGAIN;
+ }
+
+ // wait for counterpart
+ sp<AnotherPacketSource> otherSource;
+ uint32_t mask = mNewStreamMask & mStreamMask;
+ uint32_t fetchersMask = 0;
+ for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
+ uint32_t fetcherMask = mFetcherInfos.valueAt(i).mFetcher->getStreamTypeMask();
+ fetchersMask |= fetcherMask;
+ }
+ mask &= fetchersMask;
+ if (stream == STREAMTYPE_AUDIO && (mask & STREAMTYPE_VIDEO)) {
+ otherSource = mPacketSources.valueFor(STREAMTYPE_VIDEO);
+ } else if (stream == STREAMTYPE_VIDEO && (mask & STREAMTYPE_AUDIO)) {
+ otherSource = mPacketSources.valueFor(STREAMTYPE_AUDIO);
+ }
+ if (otherSource != NULL && !otherSource->hasBufferAvailable(&finalResult)) {
return finalResult == OK ? -EAGAIN : finalResult;
}
status_t err = packetSource->dequeueAccessUnit(accessUnit);
+ size_t streamIdx;
const char *streamStr;
switch (stream) {
case STREAMTYPE_AUDIO:
+ streamIdx = kAudioIndex;
streamStr = "audio";
break;
case STREAMTYPE_VIDEO:
+ streamIdx = kVideoIndex;
streamStr = "video";
break;
case STREAMTYPE_SUBTITLES:
+ streamIdx = kSubtitleIndex;
streamStr = "subs";
break;
default:
TRESPASS();
}
+ StreamItem& strm = mStreams[streamIdx];
if (err == INFO_DISCONTINUITY) {
+ // adaptive streaming, discontinuities in the playlist
int32_t type;
CHECK((*accessUnit)->meta()->findInt32("discontinuity", &type));
@@ -122,15 +235,90 @@ status_t LiveSession::dequeueAccessUnit(
streamStr,
type,
extra == NULL ? "NULL" : extra->debugString().c_str());
+
+ int32_t swap;
+ if ((*accessUnit)->meta()->findInt32("swapPacketSource", &swap) && swap) {
+ int32_t switchGeneration;
+ CHECK((*accessUnit)->meta()->findInt32("switchGeneration", &switchGeneration));
+ {
+ Mutex::Autolock lock(mSwapMutex);
+ if (switchGeneration == mSwitchGeneration) {
+ swapPacketSource(stream);
+ sp<AMessage> msg = new AMessage(kWhatSwapped, id());
+ msg->setInt32("stream", stream);
+ msg->setInt32("switchGeneration", switchGeneration);
+ msg->post();
+ }
+ }
+ } else {
+ size_t seq = strm.mCurDiscontinuitySeq;
+ int64_t offsetTimeUs;
+ if (mDiscontinuityOffsetTimesUs.indexOfKey(seq) >= 0) {
+ offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(seq);
+ } else {
+ offsetTimeUs = 0;
+ }
+
+ seq += 1;
+ if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
+ int64_t firstTimeUs;
+ firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq);
+ offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs;
+ offsetTimeUs += strm.mLastSampleDurationUs;
+ } else {
+ offsetTimeUs += strm.mLastSampleDurationUs;
+ }
+
+ mDiscontinuityOffsetTimesUs.add(seq, offsetTimeUs);
+ }
} else if (err == OK) {
+
if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) {
int64_t timeUs;
+ int32_t discontinuitySeq = 0;
CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
- ALOGV("[%s] read buffer at time %lld us", streamStr, timeUs);
+ (*accessUnit)->meta()->findInt32("discontinuitySeq", &discontinuitySeq);
+ strm.mCurDiscontinuitySeq = discontinuitySeq;
+
+ int32_t discard = 0;
+ int64_t firstTimeUs;
+ if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
+ int64_t durUs; // approximate sample duration
+ if (timeUs > strm.mLastDequeuedTimeUs) {
+ durUs = timeUs - strm.mLastDequeuedTimeUs;
+ } else {
+ durUs = strm.mLastDequeuedTimeUs - timeUs;
+ }
+ strm.mLastSampleDurationUs = durUs;
+ firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq);
+ } else if ((*accessUnit)->meta()->findInt32("discard", &discard) && discard) {
+ firstTimeUs = timeUs;
+ } else {
+ mDiscontinuityAbsStartTimesUs.add(strm.mCurDiscontinuitySeq, timeUs);
+ firstTimeUs = timeUs;
+ }
+
+ strm.mLastDequeuedTimeUs = timeUs;
+ if (timeUs >= firstTimeUs) {
+ timeUs -= firstTimeUs;
+ } else {
+ timeUs = 0;
+ }
+ timeUs += mLastSeekTimeUs;
+ if (mDiscontinuityOffsetTimesUs.indexOfKey(discontinuitySeq) >= 0) {
+ timeUs += mDiscontinuityOffsetTimesUs.valueFor(discontinuitySeq);
+ }
+ ALOGV("[%s] read buffer at time %" PRId64 " us", streamStr, timeUs);
+ (*accessUnit)->meta()->setInt64("timeUs", timeUs);
mLastDequeuedTimeUs = timeUs;
mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;
} else if (stream == STREAMTYPE_SUBTITLES) {
+ int32_t subtitleGeneration;
+ if ((*accessUnit)->meta()->findInt32("subtitleGeneration", &subtitleGeneration)
+ && subtitleGeneration != mSubtitleGeneration) {
+ return -EAGAIN;
+ }
(*accessUnit)->meta()->setInt32(
"trackIndex", mPlaylist->getSelectedIndex());
(*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs);
@@ -143,6 +331,7 @@ status_t LiveSession::dequeueAccessUnit(
}
status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) {
+ // No swapPacketSource race condition; called from the same thread as dequeueAccessUnit.
if (!(mStreamMask & stream)) {
return UNKNOWN_ERROR;
}
@@ -213,15 +402,16 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
case kWhatSeek:
{
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
+ uint32_t seekReplyID;
+ CHECK(msg->senderAwaitsResponse(&seekReplyID));
+ mSeekReplyID = seekReplyID;
+ mSeekReply = new AMessage;
status_t err = onSeek(msg);
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
-
- response->postReply(replyID);
+ if (err != OK) {
+ msg->post(50000);
+ }
break;
}
@@ -239,13 +429,28 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
if (what == PlaylistFetcher::kWhatStopped) {
AString uri;
CHECK(msg->findString("uri", &uri));
- mFetcherInfos.removeItem(uri);
+ if (mFetcherInfos.removeItem(uri) < 0) {
+ // ignore duplicated kWhatStopped messages.
+ break;
+ }
+
+ if (mSwitchInProgress) {
+ tryToFinishBandwidthSwitch();
+ }
}
if (mContinuation != NULL) {
CHECK_GT(mContinuationCounter, 0);
if (--mContinuationCounter == 0) {
mContinuation->post();
+
+ if (mSeekReplyID != 0) {
+ CHECK(mSeekReply != NULL);
+ mSeekReply->setInt32("err", OK);
+ mSeekReply->postReply(mSeekReplyID);
+ mSeekReplyID = 0;
+ mSeekReply.clear();
+ }
}
}
break;
@@ -271,10 +476,29 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
ALOGE("XXX Received error %d from PlaylistFetcher.", err);
+ // handle EOS on subtitle tracks independently
+ AString uri;
+ if (err == ERROR_END_OF_STREAM && msg->findString("uri", &uri)) {
+ ssize_t i = mFetcherInfos.indexOfKey(uri);
+ if (i >= 0) {
+ const sp<PlaylistFetcher> &fetcher = mFetcherInfos.valueAt(i).mFetcher;
+ if (fetcher != NULL) {
+ uint32_t type = fetcher->getStreamTypeMask();
+ if (type == STREAMTYPE_SUBTITLES) {
+ mPacketSources.valueFor(
+ STREAMTYPE_SUBTITLES)->signalEOS(err);
+ break;
+ }
+ }
+ }
+ }
+
if (mInPreparationPhase) {
postPrepared(err);
}
+ cancelBandwidthSwitch();
+
mPacketSources.valueFor(STREAMTYPE_AUDIO)->signalEOS(err);
mPacketSources.valueFor(STREAMTYPE_VIDEO)->signalEOS(err);
@@ -294,6 +518,10 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
AString uri;
CHECK(msg->findString("uri", &uri));
+ if (mFetcherInfos.indexOfKey(uri) < 0) {
+ ALOGE("couldn't find uri");
+ break;
+ }
FetcherInfo *info = &mFetcherInfos.editValueFor(uri);
info->mIsPrepared = true;
@@ -313,6 +541,27 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case PlaylistFetcher::kWhatStartedAt:
+ {
+ int32_t switchGeneration;
+ CHECK(msg->findInt32("switchGeneration", &switchGeneration));
+
+ if (switchGeneration != mSwitchGeneration) {
+ break;
+ }
+
+ // Resume fetcher for the original variant; the resumed fetcher should
+ // continue until the timestamps found in msg, which is stored by the
+ // new fetcher to indicate where the new variant has started buffering.
+ for (size_t i = 0; i < mFetcherInfos.size(); i++) {
+ const FetcherInfo info = mFetcherInfos.valueAt(i);
+ if (info.mToBeRemoved) {
+ info.mFetcher->resumeUntilAsync(msg);
+ }
+ }
+ break;
+ }
+
default:
TRESPASS();
}
@@ -329,7 +578,7 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- onCheckBandwidth();
+ onCheckBandwidth(msg);
break;
}
@@ -357,6 +606,24 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSwapped:
+ {
+ onSwapped(msg);
+ break;
+ }
+
+ case kWhatCheckSwitchDown:
+ {
+ onCheckSwitchDown();
+ break;
+ }
+
+ case kWhatSwitchDown:
+ {
+ onSwitchDown();
+ break;
+ }
+
default:
TRESPASS();
break;
@@ -374,6 +641,27 @@ int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b)
return 1;
}
+// static
+LiveSession::StreamType LiveSession::indexToType(int idx) {
+ CHECK(idx >= 0 && idx < kMaxStreams);
+ return (StreamType)(1 << idx);
+}
+
+// static
+ssize_t LiveSession::typeToIndex(int32_t type) {
+ switch (type) {
+ case STREAMTYPE_AUDIO:
+ return 0;
+ case STREAMTYPE_VIDEO:
+ return 1;
+ case STREAMTYPE_SUBTITLES:
+ return 2;
+ default:
+ return -1;
+ };
+ return -1;
+}
+
void LiveSession::onConnect(const sp<AMessage> &msg) {
AString url;
CHECK(msg->findString("url", &url));
@@ -388,11 +676,8 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
headers = NULL;
}
-#if 1
- ALOGI("onConnect <URL suppressed>");
-#else
- ALOGI("onConnect %s", url.c_str());
-#endif
+ // TODO currently we don't know if we are coming here from incognito mode
+ ALOGI("onConnect %s", uriDebugString(url).c_str());
mMasterURL = url;
@@ -400,7 +685,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
mPlaylist = fetchPlaylist(url.c_str(), NULL /* curPlaylistHash */, &dummy);
if (mPlaylist == NULL) {
- ALOGE("unable to fetch master playlist '%s'.", url.c_str());
+ ALOGE("unable to fetch master playlist %s.", uriDebugString(url).c_str());
postPrepared(ERROR_IO);
return;
@@ -424,7 +709,6 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
AString uri;
mPlaylist->itemAt(i, &uri, &meta);
- unsigned long bandwidth;
CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth));
if (initialBandwidth == 0) {
@@ -452,8 +736,9 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
mBandwidthItems.push(item);
}
+ mPlaylist->pickRandomMediaItems();
changeConfiguration(
- 0ll /* timeUs */, initialBandwidthIndex, true /* pickTrack */);
+ 0ll /* timeUs */, initialBandwidthIndex, false /* pickTrack */);
}
void LiveSession::finishDisconnect() {
@@ -461,6 +746,13 @@ void LiveSession::finishDisconnect() {
// during disconnection either.
cancelCheckBandwidthEvent();
+ // Protect mPacketSources from a swapPacketSource race condition through disconnect.
+ // (finishDisconnect, onFinishDisconnect2)
+ cancelBandwidthSwitch();
+
+ // cancel switch down monitor
+ mSwitchDownMonitor.clear();
+
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
mFetcherInfos.valueAt(i).mFetcher->stopAsync();
}
@@ -500,11 +792,13 @@ sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) {
sp<AMessage> notify = new AMessage(kWhatFetcherNotify, id());
notify->setString("uri", uri);
+ notify->setInt32("switchGeneration", mSwitchGeneration);
FetcherInfo info;
- info.mFetcher = new PlaylistFetcher(notify, this, uri);
+ info.mFetcher = new PlaylistFetcher(notify, this, uri, mSubtitleGeneration);
info.mDurationUs = -1ll;
info.mIsPrepared = false;
+ info.mToBeRemoved = false;
looper()->registerHandler(info.mFetcher);
mFetcherInfos.add(uri, info);
@@ -512,57 +806,89 @@ sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) {
return info.mFetcher;
}
-status_t LiveSession::fetchFile(
+/*
+ * Illustration of parameters:
+ *
+ * 0 `range_offset`
+ * +------------+-------------------------------------------------------+--+--+
+ * | | | next block to fetch | | |
+ * | | `source` handle => `out` buffer | | | |
+ * | `url` file |<--------- buffer size --------->|<--- `block_size` -->| | |
+ * | |<----------- `range_length` / buffer capacity ----------->| |
+ * |<------------------------------ file_size ------------------------------->|
+ *
+ * Special parameter values:
+ * - range_length == -1 means entire file
+ * - block_size == 0 means entire range
+ *
+ */
+ssize_t LiveSession::fetchFile(
const char *url, sp<ABuffer> *out,
int64_t range_offset, int64_t range_length,
+ uint32_t block_size, /* download block size */
+ sp<DataSource> *source, /* to return and reuse source */
String8 *actualUrl) {
- *out = NULL;
+ off64_t size;
+ sp<DataSource> temp_source;
+ if (source == NULL) {
+ source = &temp_source;
+ }
- sp<DataSource> source;
+ if (*source == NULL) {
+ if (!strncasecmp(url, "file://", 7)) {
+ *source = new FileSource(url + 7);
+ } else if (strncasecmp(url, "http://", 7)
+ && strncasecmp(url, "https://", 8)) {
+ return ERROR_UNSUPPORTED;
+ } else {
+ KeyedVector<String8, String8> headers = mExtraHeaders;
+ if (range_offset > 0 || range_length >= 0) {
+ headers.add(
+ String8("Range"),
+ String8(
+ AStringPrintf(
+ "bytes=%lld-%s",
+ range_offset,
+ range_length < 0
+ ? "" : AStringPrintf("%lld",
+ range_offset + range_length - 1).c_str()).c_str()));
+ }
+ status_t err = mHTTPDataSource->connect(url, &headers);
- if (!strncasecmp(url, "file://", 7)) {
- source = new FileSource(url + 7);
- } else if (strncasecmp(url, "http://", 7)
- && strncasecmp(url, "https://", 8)) {
- return ERROR_UNSUPPORTED;
- } else {
- KeyedVector<String8, String8> headers = mExtraHeaders;
- if (range_offset > 0 || range_length >= 0) {
- headers.add(
- String8("Range"),
- String8(
- StringPrintf(
- "bytes=%lld-%s",
- range_offset,
- range_length < 0
- ? "" : StringPrintf("%lld", range_offset + range_length - 1).c_str()).c_str()));
- }
- status_t err = mHTTPDataSource->connect(url, &headers);
+ if (err != OK) {
+ return err;
+ }
- if (err != OK) {
- return err;
+ *source = mHTTPDataSource;
}
-
- source = mHTTPDataSource;
}
- off64_t size;
- status_t err = source->getSize(&size);
-
- if (err != OK) {
+ status_t getSizeErr = (*source)->getSize(&size);
+ if (getSizeErr != OK) {
size = 65536;
}
- sp<ABuffer> buffer = new ABuffer(size);
- buffer->setRange(0, 0);
+ sp<ABuffer> buffer = *out != NULL ? *out : new ABuffer(size);
+ if (*out == NULL) {
+ buffer->setRange(0, 0);
+ }
+ ssize_t bytesRead = 0;
+ // adjust range_length if only reading partial block
+ if (block_size > 0 && (range_length == -1 || (int64_t)(buffer->size() + block_size) < range_length)) {
+ range_length = buffer->size() + block_size;
+ }
for (;;) {
+ // Only resize when we don't know the size.
size_t bufferRemaining = buffer->capacity() - buffer->size();
+ if (bufferRemaining == 0 && getSizeErr != OK) {
+ size_t bufferIncrement = buffer->size() / 2;
+ if (bufferIncrement < 32768) {
+ bufferIncrement = 32768;
+ }
+ bufferRemaining = bufferIncrement;
- if (bufferRemaining == 0) {
- bufferRemaining = 32768;
-
- ALOGV("increasing download buffer to %d bytes",
+ ALOGV("increasing download buffer to %zu bytes",
buffer->size() + bufferRemaining);
sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining);
@@ -575,7 +901,7 @@ status_t LiveSession::fetchFile(
size_t maxBytesToRead = bufferRemaining;
if (range_length >= 0) {
int64_t bytesLeftInRange = range_length - buffer->size();
- if (bytesLeftInRange < maxBytesToRead) {
+ if (bytesLeftInRange < (int64_t)maxBytesToRead) {
maxBytesToRead = bytesLeftInRange;
if (bytesLeftInRange == 0) {
@@ -584,7 +910,9 @@ status_t LiveSession::fetchFile(
}
}
- ssize_t n = source->readAt(
+ // The DataSource is responsible for informing us of error (n < 0) or eof (n == 0)
+ // to help us break out of the loop.
+ ssize_t n = (*source)->readAt(
buffer->size(), buffer->data() + buffer->size(),
maxBytesToRead);
@@ -597,17 +925,18 @@ status_t LiveSession::fetchFile(
}
buffer->setRange(0, buffer->size() + (size_t)n);
+ bytesRead += n;
}
*out = buffer;
if (actualUrl != NULL) {
- *actualUrl = source->getUri();
+ *actualUrl = (*source)->getUri();
if (actualUrl->isEmpty()) {
*actualUrl = url;
}
}
- return OK;
+ return bytesRead;
}
sp<M3UParser> LiveSession::fetchPlaylist(
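
Under the new fetchFile() signature a caller can download incrementally: it keeps the sp<DataSource> so the HTTP connection is reused, passes a non-zero block_size, and each call appends at most one block to the caller's buffer, returning the number of bytes added (negative on error, 0 at end of file). A hedged sketch of such a loop; only fetchFile's signature comes from this hunk, while the method name, kBlockSize and the control flow are assumptions:

    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/DataSource.h>
    #include "LiveSession.h"

    using namespace android;

    // Hypothetical LiveSession member (fetchFile is not public); it would need
    // a matching declaration in LiveSession.h.
    ssize_t LiveSession::downloadEntireFile(const char *url, sp<ABuffer> *out) {
        static const uint32_t kBlockSize = 32 * 1024;  // assumed block size
        sp<DataSource> source;   // kept across calls so the connection is reused
        for (;;) {
            ssize_t bytesRead = fetchFile(
                    url, out,
                    0 /* range_offset */, -1 /* range_length: whole file */,
                    kBlockSize, &source, NULL /* actualUrl */);
            if (bytesRead < 0) {
                return bytesRead;                     // error from the data source
            }
            if (bytesRead == 0) {
                return (*out) != NULL ? (ssize_t)(*out)->size() : 0;  // EOF
            }
            // *out has grown by up to kBlockSize bytes; loop for the next block
        }
    }
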
@@ -618,9 +947,9 @@ sp<M3UParser> LiveSession::fetchPlaylist(
sp<ABuffer> buffer;
String8 actualUrl;
- status_t err = fetchFile(url, &buffer, 0, -1, &actualUrl);
+ ssize_t err = fetchFile(url, &buffer, 0, -1, 0, NULL, &actualUrl);
- if (err != OK) {
+ if (err <= 0) {
return NULL;
}
@@ -660,9 +989,11 @@ sp<M3UParser> LiveSession::fetchPlaylist(
return playlist;
}
+#if 0
static double uniformRand() {
return (double)rand() / RAND_MAX;
}
+#endif
size_t LiveSession::getBandwidthIndex() {
if (mBandwidthItems.size() == 0) {
@@ -704,14 +1035,22 @@ size_t LiveSession::getBandwidthIndex() {
}
}
- // Consider only 80% of the available bandwidth usable.
- bandwidthBps = (bandwidthBps * 8) / 10;
-
// Pick the highest bandwidth stream below or equal to estimated bandwidth.
index = mBandwidthItems.size() - 1;
- while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth
- > (size_t)bandwidthBps) {
+ while (index > 0) {
+ // consider only 80% of the available bandwidth, but if we are switching up,
+ // be even more conservative (70%) to avoid overestimating and immediately
+ // switching back.
+ size_t adjustedBandwidthBps = bandwidthBps;
+ if (index > mCurBandwidthIndex) {
+ adjustedBandwidthBps = adjustedBandwidthBps * 7 / 10;
+ } else {
+ adjustedBandwidthBps = adjustedBandwidthBps * 8 / 10;
+ }
+ if (mBandwidthItems.itemAt(index).mBandwidth <= adjustedBandwidthBps) {
+ break;
+ }
--index;
}
}
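
The loop above applies the safety margin per candidate: a variant above the current index must fit within 70% of the estimated bandwidth, any other candidate within 80%. A small standalone sketch of the same selection rule on assumed numbers:

    #include <cstddef>
    #include <vector>

    // Mirror of the selection loop with the 70%/80% margins from this hunk.
    // The bandwidth table and estimate below are assumed example values.
    size_t pickIndex(const std::vector<size_t> &itemBps,   // sorted ascending
                     size_t curIndex, size_t estimateBps) {
        size_t index = itemBps.size() - 1;
        while (index > 0) {
            size_t adjusted = (index > curIndex) ? estimateBps * 7 / 10
                                                 : estimateBps * 8 / 10;
            if (itemBps[index] <= adjusted) {
                break;
            }
            --index;
        }
        return index;
    }

    // pickIndex({800000, 1500000, 3000000}, 0, 2500000) == 1:
    // 3 Mbps fails the 70% check (1.75 Mbps), 1.5 Mbps passes it.
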
@@ -724,20 +1063,20 @@ size_t LiveSession::getBandwidthIndex() {
// to lowest)
const size_t kMinIndex = 0;
- static ssize_t mPrevBandwidthIndex = -1;
+ static ssize_t mCurBandwidthIndex = -1;
size_t index;
- if (mPrevBandwidthIndex < 0) {
+ if (mCurBandwidthIndex < 0) {
index = kMinIndex;
} else if (uniformRand() < 0.5) {
- index = (size_t)mPrevBandwidthIndex;
+ index = (size_t)mCurBandwidthIndex;
} else {
- index = mPrevBandwidthIndex + 1;
+ index = mCurBandwidthIndex + 1;
if (index == mBandwidthItems.size()) {
index = kMinIndex;
}
}
- mPrevBandwidthIndex = index;
+ mCurBandwidthIndex = index;
#elif 0
// Pick the highest bandwidth stream below or equal to 1.2 Mbit/sec
@@ -768,23 +1107,42 @@ size_t LiveSession::getBandwidthIndex() {
return index;
}
+int64_t LiveSession::latestMediaSegmentStartTimeUs() {
+ sp<AMessage> audioMeta = mPacketSources.valueFor(STREAMTYPE_AUDIO)->getLatestDequeuedMeta();
+ int64_t minSegmentStartTimeUs = -1, videoSegmentStartTimeUs = -1;
+ if (audioMeta != NULL) {
+ audioMeta->findInt64("segmentStartTimeUs", &minSegmentStartTimeUs);
+ }
+
+ sp<AMessage> videoMeta = mPacketSources.valueFor(STREAMTYPE_VIDEO)->getLatestDequeuedMeta();
+ if (videoMeta != NULL
+ && videoMeta->findInt64("segmentStartTimeUs", &videoSegmentStartTimeUs)) {
+ if (minSegmentStartTimeUs < 0 || videoSegmentStartTimeUs < minSegmentStartTimeUs) {
+ minSegmentStartTimeUs = videoSegmentStartTimeUs;
+ }
+
+ }
+ return minSegmentStartTimeUs;
+}
+
status_t LiveSession::onSeek(const sp<AMessage> &msg) {
int64_t timeUs;
CHECK(msg->findInt64("timeUs", &timeUs));
if (!mReconfigurationInProgress) {
- changeConfiguration(timeUs, getBandwidthIndex());
+ changeConfiguration(timeUs, mCurBandwidthIndex);
+ return OK;
+ } else {
+ return -EWOULDBLOCK;
}
-
- return OK;
}
status_t LiveSession::getDuration(int64_t *durationUs) const {
- int64_t maxDurationUs = 0ll;
+ int64_t maxDurationUs = -1ll;
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
int64_t fetcherDurationUs = mFetcherInfos.valueAt(i).mDurationUs;
- if (fetcherDurationUs >= 0ll && fetcherDurationUs > maxDurationUs) {
+ if (fetcherDurationUs > maxDurationUs) {
maxDurationUs = fetcherDurationUs;
}
}
@@ -803,50 +1161,84 @@ bool LiveSession::hasDynamicDuration() const {
return false;
}
-status_t LiveSession::getTrackInfo(Parcel *reply) const {
- return mPlaylist->getTrackInfo(reply);
+size_t LiveSession::getTrackCount() const {
+ if (mPlaylist == NULL) {
+ return 0;
+ } else {
+ return mPlaylist->getTrackCount();
+ }
+}
+
+sp<AMessage> LiveSession::getTrackInfo(size_t trackIndex) const {
+ if (mPlaylist == NULL) {
+ return NULL;
+ } else {
+ return mPlaylist->getTrackInfo(trackIndex);
+ }
}
status_t LiveSession::selectTrack(size_t index, bool select) {
+ if (mPlaylist == NULL) {
+ return INVALID_OPERATION;
+ }
+
+ ++mSubtitleGeneration;
status_t err = mPlaylist->selectTrack(index, select);
if (err == OK) {
- (new AMessage(kWhatChangeConfiguration, id()))->post();
+ sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, id());
+ msg->setInt32("bandwidthIndex", mCurBandwidthIndex);
+ msg->setInt32("pickTrack", select);
+ msg->post();
}
return err;
}
+ssize_t LiveSession::getSelectedTrack(media_track_type type) const {
+ if (mPlaylist == NULL) {
+ return -1;
+ } else {
+ return mPlaylist->getSelectedTrack(type);
+ }
+}
+
+bool LiveSession::canSwitchUp() {
+ // Allow upwards bandwidth switch when a stream has buffered at least 10 seconds.
+ status_t err = OK;
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ sp<AnotherPacketSource> source = mPacketSources.valueAt(i);
+ int64_t dur = source->getBufferedDurationUs(&err);
+ if (err == OK && dur > 10000000) {
+ return true;
+ }
+ }
+ return false;
+}
+
void LiveSession::changeConfiguration(
int64_t timeUs, size_t bandwidthIndex, bool pickTrack) {
+ // Protect mPacketSources from a swapPacketSource race condition through reconfiguration.
+ // (changeConfiguration, onChangeConfiguration2, onChangeConfiguration3).
+ cancelBandwidthSwitch();
+
CHECK(!mReconfigurationInProgress);
mReconfigurationInProgress = true;
- mPrevBandwidthIndex = bandwidthIndex;
+ mCurBandwidthIndex = bandwidthIndex;
- ALOGV("changeConfiguration => timeUs:%lld us, bwIndex:%d, pickTrack:%d",
+ ALOGV("changeConfiguration => timeUs:%" PRId64 " us, bwIndex:%zu, pickTrack:%d",
timeUs, bandwidthIndex, pickTrack);
- if (pickTrack) {
- mPlaylist->pickRandomMediaItems();
- }
-
CHECK_LT(bandwidthIndex, mBandwidthItems.size());
const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex);
- uint32_t streamMask = 0;
+ uint32_t streamMask = 0; // streams that should be fetched by the new fetcher
+ uint32_t resumeMask = 0; // streams that should be fetched by the original fetcher
- AString audioURI;
- if (mPlaylist->getAudioURI(item.mPlaylistIndex, &audioURI)) {
- streamMask |= STREAMTYPE_AUDIO;
- }
-
- AString videoURI;
- if (mPlaylist->getVideoURI(item.mPlaylistIndex, &videoURI)) {
- streamMask |= STREAMTYPE_VIDEO;
- }
-
- AString subtitleURI;
- if (mPlaylist->getSubtitleURI(item.mPlaylistIndex, &subtitleURI)) {
- streamMask |= STREAMTYPE_SUBTITLES;
+ AString URIs[kMaxStreams];
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mPlaylist->getTypeURI(item.mPlaylistIndex, mStreams[i].mType, &URIs[i])) {
+ streamMask |= indexToType(i);
+ }
}
// Step 1, stop and discard fetchers that are no longer needed.
@@ -858,10 +1250,16 @@ void LiveSession::changeConfiguration(
// If we're seeking all current fetchers are discarded.
if (timeUs < 0ll) {
- if (((streamMask & STREAMTYPE_AUDIO) && uri == audioURI)
- || ((streamMask & STREAMTYPE_VIDEO) && uri == videoURI)
- || ((streamMask & STREAMTYPE_SUBTITLES) && uri == subtitleURI)) {
- discardFetcher = false;
+ // delay fetcher removal if not picking tracks
+ discardFetcher = pickTrack;
+
+ for (size_t j = 0; j < kMaxStreams; ++j) {
+ StreamType type = indexToType(j);
+ if ((streamMask & type) && uri == URIs[j]) {
+ resumeMask |= type;
+ streamMask &= ~type;
+ discardFetcher = false;
+ }
}
}
@@ -872,17 +1270,21 @@ void LiveSession::changeConfiguration(
}
}
- sp<AMessage> msg = new AMessage(kWhatChangeConfiguration2, id());
+ sp<AMessage> msg;
+ if (timeUs < 0ll) {
+ // skip onChangeConfiguration2 (decoder destruction) if not seeking.
+ msg = new AMessage(kWhatChangeConfiguration3, id());
+ } else {
+ msg = new AMessage(kWhatChangeConfiguration2, id());
+ }
msg->setInt32("streamMask", streamMask);
+ msg->setInt32("resumeMask", resumeMask);
+ msg->setInt32("pickTrack", pickTrack);
msg->setInt64("timeUs", timeUs);
- if (streamMask & STREAMTYPE_AUDIO) {
- msg->setString("audioURI", audioURI.c_str());
- }
- if (streamMask & STREAMTYPE_VIDEO) {
- msg->setString("videoURI", videoURI.c_str());
- }
- if (streamMask & STREAMTYPE_SUBTITLES) {
- msg->setString("subtitleURI", subtitleURI.c_str());
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if ((streamMask | resumeMask) & indexToType(i)) {
+ msg->setString(mStreams[i].uriKey().c_str(), URIs[i].c_str());
+ }
}
// Every time a fetcher acknowledges the stopAsync or pauseAsync request
@@ -894,12 +1296,23 @@ void LiveSession::changeConfiguration(
if (mContinuationCounter == 0) {
msg->post();
+
+ if (mSeekReplyID != 0) {
+ CHECK(mSeekReply != NULL);
+ mSeekReply->setInt32("err", OK);
+ mSeekReply->postReply(mSeekReplyID);
+ mSeekReplyID = 0;
+ mSeekReply.clear();
+ }
}
}
void LiveSession::onChangeConfiguration(const sp<AMessage> &msg) {
if (!mReconfigurationInProgress) {
- changeConfiguration(-1ll /* timeUs */, getBandwidthIndex());
+ int32_t pickTrack = 0, bandwidthIndex = mCurBandwidthIndex;
+ msg->findInt32("pickTrack", &pickTrack);
+ msg->findInt32("bandwidthIndex", &bandwidthIndex);
+ changeConfiguration(-1ll /* timeUs */, bandwidthIndex, pickTrack);
} else {
msg->post(1000000ll); // retry in 1 sec
}
@@ -910,21 +1323,22 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
// All fetchers are either suspended or have been removed now.
- uint32_t streamMask;
+ uint32_t streamMask, resumeMask;
CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask));
-
- AString audioURI, videoURI, subtitleURI;
- if (streamMask & STREAMTYPE_AUDIO) {
- CHECK(msg->findString("audioURI", &audioURI));
- ALOGV("audioURI = '%s'", audioURI.c_str());
- }
- if (streamMask & STREAMTYPE_VIDEO) {
- CHECK(msg->findString("videoURI", &videoURI));
- ALOGV("videoURI = '%s'", videoURI.c_str());
- }
- if (streamMask & STREAMTYPE_SUBTITLES) {
- CHECK(msg->findString("subtitleURI", &subtitleURI));
- ALOGV("subtitleURI = '%s'", subtitleURI.c_str());
+ CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask));
+
+ // currently onChangeConfiguration2 is only called for seeking;
+ // remove the following CHECK if using it elsewhere.
+ CHECK_EQ(resumeMask, 0);
+ streamMask |= resumeMask;
+
+ AString URIs[kMaxStreams];
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (streamMask & indexToType(i)) {
+ const AString &uriKey = mStreams[i].uriKey();
+ CHECK(msg->findString(uriKey.c_str(), &URIs[i]));
+ ALOGV("%s = '%s'", uriKey.c_str(), URIs[i].c_str());
+ }
}
// Determine which decoders to shutdown on the player side,
@@ -934,15 +1348,12 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
// 2) its streamtype was already active and still is but the URI
// has changed.
uint32_t changedMask = 0;
- if (((mStreamMask & streamMask & STREAMTYPE_AUDIO)
- && !(audioURI == mAudioURI))
- || (mStreamMask & ~streamMask & STREAMTYPE_AUDIO)) {
- changedMask |= STREAMTYPE_AUDIO;
- }
- if (((mStreamMask & streamMask & STREAMTYPE_VIDEO)
- && !(videoURI == mVideoURI))
- || (mStreamMask & ~streamMask & STREAMTYPE_VIDEO)) {
- changedMask |= STREAMTYPE_VIDEO;
+ for (size_t i = 0; i < kMaxStreams && i != kSubtitleIndex; ++i) {
+ if (((mStreamMask & streamMask & indexToType(i))
+ && !(URIs[i] == mStreams[i].mUri))
+ || (mStreamMask & ~streamMask & indexToType(i))) {
+ changedMask |= indexToType(i);
+ }
}
if (changedMask == 0) {
@@ -968,68 +1379,76 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
}
void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
+ mContinuation.clear();
// All remaining fetchers are still suspended, the player has shutdown
// any decoders that needed it.
- uint32_t streamMask;
+ uint32_t streamMask, resumeMask;
CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask));
-
- AString audioURI, videoURI, subtitleURI;
- if (streamMask & STREAMTYPE_AUDIO) {
- CHECK(msg->findString("audioURI", &audioURI));
- }
- if (streamMask & STREAMTYPE_VIDEO) {
- CHECK(msg->findString("videoURI", &videoURI));
- }
- if (streamMask & STREAMTYPE_SUBTITLES) {
- CHECK(msg->findString("subtitleURI", &subtitleURI));
- }
+ CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask));
int64_t timeUs;
+ int32_t pickTrack;
+ bool switching = false;
CHECK(msg->findInt64("timeUs", &timeUs));
+ CHECK(msg->findInt32("pickTrack", &pickTrack));
if (timeUs < 0ll) {
- timeUs = mLastDequeuedTimeUs;
+ if (!pickTrack) {
+ switching = true;
+ }
+ mRealTimeBaseUs = ALooper::GetNowUs() - mLastDequeuedTimeUs;
+ } else {
+ mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;
}
- mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;
- mStreamMask = streamMask;
- mAudioURI = audioURI;
- mVideoURI = videoURI;
- mSubtitleURI = subtitleURI;
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (streamMask & indexToType(i)) {
+ if (switching) {
+ CHECK(msg->findString(mStreams[i].uriKey().c_str(), &mStreams[i].mNewUri));
+ } else {
+ CHECK(msg->findString(mStreams[i].uriKey().c_str(), &mStreams[i].mUri));
+ }
+ }
+ }
+
+ mNewStreamMask = streamMask | resumeMask;
+ if (switching) {
+ mSwapMask = mStreamMask & ~resumeMask;
+ }
- // Resume all existing fetchers and assign them packet sources.
+ // Of all existing fetchers:
+ // * Resume fetchers that are still needed and assign them original packet sources.
+ // * Mark otherwise unneeded fetchers for removal.
+ ALOGV("resuming fetchers for mask 0x%08x", resumeMask);
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
const AString &uri = mFetcherInfos.keyAt(i);
- uint32_t resumeMask = 0;
-
- sp<AnotherPacketSource> audioSource;
- if ((streamMask & STREAMTYPE_AUDIO) && uri == audioURI) {
- audioSource = mPacketSources.valueFor(STREAMTYPE_AUDIO);
- resumeMask |= STREAMTYPE_AUDIO;
- }
-
- sp<AnotherPacketSource> videoSource;
- if ((streamMask & STREAMTYPE_VIDEO) && uri == videoURI) {
- videoSource = mPacketSources.valueFor(STREAMTYPE_VIDEO);
- resumeMask |= STREAMTYPE_VIDEO;
+ sp<AnotherPacketSource> sources[kMaxStreams];
+ for (size_t j = 0; j < kMaxStreams; ++j) {
+ if ((resumeMask & indexToType(j)) && uri == mStreams[j].mUri) {
+ sources[j] = mPacketSources.valueFor(indexToType(j));
+
+ if (j != kSubtitleIndex) {
+ ALOGV("queueing dummy discontinuity for stream type %d", indexToType(j));
+ sp<AnotherPacketSource> discontinuityQueue;
+ discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
+ discontinuityQueue->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_NONE,
+ NULL,
+ true);
+ }
+ }
}
- sp<AnotherPacketSource> subtitleSource;
- if ((streamMask & STREAMTYPE_SUBTITLES) && uri == subtitleURI) {
- subtitleSource = mPacketSources.valueFor(STREAMTYPE_SUBTITLES);
- resumeMask |= STREAMTYPE_SUBTITLES;
+ FetcherInfo &info = mFetcherInfos.editValueAt(i);
+ if (sources[kAudioIndex] != NULL || sources[kVideoIndex] != NULL
+ || sources[kSubtitleIndex] != NULL) {
+ info.mFetcher->startAsync(
+ sources[kAudioIndex], sources[kVideoIndex], sources[kSubtitleIndex]);
+ } else {
+ info.mToBeRemoved = true;
}
-
- CHECK_NE(resumeMask, 0u);
-
- ALOGV("resuming fetchers for mask 0x%08x", resumeMask);
-
- streamMask &= ~resumeMask;
-
- mFetcherInfos.valueAt(i).mFetcher->startAsync(
- audioSource, videoSource, subtitleSource);
}
// streamMask now only contains the types that need a new fetcher created.
@@ -1038,68 +1457,237 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
ALOGV("creating new fetchers for mask 0x%08x", streamMask);
}
- while (streamMask != 0) {
- StreamType streamType = (StreamType)(streamMask & ~(streamMask - 1));
+ // Find out how far the original fetchers have buffered and start the new fetchers
+ // at a later timestamp.
+ for (size_t i = 0; i < kMaxStreams; i++) {
+ if (!(indexToType(i) & streamMask)) {
+ continue;
+ }
AString uri;
- switch (streamType) {
- case STREAMTYPE_AUDIO:
- uri = audioURI;
- break;
- case STREAMTYPE_VIDEO:
- uri = videoURI;
- break;
- case STREAMTYPE_SUBTITLES:
- uri = subtitleURI;
- break;
- default:
- TRESPASS();
- }
+ uri = switching ? mStreams[i].mNewUri : mStreams[i].mUri;
sp<PlaylistFetcher> fetcher = addFetcher(uri.c_str());
CHECK(fetcher != NULL);
- sp<AnotherPacketSource> audioSource;
- if ((streamMask & STREAMTYPE_AUDIO) && uri == audioURI) {
- audioSource = mPacketSources.valueFor(STREAMTYPE_AUDIO);
- audioSource->clear();
+ int64_t startTimeUs = -1;
+ int64_t segmentStartTimeUs = -1ll;
+ int32_t discontinuitySeq = -1;
+ sp<AnotherPacketSource> sources[kMaxStreams];
- streamMask &= ~STREAMTYPE_AUDIO;
+ if (i == kSubtitleIndex) {
+ segmentStartTimeUs = latestMediaSegmentStartTimeUs();
}
- sp<AnotherPacketSource> videoSource;
- if ((streamMask & STREAMTYPE_VIDEO) && uri == videoURI) {
- videoSource = mPacketSources.valueFor(STREAMTYPE_VIDEO);
- videoSource->clear();
+ // TRICKY: looping from i as earlier streams are already removed from streamMask
+ for (size_t j = i; j < kMaxStreams; ++j) {
+ const AString &streamUri = switching ? mStreams[j].mNewUri : mStreams[j].mUri;
+ if ((streamMask & indexToType(j)) && uri == streamUri) {
+ sources[j] = mPacketSources.valueFor(indexToType(j));
+
+ if (timeUs >= 0) {
+ sources[j]->clear();
+ startTimeUs = timeUs;
+
+ sp<AnotherPacketSource> discontinuityQueue;
+ sp<AMessage> extra = new AMessage;
+ extra->setInt64("timeUs", timeUs);
+ discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
+ discontinuityQueue->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_TIME, extra, true);
+ } else {
+ int32_t type;
+ sp<AMessage> meta;
+ if (pickTrack) {
+ // selecting
+ meta = sources[j]->getLatestDequeuedMeta();
+ } else {
+ // adapting
+ meta = sources[j]->getLatestEnqueuedMeta();
+ }
- streamMask &= ~STREAMTYPE_VIDEO;
- }
+ if (meta != NULL && !meta->findInt32("discontinuity", &type)) {
+ int64_t tmpUs;
+ int64_t tmpSegmentUs;
+
+ CHECK(meta->findInt64("timeUs", &tmpUs));
+ CHECK(meta->findInt64("segmentStartTimeUs", &tmpSegmentUs));
+ if (startTimeUs < 0 || tmpSegmentUs < segmentStartTimeUs) {
+ startTimeUs = tmpUs;
+ segmentStartTimeUs = tmpSegmentUs;
+ } else if (tmpSegmentUs == segmentStartTimeUs && tmpUs < startTimeUs) {
+ startTimeUs = tmpUs;
+ }
- sp<AnotherPacketSource> subtitleSource;
- if ((streamMask & STREAMTYPE_SUBTITLES) && uri == subtitleURI) {
- subtitleSource = mPacketSources.valueFor(STREAMTYPE_SUBTITLES);
- subtitleSource->clear();
+ int32_t seq;
+ CHECK(meta->findInt32("discontinuitySeq", &seq));
+ if (discontinuitySeq < 0 || seq < discontinuitySeq) {
+ discontinuitySeq = seq;
+ }
+ }
- streamMask &= ~STREAMTYPE_SUBTITLES;
+ if (pickTrack) {
+ // selecting track, queue discontinuities before content
+ sources[j]->clear();
+ if (j == kSubtitleIndex) {
+ break;
+ }
+ sp<AnotherPacketSource> discontinuityQueue;
+ discontinuityQueue = mDiscontinuities.valueFor(indexToType(j));
+ discontinuityQueue->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true);
+ } else {
+ // adapting, queue discontinuities after resume
+ sources[j] = mPacketSources2.valueFor(indexToType(j));
+ sources[j]->clear();
+ uint32_t extraStreams = mNewStreamMask & (~mStreamMask);
+ if (extraStreams & indexToType(j)) {
+ sources[j]->queueAccessUnit(createFormatChangeBuffer(/*swap*/ false));
+ }
+ }
+ }
+
+ streamMask &= ~indexToType(j);
+ }
}
- fetcher->startAsync(audioSource, videoSource, subtitleSource, timeUs);
+ fetcher->startAsync(
+ sources[kAudioIndex],
+ sources[kVideoIndex],
+ sources[kSubtitleIndex],
+ startTimeUs < 0 ? mLastSeekTimeUs : startTimeUs,
+ segmentStartTimeUs,
+ discontinuitySeq,
+ switching);
}
// All fetchers have now been started, the configuration change
// has completed.
+ cancelCheckBandwidthEvent();
scheduleCheckBandwidthEvent();
ALOGV("XXX configuration change completed.");
-
mReconfigurationInProgress = false;
+ if (switching) {
+ mSwitchInProgress = true;
+ } else {
+ mStreamMask = mNewStreamMask;
+ }
if (mDisconnectReplyID != 0) {
finishDisconnect();
}
}
+void LiveSession::onSwapped(const sp<AMessage> &msg) {
+ int32_t switchGeneration;
+ CHECK(msg->findInt32("switchGeneration", &switchGeneration));
+ if (switchGeneration != mSwitchGeneration) {
+ return;
+ }
+
+ int32_t stream;
+ CHECK(msg->findInt32("stream", &stream));
+
+ ssize_t idx = typeToIndex(stream);
+ CHECK(idx >= 0);
+ if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) {
+ ALOGW("swapping stream type %d %s to empty stream", stream, mStreams[idx].mUri.c_str());
+ }
+ mStreams[idx].mUri = mStreams[idx].mNewUri;
+ mStreams[idx].mNewUri.clear();
+
+ mSwapMask &= ~stream;
+ if (mSwapMask != 0) {
+ return;
+ }
+
+ // Check if new variant contains extra streams.
+ uint32_t extraStreams = mNewStreamMask & (~mStreamMask);
+ while (extraStreams) {
+ StreamType extraStream = (StreamType) (extraStreams & ~(extraStreams - 1));
+ swapPacketSource(extraStream);
+ extraStreams &= ~extraStream;
+
+ idx = typeToIndex(extraStream);
+ CHECK(idx >= 0);
+ if (mStreams[idx].mNewUri.empty()) {
+ ALOGW("swapping extra stream type %d %s to empty stream",
+ extraStream, mStreams[idx].mUri.c_str());
+ }
+ mStreams[idx].mUri = mStreams[idx].mNewUri;
+ mStreams[idx].mNewUri.clear();
+ }
+
+ tryToFinishBandwidthSwitch();
+}
+
+void LiveSession::onCheckSwitchDown() {
+ if (mSwitchDownMonitor == NULL) {
+ return;
+ }
+
+ if (mSwitchInProgress || mReconfigurationInProgress) {
+ ALOGV("Switch/Reconfig in progress, defer switch down");
+ mSwitchDownMonitor->post(1000000ll);
+ return;
+ }
+
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ int32_t targetDuration;
+ sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(indexToType(i));
+ sp<AMessage> meta = packetSource->getLatestDequeuedMeta();
+
+ if (meta != NULL && meta->findInt32("targetDuration", &targetDuration)) {
+ int64_t bufferedDurationUs = packetSource->getEstimatedDurationUs();
+ int64_t targetDurationUs = targetDuration * 1000000ll;
+
+ if (bufferedDurationUs < targetDurationUs / 3) {
+ (new AMessage(kWhatSwitchDown, id()))->post();
+ break;
+ }
+ }
+ }
+
+ mSwitchDownMonitor->post(1000000ll);
+}
+
+void LiveSession::onSwitchDown() {
+ if (mReconfigurationInProgress || mSwitchInProgress || mCurBandwidthIndex == 0) {
+ return;
+ }
+
+ ssize_t bandwidthIndex = getBandwidthIndex();
+ if (bandwidthIndex < mCurBandwidthIndex) {
+ changeConfiguration(-1, bandwidthIndex, false);
+ return;
+ }
+
+}
+
+// Mark switch done when:
+// 1. all old buffers are swapped out
+void LiveSession::tryToFinishBandwidthSwitch() {
+ if (!mSwitchInProgress) {
+ return;
+ }
+
+ bool needToRemoveFetchers = false;
+ for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
+ if (mFetcherInfos.valueAt(i).mToBeRemoved) {
+ needToRemoveFetchers = true;
+ break;
+ }
+ }
+
+ if (!needToRemoveFetchers && mSwapMask == 0) {
+ ALOGI("mSwitchInProgress = false");
+ mStreamMask = mNewStreamMask;
+ mSwitchInProgress = false;
+ }
+}
+
void LiveSession::scheduleCheckBandwidthEvent() {
sp<AMessage> msg = new AMessage(kWhatCheckBandwidth, id());
msg->setInt32("generation", mCheckBandwidthGeneration);
@@ -1110,24 +1698,63 @@ void LiveSession::cancelCheckBandwidthEvent() {
++mCheckBandwidthGeneration;
}
-void LiveSession::onCheckBandwidth() {
- if (mReconfigurationInProgress) {
- scheduleCheckBandwidthEvent();
- return;
+void LiveSession::cancelBandwidthSwitch() {
+ Mutex::Autolock lock(mSwapMutex);
+ mSwitchGeneration++;
+ mSwitchInProgress = false;
+ mSwapMask = 0;
+
+ for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
+ FetcherInfo& info = mFetcherInfos.editValueAt(i);
+ if (info.mToBeRemoved) {
+ info.mToBeRemoved = false;
+ }
+ }
+
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (!mStreams[i].mNewUri.empty()) {
+ ssize_t j = mFetcherInfos.indexOfKey(mStreams[i].mNewUri);
+ if (j < 0) {
+ mStreams[i].mNewUri.clear();
+ continue;
+ }
+
+ const FetcherInfo &info = mFetcherInfos.valueAt(j);
+ info.mFetcher->stopAsync();
+ mFetcherInfos.removeItemsAt(j);
+ mStreams[i].mNewUri.clear();
+ }
+ }
+}
+
+bool LiveSession::canSwitchBandwidthTo(size_t bandwidthIndex) {
+ if (mReconfigurationInProgress || mSwitchInProgress) {
+ return false;
}
+ if (mCurBandwidthIndex < 0) {
+ return true;
+ }
+
+ if (bandwidthIndex == (size_t)mCurBandwidthIndex) {
+ return false;
+ } else if (bandwidthIndex > (size_t)mCurBandwidthIndex) {
+ return canSwitchUp();
+ } else {
+ return true;
+ }
+}
+
+void LiveSession::onCheckBandwidth(const sp<AMessage> &msg) {
size_t bandwidthIndex = getBandwidthIndex();
- if (mPrevBandwidthIndex < 0
- || bandwidthIndex != (size_t)mPrevBandwidthIndex) {
+ if (canSwitchBandwidthTo(bandwidthIndex)) {
changeConfiguration(-1ll /* timeUs */, bandwidthIndex);
+ } else {
+ // Come back and check again 10 seconds later in case there is nothing to do now.
+ // If we DO change configuration, once that completes it'll schedule a new
+ // check bandwidth event with an incremented mCheckBandwidthGeneration.
+ msg->post(10000000ll);
}
-
- // Handling the kWhatCheckBandwidth even here does _not_ automatically
- // schedule another one on return, only an explicit call to
- // scheduleCheckBandwidthEvent will do that.
- // This ensures that only one configuration change is ongoing at any
- // one time, once that completes it'll schedule another check bandwidth
- // event.
}
void LiveSession::postPrepared(status_t err) {
@@ -1144,6 +1771,9 @@ void LiveSession::postPrepared(status_t err) {
notify->post();
mInPreparationPhase = false;
+
+ mSwitchDownMonitor = new AMessage(kWhatCheckSwitchDown, id());
+ mSwitchDownMonitor->post();
}
} // namespace android
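Much of the LiveSession.cpp rework above replaces per-type audio/video/subtitle branches with loops over a stream index, where each index owns one bit of a stream mask (streamMask, resumeMask, mSwapMask, mNewStreamMask). The following self-contained program restates that mapping; indexToType()/typeToIndex() are declared in the header diff below, but this snippet is only an illustration of the pattern, not the real class.

#include <cstdint>
#include <cstdio>
#include <sys/types.h>   // ssize_t

// One bit per stream index, so sets of streams are plain bitmasks.
enum StreamIndex { kAudioIndex = 0, kVideoIndex = 1, kSubtitleIndex = 2, kMaxStreams = 3 };
enum StreamType {
    STREAMTYPE_AUDIO     = 1 << kAudioIndex,
    STREAMTYPE_VIDEO     = 1 << kVideoIndex,
    STREAMTYPE_SUBTITLES = 1 << kSubtitleIndex,
};

static StreamType indexToType(int idx) { return (StreamType)(1 << idx); }

static ssize_t typeToIndex(int32_t type) {
    for (int i = 0; i < kMaxStreams; ++i) {
        if (type == (1 << i)) {
            return i;
        }
    }
    return -1;
}

int main() {
    uint32_t streamMask = STREAMTYPE_AUDIO | STREAMTYPE_VIDEO;
    // Iterate every stream slot and test membership, the same pattern the
    // reworked changeConfiguration()/onChangeConfiguration3() loops use.
    for (int i = 0; i < kMaxStreams; ++i) {
        if (streamMask & indexToType(i)) {
            printf("stream index %d (type 0x%x) is active, maps back to index %zd\n",
                    i, (unsigned)indexToType(i), typeToIndex(indexToType(i)));
        }
    }
    return 0;
}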
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 8f6a4ea..2d3a25a 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -19,6 +19,7 @@
#define LIVE_SESSION_H_
#include <media/stagefright/foundation/AHandler.h>
+#include <media/mediaplayer.h>
#include <utils/String8.h>
@@ -28,10 +29,10 @@ struct ABuffer;
struct AnotherPacketSource;
struct DataSource;
struct HTTPBase;
+struct IMediaHTTPService;
struct LiveDataSource;
struct M3UParser;
struct PlaylistFetcher;
-struct Parcel;
struct LiveSession : public AHandler {
enum Flags {
@@ -40,12 +41,20 @@ struct LiveSession : public AHandler {
};
LiveSession(
const sp<AMessage> &notify,
- uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+ uint32_t flags,
+ const sp<IMediaHTTPService> &httpService);
+
+ enum StreamIndex {
+ kAudioIndex = 0,
+ kVideoIndex = 1,
+ kSubtitleIndex = 2,
+ kMaxStreams = 3,
+ };
enum StreamType {
- STREAMTYPE_AUDIO = 1,
- STREAMTYPE_VIDEO = 2,
- STREAMTYPE_SUBTITLES = 4,
+ STREAMTYPE_AUDIO = 1 << kAudioIndex,
+ STREAMTYPE_VIDEO = 1 << kVideoIndex,
+ STREAMTYPE_SUBTITLES = 1 << kSubtitleIndex,
};
status_t dequeueAccessUnit(StreamType stream, sp<ABuffer> *accessUnit);
@@ -61,8 +70,10 @@ struct LiveSession : public AHandler {
status_t seekTo(int64_t timeUs);
status_t getDuration(int64_t *durationUs) const;
- status_t getTrackInfo(Parcel *reply) const;
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t trackIndex) const;
status_t selectTrack(size_t index, bool select);
+ ssize_t getSelectedTrack(media_track_type /* type */) const;
bool isSeekable() const;
bool hasDynamicDuration() const;
@@ -74,6 +85,11 @@ struct LiveSession : public AHandler {
kWhatPreparationFailed,
};
+ // create a format-change discontinuity
+ //
+ // swap:
+ // whether this format-change discontinuity should trigger a buffer swap
+ sp<ABuffer> createFormatChangeBuffer(bool swap = true);
protected:
virtual ~LiveSession();
@@ -92,8 +108,13 @@ private:
kWhatChangeConfiguration2 = 'chC2',
kWhatChangeConfiguration3 = 'chC3',
kWhatFinishDisconnect2 = 'fin2',
+ kWhatSwapped = 'swap',
+ kWhatCheckSwitchDown = 'ckSD',
+ kWhatSwitchDown = 'sDwn',
};
+ static const size_t kBandwidthHistoryBytes;
+
struct BandwidthItem {
size_t mPlaylistIndex;
unsigned long mBandwidth;
@@ -103,14 +124,39 @@ private:
sp<PlaylistFetcher> mFetcher;
int64_t mDurationUs;
bool mIsPrepared;
+ bool mToBeRemoved;
};
+ struct StreamItem {
+ const char *mType;
+ AString mUri, mNewUri;
+ size_t mCurDiscontinuitySeq;
+ int64_t mLastDequeuedTimeUs;
+ int64_t mLastSampleDurationUs;
+ StreamItem()
+ : mType(""),
+ mCurDiscontinuitySeq(0),
+ mLastDequeuedTimeUs(0),
+ mLastSampleDurationUs(0) {}
+ StreamItem(const char *type)
+ : mType(type),
+ mCurDiscontinuitySeq(0),
+ mLastDequeuedTimeUs(0),
+ mLastSampleDurationUs(0) {}
+ AString uriKey() {
+ AString key(mType);
+ key.append("URI");
+ return key;
+ }
+ };
+ StreamItem mStreams[kMaxStreams];
+
sp<AMessage> mNotify;
uint32_t mFlags;
- bool mUIDValid;
- uid_t mUID;
+ sp<IMediaHTTPService> mHTTPService;
bool mInPreparationPhase;
+ bool mBuffering[kMaxStreams];
sp<HTTPBase> mHTTPDataSource;
KeyedVector<String8, String8> mExtraHeaders;
@@ -118,26 +164,54 @@ private:
AString mMasterURL;
Vector<BandwidthItem> mBandwidthItems;
- ssize_t mPrevBandwidthIndex;
+ ssize_t mCurBandwidthIndex;
sp<M3UParser> mPlaylist;
KeyedVector<AString, FetcherInfo> mFetcherInfos;
- AString mAudioURI, mVideoURI, mSubtitleURI;
uint32_t mStreamMask;
+ // Masks used during reconfiguration:
+ // mNewStreamMask: streams in the variant playlist we're switching to;
+ // we don't want to immediately overwrite the original value.
+ uint32_t mNewStreamMask;
+
+ // mSwapMask: streams that have started to play back content in the new variant playlist;
+ // we use this to track reconfiguration progress.
+ uint32_t mSwapMask;
+
+ KeyedVector<StreamType, sp<AnotherPacketSource> > mDiscontinuities;
KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources;
+ // A second set of packet sources that buffer content for the variant we're switching to.
+ KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources2;
+
+ // A mutex used to serialize two sets of events:
+ // * the swapping of packet sources in dequeueAccessUnit on the player thread, AND
+ // * a forced bandwidth switch termination in cancelSwitch on the live looper.
+ Mutex mSwapMutex;
int32_t mCheckBandwidthGeneration;
+ int32_t mSwitchGeneration;
+ int32_t mSubtitleGeneration;
size_t mContinuationCounter;
sp<AMessage> mContinuation;
+ sp<AMessage> mSeekReply;
int64_t mLastDequeuedTimeUs;
int64_t mRealTimeBaseUs;
bool mReconfigurationInProgress;
+ bool mSwitchInProgress;
uint32_t mDisconnectReplyID;
+ uint32_t mSeekReplyID;
+
+ bool mFirstTimeUsValid;
+ int64_t mFirstTimeUs;
+ int64_t mLastSeekTimeUs;
+ sp<AMessage> mSwitchDownMonitor;
+ KeyedVector<size_t, int64_t> mDiscontinuityAbsStartTimesUs;
+ KeyedVector<size_t, int64_t> mDiscontinuityOffsetTimesUs;
sp<PlaylistFetcher> addFetcher(const char *uri);
@@ -145,33 +219,65 @@ private:
status_t onSeek(const sp<AMessage> &msg);
void onFinishDisconnect2();
- status_t fetchFile(
+ // If given a non-zero block_size (default 0), it is used to cap the number of
+ // bytes read in from the DataSource. If given a non-NULL buffer, new content
+ // is read into the end.
+ //
+ // The DataSource we read from is responsible for signaling error or EOF to help us
+ // break out of the read loop. The DataSource can be returned to the caller, so
+ // that the caller can reuse it for subsequent fetches (within the initially
+ // requested range).
+ //
+ // For reused HTTP sources, the caller must download a file sequentially without
+ // any overlaps or gaps to prevent reconnection.
+ ssize_t fetchFile(
const char *url, sp<ABuffer> *out,
+ /* request/open a file starting at range_offset for range_length bytes */
int64_t range_offset = 0, int64_t range_length = -1,
+ /* download block size */
+ uint32_t block_size = 0,
+ /* reuse DataSource if doing partial fetch */
+ sp<DataSource> *source = NULL,
String8 *actualUrl = NULL);
sp<M3UParser> fetchPlaylist(
const char *url, uint8_t *curPlaylistHash, bool *unchanged);
size_t getBandwidthIndex();
+ int64_t latestMediaSegmentStartTimeUs();
static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *);
+ static StreamType indexToType(int idx);
+ static ssize_t typeToIndex(int32_t type);
void changeConfiguration(
int64_t timeUs, size_t bandwidthIndex, bool pickTrack = false);
void onChangeConfiguration(const sp<AMessage> &msg);
void onChangeConfiguration2(const sp<AMessage> &msg);
void onChangeConfiguration3(const sp<AMessage> &msg);
+ void onSwapped(const sp<AMessage> &msg);
+ void onCheckSwitchDown();
+ void onSwitchDown();
+ void tryToFinishBandwidthSwitch();
void scheduleCheckBandwidthEvent();
void cancelCheckBandwidthEvent();
- void onCheckBandwidth();
+ // cancelBandwidthSwitch is atomic wrt swapPacketSource; call it to prevent packet sources
+ // from being swapped out on stale discontinuities while manipulating
+ // mPacketSources/mPacketSources2.
+ void cancelBandwidthSwitch();
+
+ bool canSwitchBandwidthTo(size_t bandwidthIndex);
+ void onCheckBandwidth(const sp<AMessage> &msg);
void finishDisconnect();
void postPrepared(status_t err);
+ void swapPacketSource(StreamType stream);
+ bool canSwitchUp();
+
DISALLOW_EVIL_CONSTRUCTORS(LiveSession);
};
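The new switch-down/switch-up machinery declared here is driven by two buffer-level heuristics visible in LiveSession.cpp above: onCheckSwitchDown() requests a lower variant when a stream has buffered less than a third of the playlist's target duration, and canSwitchUp() only permits an upward switch once at least 10 seconds are buffered. The helper below is a hypothetical condensation of those two thresholds, not an actual LiveSession method.

#include <cstdint>

enum SwitchHint { kSwitchNone, kSwitchDown, kSwitchUpAllowed };

// bufferedUs: buffered duration of a stream; targetDurationUs: playlist target duration.
static SwitchHint adviseSwitch(int64_t bufferedUs, int64_t targetDurationUs) {
    if (bufferedUs < targetDurationUs / 3) {
        return kSwitchDown;                 // running low, drop to a lower bandwidth
    }
    if (bufferedUs > 10000000ll) {          // at least 10 s of margin buffered
        return kSwitchUpAllowed;            // safe to try a higher variant
    }
    return kSwitchNone;
}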
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 5ef7c0f..997b694 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -23,7 +23,9 @@
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
#include <media/mediaplayer.h>
namespace android {
@@ -33,6 +35,7 @@ struct M3UParser::MediaGroup : public RefBase {
TYPE_AUDIO,
TYPE_VIDEO,
TYPE_SUBS,
+ TYPE_CC,
};
enum FlagBits {
@@ -57,13 +60,16 @@ struct M3UParser::MediaGroup : public RefBase {
void pickRandomMediaItems();
status_t selectTrack(size_t index, bool select);
- void getTrackInfo(Parcel* reply) const;
size_t countTracks() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
protected:
virtual ~MediaGroup();
private:
+
+ friend struct M3UParser;
+
struct Media {
AString mName;
AString mURI;
@@ -125,7 +131,7 @@ void M3UParser::MediaGroup::pickRandomMediaItems() {
mSelectedIndex = strtoul(value, &end, 10);
CHECK(end > value && *end == '\0');
- if (mSelectedIndex >= mMediaItems.size()) {
+ if (mSelectedIndex >= (ssize_t)mMediaItems.size()) {
mSelectedIndex = mMediaItems.size() - 1;
}
} else {
@@ -155,63 +161,70 @@ void M3UParser::MediaGroup::pickRandomMediaItems() {
}
status_t M3UParser::MediaGroup::selectTrack(size_t index, bool select) {
- if (mType != TYPE_SUBS) {
- ALOGE("only select subtitile tracks for now!");
+ if (mType != TYPE_SUBS && mType != TYPE_AUDIO) {
+ ALOGE("only select subtitile/audio tracks for now!");
return INVALID_OPERATION;
}
if (select) {
if (index >= mMediaItems.size()) {
- ALOGE("track %d does not exist", index);
+ ALOGE("track %zu does not exist", index);
return INVALID_OPERATION;
}
- if (mSelectedIndex == index) {
- ALOGE("track %d already selected", index);
+ if (mSelectedIndex == (ssize_t)index) {
+ ALOGE("track %zu already selected", index);
return BAD_VALUE;
}
- ALOGV("selected track %d", index);
+ ALOGV("selected track %zu", index);
mSelectedIndex = index;
} else {
- if (mSelectedIndex != index) {
- ALOGE("track %d is not selected", index);
+ if (mSelectedIndex != (ssize_t)index) {
+ ALOGE("track %zu is not selected", index);
return BAD_VALUE;
}
- ALOGV("unselected track %d", index);
+ ALOGV("unselected track %zu", index);
mSelectedIndex = -1;
}
return OK;
}
-void M3UParser::MediaGroup::getTrackInfo(Parcel* reply) const {
- for (size_t i = 0; i < mMediaItems.size(); ++i) {
- reply->writeInt32(2); // 2 fields
-
- if (mType == TYPE_AUDIO) {
- reply->writeInt32(MEDIA_TRACK_TYPE_AUDIO);
- } else if (mType == TYPE_VIDEO) {
- reply->writeInt32(MEDIA_TRACK_TYPE_VIDEO);
- } else if (mType == TYPE_SUBS) {
- reply->writeInt32(MEDIA_TRACK_TYPE_SUBTITLE);
- } else {
- reply->writeInt32(MEDIA_TRACK_TYPE_UNKNOWN);
- }
+size_t M3UParser::MediaGroup::countTracks() const {
+ return mMediaItems.size();
+}
- const Media &item = mMediaItems.itemAt(i);
- const char *lang = item.mLanguage.empty() ? "und" : item.mLanguage.c_str();
- reply->writeString16(String16(lang));
+sp<AMessage> M3UParser::MediaGroup::getTrackInfo(size_t index) const {
+ if (index >= mMediaItems.size()) {
+ return NULL;
+ }
- if (mType == TYPE_SUBS) {
- // TO-DO: pass in a MediaFormat instead
- reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_AUTOSELECT));
- reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_DEFAULT));
- reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_FORCED));
- }
+ sp<AMessage> format = new AMessage();
+
+ int32_t trackType;
+ if (mType == TYPE_AUDIO) {
+ trackType = MEDIA_TRACK_TYPE_AUDIO;
+ } else if (mType == TYPE_VIDEO) {
+ trackType = MEDIA_TRACK_TYPE_VIDEO;
+ } else if (mType == TYPE_SUBS) {
+ trackType = MEDIA_TRACK_TYPE_SUBTITLE;
+ } else {
+ trackType = MEDIA_TRACK_TYPE_UNKNOWN;
+ }
+ format->setInt32("type", trackType);
+
+ const Media &item = mMediaItems.itemAt(index);
+ const char *lang = item.mLanguage.empty() ? "und" : item.mLanguage.c_str();
+ format->setString("language", lang);
+
+ if (mType == TYPE_SUBS) {
+ // TO-DO: pass in a MediaFormat instead
+ format->setString("mime", MEDIA_MIMETYPE_TEXT_VTT);
+ format->setInt32("auto", !!(item.mFlags & MediaGroup::FLAG_AUTOSELECT));
+ format->setInt32("default", !!(item.mFlags & MediaGroup::FLAG_DEFAULT));
+ format->setInt32("forced", !!(item.mFlags & MediaGroup::FLAG_FORCED));
}
-}
-size_t M3UParser::MediaGroup::countTracks() const {
- return mMediaItems.size();
+ return format;
}
bool M3UParser::MediaGroup::getActiveURI(AString *uri) const {
@@ -237,6 +250,7 @@ M3UParser::M3UParser(
mIsVariantPlaylist(false),
mIsComplete(false),
mIsEvent(false),
+ mDiscontinuitySeq(0),
mSelectedIndex(-1) {
mInitCheck = parse(data, size);
}
@@ -264,6 +278,10 @@ bool M3UParser::isEvent() const {
return mIsEvent;
}
+size_t M3UParser::getDiscontinuitySeq() const {
+ return mDiscontinuitySeq;
+}
+
sp<AMessage> M3UParser::meta() {
return mMeta;
}
@@ -318,23 +336,62 @@ status_t M3UParser::selectTrack(size_t index, bool select) {
return INVALID_OPERATION;
}
-status_t M3UParser::getTrackInfo(Parcel* reply) const {
+size_t M3UParser::getTrackCount() const {
size_t trackCount = 0;
for (size_t i = 0; i < mMediaGroups.size(); ++i) {
trackCount += mMediaGroups.valueAt(i)->countTracks();
}
- reply->writeInt32(trackCount);
+ return trackCount;
+}
- for (size_t i = 0; i < mMediaGroups.size(); ++i) {
- mMediaGroups.valueAt(i)->getTrackInfo(reply);
+sp<AMessage> M3UParser::getTrackInfo(size_t index) const {
+ for (size_t i = 0, ii = index; i < mMediaGroups.size(); ++i) {
+ sp<MediaGroup> group = mMediaGroups.valueAt(i);
+ size_t tracks = group->countTracks();
+ if (ii < tracks) {
+ return group->getTrackInfo(ii);
+ }
+ ii -= tracks;
}
- return OK;
+ return NULL;
}
ssize_t M3UParser::getSelectedIndex() const {
return mSelectedIndex;
}
+ssize_t M3UParser::getSelectedTrack(media_track_type type) const {
+ MediaGroup::Type groupType;
+ switch (type) {
+ case MEDIA_TRACK_TYPE_VIDEO:
+ groupType = MediaGroup::TYPE_VIDEO;
+ break;
+
+ case MEDIA_TRACK_TYPE_AUDIO:
+ groupType = MediaGroup::TYPE_AUDIO;
+ break;
+
+ case MEDIA_TRACK_TYPE_SUBTITLE:
+ groupType = MediaGroup::TYPE_SUBS;
+ break;
+
+ default:
+ return -1;
+ }
+
+ for (size_t i = 0, ii = 0; i < mMediaGroups.size(); ++i) {
+ sp<MediaGroup> group = mMediaGroups.valueAt(i);
+ size_t tracks = group->countTracks();
+ if (groupType != group->mType) {
+ ii += tracks;
+ } else if (group->mSelectedIndex >= 0) {
+ return ii + group->mSelectedIndex;
+ }
+ }
+
+ return -1;
+}
+
bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
if (!mIsVariantPlaylist) {
*uri = mBaseURI;
@@ -352,9 +409,28 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
if (!meta->findString(key, &groupID)) {
*uri = mItems.itemAt(index).mURI;
- // Assume media without any more specific attribute contains
- // audio and video, but no subtitles.
- return !strcmp("audio", key) || !strcmp("video", key);
+ AString codecs;
+ if (!meta->findString("codecs", &codecs)) {
+ // Assume media without any more specific attribute contains
+ // audio and video, but no subtitles.
+ return !strcmp("audio", key) || !strcmp("video", key);
+ } else {
+ // Split the comma separated list of codecs.
+ size_t offset = 0;
+ ssize_t commaPos = -1;
+ codecs.append(',');
+ while ((commaPos = codecs.find(",", offset)) >= 0) {
+ AString codec(codecs, offset, commaPos - offset);
+ codec.trim();
+ // return true only if a codec of type `key` ("audio"/"video")
+ // is found.
+ if (codecIsType(codec, key)) {
+ return true;
+ }
+ offset = commaPos + 1;
+ }
+ return false;
+ }
}
sp<MediaGroup> group = mMediaGroups.valueFor(groupID);
@@ -369,18 +445,6 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
return true;
}
-bool M3UParser::getAudioURI(size_t index, AString *uri) const {
- return getTypeURI(index, "audio", uri);
-}
-
-bool M3UParser::getVideoURI(size_t index, AString *uri) const {
- return getTypeURI(index, "video", uri);
-}
-
-bool M3UParser::getSubtitleURI(size_t index, AString *uri) const {
- return getTypeURI(index, "subtitles", uri);
-}
-
static bool MakeURL(const char *baseURL, const char *url, AString *out) {
out->clear();
@@ -390,6 +454,8 @@ static bool MakeURL(const char *baseURL, const char *url, AString *out) {
// Base URL must be absolute
return false;
}
+ const size_t schemeEnd = (strstr(baseURL, "//") - baseURL) + 2;
+ CHECK(schemeEnd == 7 || schemeEnd == 8);
if (!strncasecmp("http://", url, 7) || !strncasecmp("https://", url, 8)) {
// "url" is already an absolute URL, ignore base URL.
@@ -434,7 +500,7 @@ static bool MakeURL(const char *baseURL, const char *url, AString *out) {
// Check whether the found slash actually is part of the path
// and not part of the "http://".
- if (end > 6) {
+ if (end >= schemeEnd) {
out->setTo(baseURL, end);
} else {
out->setTo(baseURL);
@@ -542,6 +608,12 @@ status_t M3UParser::parse(const void *_data, size_t size) {
}
} else if (line.startsWith("#EXT-X-MEDIA")) {
err = parseMedia(line);
+ } else if (line.startsWith("#EXT-X-DISCONTINUITY-SEQUENCE")) {
+ size_t seq;
+ err = parseDiscontinuitySequence(line, &seq);
+ if (err == OK) {
+ mDiscontinuitySeq = seq;
+ }
}
if (err != OK) {
@@ -694,12 +766,25 @@ status_t M3UParser::parseStreamInf(
*meta = new AMessage;
}
(*meta)->setInt32("bandwidth", x);
+ } else if (!strcasecmp("codecs", key.c_str())) {
+ if (!isQuotedString(val)) {
+ ALOGE("Expected quoted string for %s attribute, "
+ "got '%s' instead.",
+ key.c_str(), val.c_str());
+
+ return ERROR_MALFORMED;
+ }
+
+ key.tolower();
+ const AString &codecs = unquoteString(val);
+ if (meta->get() == NULL) {
+ *meta = new AMessage;
+ }
+ (*meta)->setString(key.c_str(), codecs.c_str());
} else if (!strcasecmp("audio", key.c_str())
|| !strcasecmp("video", key.c_str())
|| !strcasecmp("subtitles", key.c_str())) {
- if (val.size() < 2
- || val.c_str()[0] != '"'
- || val.c_str()[val.size() - 1] != '"') {
+ if (!isQuotedString(val)) {
ALOGE("Expected quoted string for %s attribute, "
"got '%s' instead.",
key.c_str(), val.c_str());
@@ -707,7 +792,7 @@ status_t M3UParser::parseStreamInf(
return ERROR_MALFORMED;
}
- AString groupID(val, 1, val.size() - 2);
+ const AString &groupID = unquoteString(val);
ssize_t groupIndex = mMediaGroups.indexOfKey(groupID);
if (groupIndex < 0) {
@@ -718,6 +803,9 @@ status_t M3UParser::parseStreamInf(
}
key.tolower();
+ if (meta->get() == NULL) {
+ *meta = new AMessage;
+ }
(*meta)->setString(key.c_str(), groupID.c_str());
}
}
@@ -780,8 +868,8 @@ status_t M3UParser::parseCipherInfo(
if (MakeURL(baseURI.c_str(), val.c_str(), &absURI)) {
val = absURI;
} else {
- ALOGE("failed to make absolute url for '%s'.",
- val.c_str());
+ ALOGE("failed to make absolute url for %s.",
+ uriDebugString(baseURI).c_str());
}
}
@@ -904,6 +992,8 @@ status_t M3UParser::parseMedia(const AString &line) {
groupType = MediaGroup::TYPE_AUDIO;
} else if (!strcasecmp("video", val.c_str())) {
groupType = MediaGroup::TYPE_VIDEO;
+ } else if (!strcasecmp("closed-captions", val.c_str())){
+ groupType = MediaGroup::TYPE_CC;
} else {
ALOGE("Invalid media group type '%s'", val.c_str());
return ERROR_MALFORMED;
@@ -1016,6 +1106,13 @@ status_t M3UParser::parseMedia(const AString &line) {
return ERROR_MALFORMED;
}
+ if (groupType == MediaGroup::TYPE_CC) {
+ // TODO: ignore this for now.
+ // CC track will be detected by CCDecoder. But we still need to
+ // pass the CC track flags (lang, auto) to the app in the future.
+ return OK;
+ }
+
uint32_t flags = 0;
if (haveGroupAutoselect && groupAutoselect) {
flags |= MediaGroup::FLAG_AUTOSELECT;
@@ -1069,6 +1166,30 @@ status_t M3UParser::parseMedia(const AString &line) {
}
// static
+status_t M3UParser::parseDiscontinuitySequence(const AString &line, size_t *seq) {
+ ssize_t colonPos = line.find(":");
+
+ if (colonPos < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ int32_t x;
+ status_t err = ParseInt32(line.c_str() + colonPos + 1, &x);
+ if (err != OK) {
+ return err;
+ }
+
+ if (x < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (seq) {
+ *seq = x;
+ }
+ return OK;
+}
+
+// static
status_t M3UParser::ParseInt32(const char *s, int32_t *x) {
char *end;
long lval = strtol(s, &end, 10);
@@ -1096,4 +1217,121 @@ status_t M3UParser::ParseDouble(const char *s, double *x) {
return OK;
}
+// static
+bool M3UParser::isQuotedString(const AString &str) {
+ if (str.size() < 2
+ || str.c_str()[0] != '"'
+ || str.c_str()[str.size() - 1] != '"') {
+ return false;
+ }
+ return true;
+}
+
+// static
+AString M3UParser::unquoteString(const AString &str) {
+ if (!isQuotedString(str)) {
+ return str;
+ }
+ return AString(str, 1, str.size() - 2);
+}
+
+// static
+bool M3UParser::codecIsType(const AString &codec, const char *type) {
+ if (codec.size() < 4) {
+ return false;
+ }
+ const char *c = codec.c_str();
+ switch (FOURCC(c[0], c[1], c[2], c[3])) {
+ // List extracted from http://www.mp4ra.org/codecs.html
+ case 'ac-3':
+ case 'alac':
+ case 'dra1':
+ case 'dtsc':
+ case 'dtse':
+ case 'dtsh':
+ case 'dtsl':
+ case 'ec-3':
+ case 'enca':
+ case 'g719':
+ case 'g726':
+ case 'm4ae':
+ case 'mlpa':
+ case 'mp4a':
+ case 'raw ':
+ case 'samr':
+ case 'sawb':
+ case 'sawp':
+ case 'sevc':
+ case 'sqcp':
+ case 'ssmv':
+ case 'twos':
+ case 'agsm':
+ case 'alaw':
+ case 'dvi ':
+ case 'fl32':
+ case 'fl64':
+ case 'ima4':
+ case 'in24':
+ case 'in32':
+ case 'lpcm':
+ case 'Qclp':
+ case 'QDM2':
+ case 'QDMC':
+ case 'ulaw':
+ case 'vdva':
+ return !strcmp("audio", type);
+
+ case 'avc1':
+ case 'avc2':
+ case 'avcp':
+ case 'drac':
+ case 'encv':
+ case 'mjp2':
+ case 'mp4v':
+ case 'mvc1':
+ case 'mvc2':
+ case 'resv':
+ case 's263':
+ case 'svc1':
+ case 'vc-1':
+ case 'CFHD':
+ case 'civd':
+ case 'DV10':
+ case 'dvh5':
+ case 'dvh6':
+ case 'dvhp':
+ case 'DVOO':
+ case 'DVOR':
+ case 'DVTV':
+ case 'DVVT':
+ case 'flic':
+ case 'gif ':
+ case 'h261':
+ case 'h263':
+ case 'HD10':
+ case 'jpeg':
+ case 'M105':
+ case 'mjpa':
+ case 'mjpb':
+ case 'png ':
+ case 'PNTG':
+ case 'rle ':
+ case 'rpza':
+ case 'Shr0':
+ case 'Shr1':
+ case 'Shr2':
+ case 'Shr3':
+ case 'Shr4':
+ case 'SVQ1':
+ case 'SVQ3':
+ case 'tga ':
+ case 'tiff':
+ case 'WRLE':
+ return !strcmp("video", type);
+
+ default:
+ return false;
+ }
+}
+
} // namespace android
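getTypeURI() above now inspects a variant's CODECS attribute: it appends a trailing comma as a sentinel, walks the string one comma at a time, trims each element, and classifies it with codecIsType(). Below is a rough standalone sketch of just the splitting step using std::string; splitCodecs() is a hypothetical helper, not an M3UParser method, and each returned element would then be handed to a classifier such as codecIsType().

#include <string>
#include <vector>

static std::vector<std::string> splitCodecs(std::string codecs) {
    std::vector<std::string> out;
    codecs.push_back(',');                       // sentinel so the last item is emitted too
    size_t offset = 0;
    for (size_t commaPos; (commaPos = codecs.find(',', offset)) != std::string::npos; ) {
        std::string codec = codecs.substr(offset, commaPos - offset);
        // trim surrounding whitespace
        size_t b = codec.find_first_not_of(" \t");
        size_t e = codec.find_last_not_of(" \t");
        if (b != std::string::npos) {
            out.push_back(codec.substr(b, e - b + 1));
        }
        offset = commaPos + 1;
    }
    return out;
}

// e.g. splitCodecs("mp4a.40.2, avc1.42001f") yields {"mp4a.40.2", "avc1.42001f"}.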
diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h
index 5248004..1cad060 100644
--- a/media/libstagefright/httplive/M3UParser.h
+++ b/media/libstagefright/httplive/M3UParser.h
@@ -21,6 +21,7 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
+#include <media/mediaplayer.h>
#include <utils/Vector.h>
namespace android {
@@ -34,6 +35,7 @@ struct M3UParser : public RefBase {
bool isVariantPlaylist() const;
bool isComplete() const;
bool isEvent() const;
+ size_t getDiscontinuitySeq() const;
sp<AMessage> meta();
@@ -42,12 +44,12 @@ struct M3UParser : public RefBase {
void pickRandomMediaItems();
status_t selectTrack(size_t index, bool select);
- status_t getTrackInfo(Parcel* reply) const;
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
ssize_t getSelectedIndex() const;
+ ssize_t getSelectedTrack(media_track_type /* type */) const;
- bool getAudioURI(size_t index, AString *uri) const;
- bool getVideoURI(size_t index, AString *uri) const;
- bool getSubtitleURI(size_t index, AString *uri) const;
+ bool getTypeURI(size_t index, const char *key, AString *uri) const;
protected:
virtual ~M3UParser();
@@ -67,6 +69,7 @@ private:
bool mIsVariantPlaylist;
bool mIsComplete;
bool mIsEvent;
+ size_t mDiscontinuitySeq;
sp<AMessage> mMeta;
Vector<Item> mItems;
@@ -95,11 +98,15 @@ private:
status_t parseMedia(const AString &line);
- bool getTypeURI(size_t index, const char *key, AString *uri) const;
+ static status_t parseDiscontinuitySequence(const AString &line, size_t *seq);
static status_t ParseInt32(const char *s, int32_t *x);
static status_t ParseDouble(const char *s, double *x);
+ static bool isQuotedString(const AString &str);
+ static AString unquoteString(const AString &str);
+ static bool codecIsType(const AString &codec, const char *type);
+
DISALLOW_EVIL_CONSTRUCTORS(M3UParser);
};
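parseDiscontinuitySequence(), added above, reads the integer following the colon in an #EXT-X-DISCONTINUITY-SEQUENCE tag and rejects negative or malformed values. Here is a minimal standalone illustration of that parse with simplified error handling (bool instead of status_t) and a hypothetical name; it is a sketch of the idea, not the M3UParser implementation.

#include <cerrno>
#include <cstdlib>
#include <cstring>

static bool parseDiscontinuitySeq(const char *line, size_t *seq) {
    const char *colon = strchr(line, ':');
    if (colon == NULL) {
        return false;                       // malformed: no ":<number>" part
    }
    char *end;
    errno = 0;
    long val = strtol(colon + 1, &end, 10);
    if (errno != 0 || end == colon + 1 || val < 0) {
        return false;                       // not a valid non-negative integer
    }
    if (seq != NULL) {
        *seq = (size_t)val;
    }
    return true;
}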
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 973b779..1227600 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -40,6 +40,7 @@
#include <media/stagefright/Utils.h>
#include <ctype.h>
+#include <inttypes.h>
#include <openssl/aes.h>
#include <openssl/md5.h>
@@ -47,26 +48,41 @@ namespace android {
// static
const int64_t PlaylistFetcher::kMinBufferedDurationUs = 10000000ll;
+const int64_t PlaylistFetcher::kMaxMonitorDelayUs = 3000000ll;
+// LCM of 188 (size of a TS packet) & 1k works well
+const int32_t PlaylistFetcher::kDownloadBlockSize = 47 * 1024;
+const int32_t PlaylistFetcher::kNumSkipFrames = 5;
PlaylistFetcher::PlaylistFetcher(
const sp<AMessage> &notify,
const sp<LiveSession> &session,
- const char *uri)
+ const char *uri,
+ int32_t subtitleGeneration)
: mNotify(notify),
+ mStartTimeUsNotify(notify->dup()),
mSession(session),
mURI(uri),
mStreamTypeMask(0),
mStartTimeUs(-1ll),
+ mSegmentStartTimeUs(-1ll),
+ mDiscontinuitySeq(-1ll),
+ mStartTimeUsRelative(false),
mLastPlaylistFetchTimeUs(-1ll),
mSeqNumber(-1),
mNumRetries(0),
mStartup(true),
+ mAdaptive(false),
+ mPrepared(false),
mNextPTSTimeUs(-1ll),
mMonitorQueueGeneration(0),
+ mSubtitleGeneration(subtitleGeneration),
mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY),
mFirstPTSValid(false),
- mAbsoluteTimeAnchorUs(0ll) {
+ mAbsoluteTimeAnchorUs(0ll),
+ mVideoBuffer(new AnotherPacketSource(NULL)) {
memset(mPlaylistHash, 0, sizeof(mPlaylistHash));
+ mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
+ mStartTimeUsNotify->setInt32("streamMask", 0);
}
PlaylistFetcher::~PlaylistFetcher() {
@@ -103,10 +119,16 @@ int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const {
return segmentStartUs;
}
-bool PlaylistFetcher::timeToRefreshPlaylist(int64_t nowUs) const {
- if (mPlaylist == NULL) {
+int64_t PlaylistFetcher::delayUsToRefreshPlaylist() const {
+ int64_t nowUs = ALooper::GetNowUs();
+
+ if (mPlaylist == NULL || mLastPlaylistFetchTimeUs < 0ll) {
CHECK_EQ((int)mRefreshState, (int)INITIAL_MINIMUM_RELOAD_DELAY);
- return true;
+ return 0ll;
+ }
+
+ if (mPlaylist->isComplete()) {
+ return (~0llu >> 1);
}
int32_t targetDurationSecs;
@@ -157,11 +179,13 @@ bool PlaylistFetcher::timeToRefreshPlaylist(int64_t nowUs) const {
break;
}
- return mLastPlaylistFetchTimeUs + minPlaylistAgeUs <= nowUs;
+ int64_t delayUs = mLastPlaylistFetchTimeUs + minPlaylistAgeUs - nowUs;
+ return delayUs > 0ll ? delayUs : 0ll;
}
status_t PlaylistFetcher::decryptBuffer(
- size_t playlistIndex, const sp<ABuffer> &buffer) {
+ size_t playlistIndex, const sp<ABuffer> &buffer,
+ bool first) {
sp<AMessage> itemMeta;
bool found = false;
AString method;
@@ -179,6 +203,7 @@ status_t PlaylistFetcher::decryptBuffer(
if (!found) {
method = "NONE";
}
+ buffer->meta()->setString("cipher-method", method.c_str());
if (method == "NONE") {
return OK;
@@ -199,9 +224,9 @@ status_t PlaylistFetcher::decryptBuffer(
if (index >= 0) {
key = mAESKeyForURI.valueAt(index);
} else {
- status_t err = mSession->fetchFile(keyURI.c_str(), &key);
+ ssize_t err = mSession->fetchFile(keyURI.c_str(), &key);
- if (err != OK) {
+ if (err < 0) {
ALOGE("failed to fetch cipher key from '%s'.", keyURI.c_str());
return ERROR_IO;
} else if (key->size() != 16) {
@@ -218,63 +243,88 @@ status_t PlaylistFetcher::decryptBuffer(
return UNKNOWN_ERROR;
}
- unsigned char aes_ivec[16];
+ size_t n = buffer->size();
+ if (!n) {
+ return OK;
+ }
+ CHECK(n % 16 == 0);
- AString iv;
- if (itemMeta->findString("cipher-iv", &iv)) {
- if ((!iv.startsWith("0x") && !iv.startsWith("0X"))
- || iv.size() != 16 * 2 + 2) {
- ALOGE("malformed cipher IV '%s'.", iv.c_str());
- return ERROR_MALFORMED;
- }
+ if (first) {
+ // If decrypting the first block in a file, read the iv from the manifest
+ // or derive the iv from the file's sequence number.
- memset(aes_ivec, 0, sizeof(aes_ivec));
- for (size_t i = 0; i < 16; ++i) {
- char c1 = tolower(iv.c_str()[2 + 2 * i]);
- char c2 = tolower(iv.c_str()[3 + 2 * i]);
- if (!isxdigit(c1) || !isxdigit(c2)) {
+ AString iv;
+ if (itemMeta->findString("cipher-iv", &iv)) {
+ if ((!iv.startsWith("0x") && !iv.startsWith("0X"))
+ || iv.size() != 16 * 2 + 2) {
ALOGE("malformed cipher IV '%s'.", iv.c_str());
return ERROR_MALFORMED;
}
- uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10;
- uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10;
- aes_ivec[i] = nibble1 << 4 | nibble2;
+ memset(mAESInitVec, 0, sizeof(mAESInitVec));
+ for (size_t i = 0; i < 16; ++i) {
+ char c1 = tolower(iv.c_str()[2 + 2 * i]);
+ char c2 = tolower(iv.c_str()[3 + 2 * i]);
+ if (!isxdigit(c1) || !isxdigit(c2)) {
+ ALOGE("malformed cipher IV '%s'.", iv.c_str());
+ return ERROR_MALFORMED;
+ }
+ uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10;
+ uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10;
+
+ mAESInitVec[i] = nibble1 << 4 | nibble2;
+ }
+ } else {
+ memset(mAESInitVec, 0, sizeof(mAESInitVec));
+ mAESInitVec[15] = mSeqNumber & 0xff;
+ mAESInitVec[14] = (mSeqNumber >> 8) & 0xff;
+ mAESInitVec[13] = (mSeqNumber >> 16) & 0xff;
+ mAESInitVec[12] = (mSeqNumber >> 24) & 0xff;
}
- } else {
- memset(aes_ivec, 0, sizeof(aes_ivec));
- aes_ivec[15] = mSeqNumber & 0xff;
- aes_ivec[14] = (mSeqNumber >> 8) & 0xff;
- aes_ivec[13] = (mSeqNumber >> 16) & 0xff;
- aes_ivec[12] = (mSeqNumber >> 24) & 0xff;
}
AES_cbc_encrypt(
buffer->data(), buffer->data(), buffer->size(),
- &aes_key, aes_ivec, AES_DECRYPT);
+ &aes_key, mAESInitVec, AES_DECRYPT);
- // hexdump(buffer->data(), buffer->size());
-
- size_t n = buffer->size();
- CHECK_GT(n, 0u);
+ return OK;
+}
- size_t pad = buffer->data()[n - 1];
+status_t PlaylistFetcher::checkDecryptPadding(const sp<ABuffer> &buffer) {
+ AString method;
+ CHECK(buffer->meta()->findString("cipher-method", &method));
+ if (method == "NONE") {
+ return OK;
+ }
- CHECK_GT(pad, 0u);
- CHECK_LE(pad, 16u);
- CHECK_GE((size_t)n, pad);
- for (size_t i = 0; i < pad; ++i) {
- CHECK_EQ((unsigned)buffer->data()[n - 1 - i], pad);
+ uint8_t padding = 0;
+ if (buffer->size() > 0) {
+ padding = buffer->data()[buffer->size() - 1];
}
- n -= pad;
+ if (padding > 16) {
+ return ERROR_MALFORMED;
+ }
- buffer->setRange(buffer->offset(), n);
+ for (size_t i = buffer->size() - padding; i < buffer->size(); i++) {
+ if (buffer->data()[i] != padding) {
+ return ERROR_MALFORMED;
+ }
+ }
+ buffer->setRange(buffer->offset(), buffer->size() - padding);
return OK;
}
-void PlaylistFetcher::postMonitorQueue(int64_t delayUs) {
+void PlaylistFetcher::postMonitorQueue(int64_t delayUs, int64_t minDelayUs) {
+ int64_t maxDelayUs = delayUsToRefreshPlaylist();
+ if (maxDelayUs < minDelayUs) {
+ maxDelayUs = minDelayUs;
+ }
+ if (delayUs > maxDelayUs) {
+ ALOGV("Need to refresh playlist in %" PRId64 , maxDelayUs);
+ delayUs = maxDelayUs;
+ }
sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id());
msg->setInt32("generation", mMonitorQueueGeneration);
msg->post(delayUs);
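Two cipher details from the hunk above, restated as standalone helpers for clarity: when the playlist supplies no EXT-X-KEY IV, decryptBuffer() derives the IV by writing the media sequence number big-endian into the last four bytes of a zeroed 16-byte vector, and checkDecryptPadding() validates and strips PKCS#7 padding after the final block. The helper names below are hypothetical, and the padding check is written in the conventional form (every one of the last "pad" bytes must equal "pad").

#include <cstdint>
#include <cstring>
#include <sys/types.h>   // ssize_t

static void deriveIvFromSeqNumber(uint8_t iv[16], int32_t seqNumber) {
    memset(iv, 0, 16);
    iv[15] = seqNumber & 0xff;
    iv[14] = (seqNumber >> 8) & 0xff;
    iv[13] = (seqNumber >> 16) & 0xff;
    iv[12] = (seqNumber >> 24) & 0xff;
}

// Returns the payload size after stripping PKCS#7 padding, or -1 if invalid.
static ssize_t stripPkcs7Padding(const uint8_t *data, size_t size) {
    if (size == 0 || size % 16 != 0) {
        return -1;
    }
    uint8_t pad = data[size - 1];
    if (pad == 0 || pad > 16) {
        return -1;
    }
    for (size_t i = size - pad; i < size; ++i) {
        if (data[i] != pad) {
            return -1;
        }
    }
    return (ssize_t)(size - pad);
}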
@@ -288,7 +338,10 @@ void PlaylistFetcher::startAsync(
const sp<AnotherPacketSource> &audioSource,
const sp<AnotherPacketSource> &videoSource,
const sp<AnotherPacketSource> &subtitleSource,
- int64_t startTimeUs) {
+ int64_t startTimeUs,
+ int64_t segmentStartTimeUs,
+ int32_t startDiscontinuitySeq,
+ bool adaptive) {
sp<AMessage> msg = new AMessage(kWhatStart, id());
uint32_t streamTypeMask = 0ul;
@@ -310,6 +363,9 @@ void PlaylistFetcher::startAsync(
msg->setInt32("streamTypeMask", streamTypeMask);
msg->setInt64("startTimeUs", startTimeUs);
+ msg->setInt64("segmentStartTimeUs", segmentStartTimeUs);
+ msg->setInt32("startDiscontinuitySeq", startDiscontinuitySeq);
+ msg->setInt32("adaptive", adaptive);
msg->post();
}
@@ -317,8 +373,16 @@ void PlaylistFetcher::pauseAsync() {
(new AMessage(kWhatPause, id()))->post();
}
-void PlaylistFetcher::stopAsync() {
- (new AMessage(kWhatStop, id()))->post();
+void PlaylistFetcher::stopAsync(bool clear) {
+ sp<AMessage> msg = new AMessage(kWhatStop, id());
+ msg->setInt32("clear", clear);
+ msg->post();
+}
+
+void PlaylistFetcher::resumeUntilAsync(const sp<AMessage> &params) {
+ AMessage* msg = new AMessage(kWhatResumeUntil, id());
+ msg->setMessage("params", params);
+ msg->post();
}
void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {
@@ -346,7 +410,7 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {
case kWhatStop:
{
- onStop();
+ onStop(msg);
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatStopped);
@@ -355,6 +419,7 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {
}
case kWhatMonitorQueue:
+ case kWhatDownloadNext:
{
int32_t generation;
CHECK(msg->findInt32("generation", &generation));
@@ -364,7 +429,17 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- onMonitorQueue();
+ if (msg->what() == kWhatMonitorQueue) {
+ onMonitorQueue();
+ } else {
+ onDownloadNext();
+ }
+ break;
+ }
+
+ case kWhatResumeUntil:
+ {
+ onResumeUntil(msg);
break;
}
@@ -380,7 +455,13 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
CHECK(msg->findInt32("streamTypeMask", (int32_t *)&streamTypeMask));
int64_t startTimeUs;
+ int64_t segmentStartTimeUs;
+ int32_t startDiscontinuitySeq;
+ int32_t adaptive;
CHECK(msg->findInt64("startTimeUs", &startTimeUs));
+ CHECK(msg->findInt64("segmentStartTimeUs", &segmentStartTimeUs));
+ CHECK(msg->findInt32("startDiscontinuitySeq", &startDiscontinuitySeq));
+ CHECK(msg->findInt32("adaptive", &adaptive));
if (streamTypeMask & LiveSession::STREAMTYPE_AUDIO) {
void *ptr;
@@ -410,11 +491,16 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
}
mStreamTypeMask = streamTypeMask;
- mStartTimeUs = startTimeUs;
- if (mStartTimeUs >= 0ll) {
+ mSegmentStartTimeUs = segmentStartTimeUs;
+ mDiscontinuitySeq = startDiscontinuitySeq;
+
+ if (startTimeUs >= 0) {
+ mStartTimeUs = startTimeUs;
mSeqNumber = -1;
mStartup = true;
+ mPrepared = false;
+ mAdaptive = adaptive;
}
postMonitorQueue();
@@ -424,22 +510,82 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
void PlaylistFetcher::onPause() {
cancelMonitorQueue();
-
- mPacketSources.clear();
- mStreamTypeMask = 0;
}
-void PlaylistFetcher::onStop() {
+void PlaylistFetcher::onStop(const sp<AMessage> &msg) {
cancelMonitorQueue();
- for (size_t i = 0; i < mPacketSources.size(); ++i) {
- mPacketSources.valueAt(i)->clear();
+ int32_t clear;
+ CHECK(msg->findInt32("clear", &clear));
+ if (clear) {
+ for (size_t i = 0; i < mPacketSources.size(); i++) {
+ sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
+ packetSource->clear();
+ }
}
mPacketSources.clear();
mStreamTypeMask = 0;
}
+// Resume until we have reached the boundary timestamps listed in `msg`; when
+// the remaining time is too short (within a resume threshold) stop immediately
+// instead.
+status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ bool stop = false;
+ for (size_t i = 0; i < mPacketSources.size(); i++) {
+ sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
+
+ const char *stopKey;
+ int streamType = mPacketSources.keyAt(i);
+ switch (streamType) {
+ case LiveSession::STREAMTYPE_VIDEO:
+ stopKey = "timeUsVideo";
+ break;
+
+ case LiveSession::STREAMTYPE_AUDIO:
+ stopKey = "timeUsAudio";
+ break;
+
+ case LiveSession::STREAMTYPE_SUBTITLES:
+ stopKey = "timeUsSubtitle";
+ break;
+
+ default:
+ TRESPASS();
+ }
+
+ // Don't resume if we would stop within a resume threshold.
+ int32_t discontinuitySeq;
+ int64_t latestTimeUs = 0, stopTimeUs = 0;
+ sp<AMessage> latestMeta = packetSource->getLatestEnqueuedMeta();
+ if (latestMeta != NULL
+ && latestMeta->findInt32("discontinuitySeq", &discontinuitySeq)
+ && discontinuitySeq == mDiscontinuitySeq
+ && latestMeta->findInt64("timeUs", &latestTimeUs)
+ && params->findInt64(stopKey, &stopTimeUs)
+ && stopTimeUs - latestTimeUs < resumeThreshold(latestMeta)) {
+ stop = true;
+ }
+ }
+
+ if (stop) {
+ for (size_t i = 0; i < mPacketSources.size(); i++) {
+ mPacketSources.valueAt(i)->queueAccessUnit(mSession->createFormatChangeBuffer());
+ }
+ stopAsync(/* clear = */ false);
+ return OK;
+ }
+
+ mStopParams = params;
+ postMonitorQueue();
+
+ return OK;
+}
+
void PlaylistFetcher::notifyError(status_t err) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatError);
@@ -450,47 +596,84 @@ void PlaylistFetcher::notifyError(status_t err) {
void PlaylistFetcher::queueDiscontinuity(
ATSParser::DiscontinuityType type, const sp<AMessage> &extra) {
for (size_t i = 0; i < mPacketSources.size(); ++i) {
- mPacketSources.valueAt(i)->queueDiscontinuity(type, extra);
+ // do not discard buffer upon #EXT-X-DISCONTINUITY tag
+ // (seek will discard buffer by abandoning old fetchers)
+ mPacketSources.valueAt(i)->queueDiscontinuity(
+ type, extra, false /* discard */);
}
}
void PlaylistFetcher::onMonitorQueue() {
bool downloadMore = false;
+ refreshPlaylist();
+
+ int32_t targetDurationSecs;
+ int64_t targetDurationUs = kMinBufferedDurationUs;
+ if (mPlaylist != NULL) {
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "target-duration", &targetDurationSecs)) {
+ ALOGE("Playlist is missing required EXT-X-TARGETDURATION tag");
+ notifyError(ERROR_MALFORMED);
+ return;
+ }
+ targetDurationUs = targetDurationSecs * 1000000ll;
+ }
- status_t finalResult;
+ // buffer three times the target duration, but at most 10 seconds (kMinBufferedDurationUs)
+ int64_t durationToBufferUs = targetDurationUs * 3;
+ if (durationToBufferUs > kMinBufferedDurationUs) {
+ durationToBufferUs = kMinBufferedDurationUs;
+ }
+
+ int64_t bufferedDurationUs = 0ll;
+ status_t finalResult = NOT_ENOUGH_DATA;
if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {
sp<AnotherPacketSource> packetSource =
mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES);
- int64_t bufferedDurationUs =
+ bufferedDurationUs =
packetSource->getBufferedDurationUs(&finalResult);
-
- downloadMore = (bufferedDurationUs < kMinBufferedDurationUs);
finalResult = OK;
} else {
- bool first = true;
- int64_t minBufferedDurationUs = 0ll;
-
+ // Use the max stream duration to prevent us from waiting on a non-existent stream;
+ // when the manifest doesn't make clear which streams a playlist includes, we might
+ // assume extra streams.
for (size_t i = 0; i < mPacketSources.size(); ++i) {
if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) {
continue;
}
- int64_t bufferedDurationUs =
+ int64_t bufferedStreamDurationUs =
mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult);
-
- if (first || bufferedDurationUs < minBufferedDurationUs) {
- minBufferedDurationUs = bufferedDurationUs;
- first = false;
+ ALOGV("buffered %" PRId64 " for stream %d",
+ bufferedStreamDurationUs, mPacketSources.keyAt(i));
+ if (bufferedStreamDurationUs > bufferedDurationUs) {
+ bufferedDurationUs = bufferedStreamDurationUs;
}
}
+ }
+ downloadMore = (bufferedDurationUs < durationToBufferUs);
+
+ // signal start if buffered up at least the target size
+ if (!mPrepared && bufferedDurationUs > targetDurationUs && downloadMore) {
+ mPrepared = true;
- downloadMore =
- !first && (minBufferedDurationUs < kMinBufferedDurationUs);
+ ALOGV("prepared, buffered=%" PRId64 " > %" PRId64 "",
+ bufferedDurationUs, targetDurationUs);
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatTemporarilyDoneFetching);
+ msg->post();
}
if (finalResult == OK && downloadMore) {
- onDownloadNext();
+ ALOGV("monitoring, buffered=%" PRId64 " < %" PRId64 "",
+ bufferedDurationUs, durationToBufferUs);
+ // delay the next download slightly; hopefully this gives other concurrent fetchers
+ // a better chance to run.
+ // onDownloadNext();
+ sp<AMessage> msg = new AMessage(kWhatDownloadNext, id());
+ msg->setInt32("generation", mMonitorQueueGeneration);
+ msg->post(1000l);
} else {
// Nothing to do yet, try again in a second.
@@ -498,15 +681,17 @@ void PlaylistFetcher::onMonitorQueue() {
msg->setInt32("what", kWhatTemporarilyDoneFetching);
msg->post();
- postMonitorQueue(1000000ll);
+ int64_t delayUs = mPrepared ? kMaxMonitorDelayUs : targetDurationUs / 2;
+ ALOGV("pausing for %" PRId64 ", buffered=%" PRId64 " > %" PRId64 "",
+ delayUs, bufferedDurationUs, durationToBufferUs);
+ // :TRICKY: need to enforce minimum delay because the delay to
+ // refresh the playlist will become 0
+ postMonitorQueue(delayUs, mPrepared ? targetDurationUs * 2 : 0);
}
}
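A worked example of the buffering policy above (assuming kMinBufferedDurationUs is the
10 seconds mentioned in the comment): with an EXT-X-TARGETDURATION of 6 seconds the goal is
min(3 * 6 s, 10 s) = 10 s of buffered data; kWhatTemporarilyDoneFetching fires once more than
6 s is buffered (mPrepared), and kWhatDownloadNext keeps being rescheduled with a 1 ms
(1000 us) delay until the 10 s goal is met, after which monitoring backs off to
kMaxMonitorDelayUs.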
-void PlaylistFetcher::onDownloadNext() {
- int64_t nowUs = ALooper::GetNowUs();
-
- if (mLastPlaylistFetchTimeUs < 0ll
- || (!mPlaylist->isComplete() && timeToRefreshPlaylist(nowUs))) {
+status_t PlaylistFetcher::refreshPlaylist() {
+ if (delayUsToRefreshPlaylist() <= 0) {
bool unchanged;
sp<M3UParser> playlist = mSession->fetchPlaylist(
mURI.c_str(), mPlaylistHash, &unchanged);
@@ -520,9 +705,8 @@ void PlaylistFetcher::onDownloadNext() {
mRefreshState = (RefreshState)(mRefreshState + 1);
}
} else {
- ALOGE("failed to load playlist at url '%s'", mURI.c_str());
- notifyError(ERROR_IO);
- return;
+ ALOGE("failed to load playlist at url '%s'", uriDebugString(mURI).c_str());
+ return ERROR_IO;
}
} else {
mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY;
@@ -535,59 +719,149 @@ void PlaylistFetcher::onDownloadNext() {
mLastPlaylistFetchTimeUs = ALooper::GetNowUs();
}
+ return OK;
+}
- int32_t firstSeqNumberInPlaylist;
- if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
- "media-sequence", &firstSeqNumberInPlaylist)) {
- firstSeqNumberInPlaylist = 0;
- }
+// static
+bool PlaylistFetcher::bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer) {
+ return buffer->size() > 0 && buffer->data()[0] == 0x47;
+}
- bool seekDiscontinuity = false;
- bool explicitDiscontinuity = false;
+void PlaylistFetcher::onDownloadNext() {
+ status_t err = refreshPlaylist();
+ int32_t firstSeqNumberInPlaylist = 0;
+ int32_t lastSeqNumberInPlaylist = 0;
+ bool discontinuity = false;
+
+ if (mPlaylist != NULL) {
+ if (mPlaylist->meta() != NULL) {
+ mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist);
+ }
- const int32_t lastSeqNumberInPlaylist =
- firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
+ lastSeqNumberInPlaylist =
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
- if (mSeqNumber < 0) {
+ if (mDiscontinuitySeq < 0) {
+ mDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();
+ }
+ }
+
+ if (mPlaylist != NULL && mSeqNumber < 0) {
CHECK_GE(mStartTimeUs, 0ll);
- if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
- mSeqNumber = getSeqNumberForTime(mStartTimeUs);
+ if (mSegmentStartTimeUs < 0) {
+ if (!mPlaylist->isComplete() && !mPlaylist->isEvent()) {
+ // If this is a live session, start 3 segments from the end on connect
+ mSeqNumber = lastSeqNumberInPlaylist - 3;
+ if (mSeqNumber < firstSeqNumberInPlaylist) {
+ mSeqNumber = firstSeqNumberInPlaylist;
+ }
+ } else {
+ // When seeking, mSegmentStartTimeUs is unavailable (< 0), so we
+ // use mStartTimeUs (the client-supplied timestamp) to determine both the
+ // starting segment and the relative position inside it
+ mSeqNumber = getSeqNumberForTime(mStartTimeUs);
+ mStartTimeUs -= getSegmentStartTimeUs(mSeqNumber);
+ }
+ mStartTimeUsRelative = true;
+ ALOGV("Initial sequence number for time %" PRId64 " is %d from (%d .. %d)",
+ mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist);
} else {
- // If this is a live session, start 3 segments from the end.
- mSeqNumber = lastSeqNumberInPlaylist - 3;
+ // When adapting or track switching, mSegmentStartTimeUs (relative
+ // to media time 0) is used to determine the start segment; mStartTimeUs (absolute
+ // timestamps coming from the media container) is used to determine the position
+ // inside a segment.
+ mSeqNumber = getSeqNumberForTime(mSegmentStartTimeUs);
+ if (mAdaptive) {
+ // avoid double fetch/decode
+ mSeqNumber += 1;
+ }
+ ssize_t minSeq = getSeqNumberForDiscontinuity(mDiscontinuitySeq);
+ if (mSeqNumber < minSeq) {
+ mSeqNumber = minSeq;
+ }
+
if (mSeqNumber < firstSeqNumberInPlaylist) {
mSeqNumber = firstSeqNumberInPlaylist;
}
- }
- mStartTimeUs = -1ll;
+ if (mSeqNumber > lastSeqNumberInPlaylist) {
+ mSeqNumber = lastSeqNumberInPlaylist;
+ }
+ ALOGV("Initial sequence number for live event %d from (%d .. %d)",
+ mSeqNumber, firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist);
+ }
}
+ // if mPlaylist is NULL then err must be non-OK; but the other way around might not be true
if (mSeqNumber < firstSeqNumberInPlaylist
- || mSeqNumber > lastSeqNumberInPlaylist) {
- if (!mPlaylist->isComplete() && mNumRetries < kMaxNumRetries) {
+ || mSeqNumber > lastSeqNumberInPlaylist
+ || err != OK) {
+ if ((err != OK || !mPlaylist->isComplete()) && mNumRetries < kMaxNumRetries) {
++mNumRetries;
- if (mSeqNumber > lastSeqNumberInPlaylist) {
- mLastPlaylistFetchTimeUs = -1;
- postMonitorQueue(3000000ll);
+ if (mSeqNumber > lastSeqNumberInPlaylist || err != OK) {
+ // make sure we reach this retry logic on refresh failures
+ // by adding an err != OK clause to all enclosing if's.
+
+ // refresh in increasing fraction (1/2, 1/3, ...) of the
+ // playlist's target duration or 3 seconds, whichever is less
+ int64_t delayUs = kMaxMonitorDelayUs;
+ if (mPlaylist != NULL && mPlaylist->meta() != NULL) {
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ delayUs = mPlaylist->size() * targetDurationSecs *
+ 1000000ll / (1 + mNumRetries);
+ }
+ if (delayUs > kMaxMonitorDelayUs) {
+ delayUs = kMaxMonitorDelayUs;
+ }
+ ALOGV("sequence number high: %d from (%d .. %d), "
+ "monitor in %" PRId64 " (retry=%d)",
+ mSeqNumber, firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist, delayUs, mNumRetries);
+ postMonitorQueue(delayUs);
return;
}
- // we've missed the boat, let's start from the lowest sequence
+ if (err != OK) {
+ notifyError(err);
+ return;
+ }
+
+ // we've missed the boat, let's start 3 segments prior to the latest sequence
// number available and signal a discontinuity.
- ALOGI("We've missed the boat, restarting playback.");
- mSeqNumber = lastSeqNumberInPlaylist;
- explicitDiscontinuity = true;
+ ALOGI("We've missed the boat, restarting playback."
+ " mStartup=%d, was looking for %d in %d-%d",
+ mStartup, mSeqNumber, firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist);
+ if (mStopParams != NULL) {
+ // we should have kept on fetching until we hit the boundaries in mStopParams,
+ // but since the segments we are supposed to fetch have already rolled off
+ // the playlist, i.e. we have already missed the boat, we inevitably have to
+ // skip.
+ for (size_t i = 0; i < mPacketSources.size(); i++) {
+ sp<ABuffer> formatChange = mSession->createFormatChangeBuffer();
+ mPacketSources.valueAt(i)->queueAccessUnit(formatChange);
+ }
+ stopAsync(/* clear = */ false);
+ return;
+ }
+ mSeqNumber = lastSeqNumberInPlaylist - 3;
+ if (mSeqNumber < firstSeqNumberInPlaylist) {
+ mSeqNumber = firstSeqNumberInPlaylist;
+ }
+ discontinuity = true;
// fall through
} else {
ALOGE("Cannot find sequence number %d in playlist "
"(contains %d - %d)",
mSeqNumber, firstSeqNumberInPlaylist,
- firstSeqNumberInPlaylist + mPlaylist->size() - 1);
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1);
notifyError(ERROR_END_OF_STREAM);
return;
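Tracing the retry backoff above with illustrative numbers: a playlist of 6 entries with a
6-second target duration gives a raw delay of 6 * 6 s / (1 + mNumRetries), i.e. 18 s on the
first retry, which the kMaxMonitorDelayUs cap (the 3 seconds mentioned in the comment)
reduces to 3 s; the fractional series (1/2, 1/3, ...) only matters for very short playlists,
e.g. a single 4-second entry retries after 2 s, then roughly 1.3 s.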
@@ -605,7 +879,8 @@ void PlaylistFetcher::onDownloadNext() {
int32_t val;
if (itemMeta->findInt32("discontinuity", &val) && val != 0) {
- explicitDiscontinuity = true;
+ mDiscontinuitySeq++;
+ discontinuity = true;
}
int64_t range_offset, range_length;
@@ -620,50 +895,171 @@ void PlaylistFetcher::onDownloadNext() {
ALOGV("fetching '%s'", uri.c_str());
- sp<ABuffer> buffer;
- status_t err = mSession->fetchFile(
- uri.c_str(), &buffer, range_offset, range_length);
-
- if (err != OK) {
- ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str());
- notifyError(err);
- return;
+ sp<DataSource> source;
+ sp<ABuffer> buffer, tsBuffer;
+ // decrypt a junk buffer to prefetch key; since a session uses only one http connection,
+ // this avoids interleaved connections to the key and segment file.
+ {
+ sp<ABuffer> junk = new ABuffer(16);
+ junk->setRange(0, 16);
+ status_t err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, junk,
+ true /* first */);
+ if (err != OK) {
+ notifyError(err);
+ return;
+ }
}
- CHECK(buffer != NULL);
+ // block-wise download
+ bool startup = mStartup;
+ ssize_t bytesRead;
+ do {
+ bytesRead = mSession->fetchFile(
+ uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize, &source);
+
+ if (bytesRead < 0) {
+ status_t err = bytesRead;
+ ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str());
+ notifyError(err);
+ return;
+ }
- err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, buffer);
+ CHECK(buffer != NULL);
- if (err != OK) {
- ALOGE("decryptBuffer failed w/ error %d", err);
+ size_t size = buffer->size();
+ // Set decryption range.
+ buffer->setRange(size - bytesRead, bytesRead);
+ status_t err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, buffer,
+ buffer->offset() == 0 /* first */);
+ // Unset decryption range.
+ buffer->setRange(0, size);
- notifyError(err);
- return;
- }
+ if (err != OK) {
+ ALOGE("decryptBuffer failed w/ error %d", err);
+
+ notifyError(err);
+ return;
+ }
+
+ if (startup || discontinuity) {
+ // Signal discontinuity.
+
+ if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
+ // If this was a live event this made no sense since
+ // we don't have access to all the segments before the current
+ // one.
+ mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber);
+ }
+
+ if (discontinuity) {
+ ALOGI("queueing discontinuity (explicit=%d)", discontinuity);
+
+ queueDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMATCHANGE,
+ NULL /* extra */);
+
+ discontinuity = false;
+ }
+
+ startup = false;
+ }
+
+ err = OK;
+ if (bufferStartsWithTsSyncByte(buffer)) {
+ // Incremental extraction is only supported for MPEG2 transport streams.
+ if (tsBuffer == NULL) {
+ tsBuffer = new ABuffer(buffer->data(), buffer->capacity());
+ tsBuffer->setRange(0, 0);
+ } else if (tsBuffer->capacity() != buffer->capacity()) {
+ size_t tsOff = tsBuffer->offset(), tsSize = tsBuffer->size();
+ tsBuffer = new ABuffer(buffer->data(), buffer->capacity());
+ tsBuffer->setRange(tsOff, tsSize);
+ }
+ tsBuffer->setRange(tsBuffer->offset(), tsBuffer->size() + bytesRead);
+
+ err = extractAndQueueAccessUnitsFromTs(tsBuffer);
+ }
+
+ if (err == -EAGAIN) {
+ // starting sequence number too low/high
+ mTSParser.clear();
+ for (size_t i = 0; i < mPacketSources.size(); i++) {
+ sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
+ packetSource->clear();
+ }
+ postMonitorQueue();
+ return;
+ } else if (err == ERROR_OUT_OF_RANGE) {
+ // reached stopping point
+ stopAsync(/* clear = */ false);
+ return;
+ } else if (err != OK) {
+ notifyError(err);
+ return;
+ }
+
+ } while (bytesRead != 0);
+
+ if (bufferStartsWithTsSyncByte(buffer)) {
+ // If we don't see a stream in the program table after fetching a full ts segment,
+ // mark it as nonexistent.
+ const size_t kNumTypes = ATSParser::NUM_SOURCE_TYPES;
+ ATSParser::SourceType srcTypes[kNumTypes] =
+ { ATSParser::VIDEO, ATSParser::AUDIO };
+ LiveSession::StreamType streamTypes[kNumTypes] =
+ { LiveSession::STREAMTYPE_VIDEO, LiveSession::STREAMTYPE_AUDIO };
+
+ for (size_t i = 0; i < kNumTypes; i++) {
+ ATSParser::SourceType srcType = srcTypes[i];
+ LiveSession::StreamType streamType = streamTypes[i];
+
+ sp<AnotherPacketSource> source =
+ static_cast<AnotherPacketSource *>(
+ mTSParser->getSource(srcType).get());
- if (mStartup || seekDiscontinuity || explicitDiscontinuity) {
- // Signal discontinuity.
+ if (!mTSParser->hasSource(srcType)) {
+ ALOGW("MPEG2 Transport stream does not contain %s data.",
+ srcType == ATSParser::VIDEO ? "video" : "audio");
- if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
- // If this was a live event this made no sense since
- // we don't have access to all the segment before the current
- // one.
- mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber);
+ mStreamTypeMask &= ~streamType;
+ mPacketSources.removeItem(streamType);
+ }
}
- if (seekDiscontinuity || explicitDiscontinuity) {
- ALOGI("queueing discontinuity (seek=%d, explicit=%d)",
- seekDiscontinuity, explicitDiscontinuity);
+ }
- queueDiscontinuity(
- explicitDiscontinuity
- ? ATSParser::DISCONTINUITY_FORMATCHANGE
- : ATSParser::DISCONTINUITY_SEEK,
- NULL /* extra */);
+ if (checkDecryptPadding(buffer) != OK) {
+ ALOGE("Incorrect padding bytes after decryption.");
+ notifyError(ERROR_MALFORMED);
+ return;
+ }
+
+ err = OK;
+ if (tsBuffer != NULL) {
+ AString method;
+ CHECK(buffer->meta()->findString("cipher-method", &method));
+ if ((tsBuffer->size() > 0 && method == "NONE")
+ || tsBuffer->size() > 16) {
+ ALOGE("MPEG2 transport stream is not an even multiple of 188 "
+ "bytes in length.");
+ notifyError(ERROR_MALFORMED);
+ return;
}
}
- err = extractAndQueueAccessUnits(buffer, itemMeta);
+ // bulk extract non-ts files
+ if (tsBuffer == NULL) {
+ err = extractAndQueueAccessUnits(buffer, itemMeta);
+ if (err == -EAGAIN) {
+ // starting sequence number too low/high
+ postMonitorQueue();
+ return;
+ } else if (err == ERROR_OUT_OF_RANGE) {
+ // reached stopping point
+ stopAsync(/* clear = */false);
+ return;
+ }
+ }
if (err != OK) {
notifyError(err);
@@ -673,8 +1069,66 @@ void PlaylistFetcher::onDownloadNext() {
++mSeqNumber;
postMonitorQueue();
+}
+
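The tsBuffer bookkeeping in the block-wise download loop above is easiest to follow with a
made-up trace: tsBuffer wraps the same backing memory as buffer, and its range marks bytes
downloaded but not yet consumed by the TS parser. Suppose the first fetchFile() call returns
2000 bytes: tsBuffer's range becomes offset 0, size 2000; extractAndQueueAccessUnitsFromTs()
feeds 10 whole 188-byte packets (1880 bytes) and leaves offset 1880, size 120. The next
2000-byte block grows the range to size 2120, of which 11 packets (2068 bytes) are consumed,
leaving offset 3948, size 52, so a partial packet simply carries over to the next iteration.
The capacity check re-wraps tsBuffer whenever fetchFile() reallocates buffer.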
+int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const {
+ int32_t firstSeqNumberInPlaylist, lastSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL
+ || !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
+ lastSeqNumberInPlaylist = firstSeqNumberInPlaylist + mPlaylist->size() - 1;
+
+ int32_t index = mSeqNumber - firstSeqNumberInPlaylist - 1;
+ while (index >= 0 && anchorTimeUs > mStartTimeUs) {
+ sp<AMessage> itemMeta;
+ CHECK(mPlaylist->itemAt(index, NULL /* uri */, &itemMeta));
+
+ int64_t itemDurationUs;
+ CHECK(itemMeta->findInt64("durationUs", &itemDurationUs));
+
+ anchorTimeUs -= itemDurationUs;
+ --index;
+ }
+
+ int32_t newSeqNumber = firstSeqNumberInPlaylist + index + 1;
+ if (newSeqNumber <= lastSeqNumberInPlaylist) {
+ return newSeqNumber;
+ } else {
+ return lastSeqNumberInPlaylist;
+ }
+}
+
+int32_t PlaylistFetcher::getSeqNumberForDiscontinuity(size_t discontinuitySeq) const {
+ int32_t firstSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL
+ || !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
+
+ size_t curDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();
+ if (discontinuitySeq < curDiscontinuitySeq) {
+ return firstSeqNumberInPlaylist <= 0 ? 0 : (firstSeqNumberInPlaylist - 1);
+ }
+
+ size_t index = 0;
+ while (index < mPlaylist->size()) {
+ sp<AMessage> itemMeta;
+ CHECK(mPlaylist->itemAt( index, NULL /* uri */, &itemMeta));
+
+ int64_t discontinuity;
+ if (itemMeta->findInt64("discontinuity", &discontinuity)) {
+ curDiscontinuitySeq++;
+ }
+
+ if (curDiscontinuitySeq == discontinuitySeq) {
+ return firstSeqNumberInPlaylist + index;
+ }
+
+ ++index;
+ }
- mStartup = false;
+ return firstSeqNumberInPlaylist + mPlaylist->size();
}
int32_t PlaylistFetcher::getSeqNumberForTime(int64_t timeUs) const {
@@ -709,95 +1163,289 @@ int32_t PlaylistFetcher::getSeqNumberForTime(int64_t timeUs) const {
return firstSeqNumberInPlaylist + index;
}
-status_t PlaylistFetcher::extractAndQueueAccessUnits(
- const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta) {
- if (buffer->size() > 0 && buffer->data()[0] == 0x47) {
- // Let's assume this is an MPEG2 transport stream.
+const sp<ABuffer> &PlaylistFetcher::setAccessUnitProperties(
+ const sp<ABuffer> &accessUnit, const sp<AnotherPacketSource> &source, bool discard) {
+ sp<MetaData> format = source->getFormat();
+ if (format != NULL) {
+ // for simplicity, store a reference to the format in each unit
+ accessUnit->meta()->setObject("format", format);
+ }
- if ((buffer->size() % 188) != 0) {
- ALOGE("MPEG2 transport stream is not an even multiple of 188 "
- "bytes in length.");
- return ERROR_MALFORMED;
- }
+ if (discard) {
+ accessUnit->meta()->setInt32("discard", discard);
+ }
- if (mTSParser == NULL) {
- mTSParser = new ATSParser;
- }
+ int32_t targetDurationSecs;
+ if (mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)) {
+ accessUnit->meta()->setInt32("targetDuration", targetDurationSecs);
+ }
+
+ accessUnit->meta()->setInt32("discontinuitySeq", mDiscontinuitySeq);
+ accessUnit->meta()->setInt64("segmentStartTimeUs", getSegmentStartTimeUs(mSeqNumber));
+ return accessUnit;
+}
- if (mNextPTSTimeUs >= 0ll) {
- sp<AMessage> extra = new AMessage;
- extra->setInt64(IStreamListener::kKeyMediaTimeUs, mNextPTSTimeUs);
+status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &buffer) {
+ if (mTSParser == NULL) {
+ // Use TS_TIMESTAMPS_ARE_ABSOLUTE so pts carry over between fetchers.
+ mTSParser = new ATSParser(ATSParser::TS_TIMESTAMPS_ARE_ABSOLUTE);
+ }
+
+ if (mNextPTSTimeUs >= 0ll) {
+ sp<AMessage> extra = new AMessage;
+ // Since we are using absolute timestamps, signal an offset of 0 to prevent
+ // ATSParser from skewing the timestamps of access units.
+ extra->setInt64(IStreamListener::kKeyMediaTimeUs, 0);
- mTSParser->signalDiscontinuity(
- ATSParser::DISCONTINUITY_SEEK, extra);
+ mTSParser->signalDiscontinuity(
+ ATSParser::DISCONTINUITY_TIME, extra);
+
+ mAbsoluteTimeAnchorUs = mNextPTSTimeUs;
+ mNextPTSTimeUs = -1ll;
+ mFirstPTSValid = false;
+ }
- mNextPTSTimeUs = -1ll;
+ size_t offset = 0;
+ while (offset + 188 <= buffer->size()) {
+ status_t err = mTSParser->feedTSPacket(buffer->data() + offset, 188);
+
+ if (err != OK) {
+ return err;
}
- size_t offset = 0;
- while (offset < buffer->size()) {
- status_t err = mTSParser->feedTSPacket(buffer->data() + offset, 188);
+ offset += 188;
+ }
+ // setRange to indicate consumed bytes.
+ buffer->setRange(buffer->offset() + offset, buffer->size() - offset);
+
+ status_t err = OK;
+ for (size_t i = mPacketSources.size(); i-- > 0;) {
+ sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
+
+ const char *key;
+ ATSParser::SourceType type;
+ const LiveSession::StreamType stream = mPacketSources.keyAt(i);
+ switch (stream) {
+ case LiveSession::STREAMTYPE_VIDEO:
+ type = ATSParser::VIDEO;
+ key = "timeUsVideo";
+ break;
- if (err != OK) {
- return err;
+ case LiveSession::STREAMTYPE_AUDIO:
+ type = ATSParser::AUDIO;
+ key = "timeUsAudio";
+ break;
+
+ case LiveSession::STREAMTYPE_SUBTITLES:
+ {
+ ALOGE("MPEG2 Transport streams do not contain subtitles.");
+ return ERROR_MALFORMED;
+ break;
}
- offset += 188;
+ default:
+ TRESPASS();
}
- for (size_t i = mPacketSources.size(); i-- > 0;) {
- sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
+ sp<AnotherPacketSource> source =
+ static_cast<AnotherPacketSource *>(
+ mTSParser->getSource(type).get());
- ATSParser::SourceType type;
- switch (mPacketSources.keyAt(i)) {
- case LiveSession::STREAMTYPE_VIDEO:
- type = ATSParser::VIDEO;
- break;
+ if (source == NULL) {
+ continue;
+ }
- case LiveSession::STREAMTYPE_AUDIO:
- type = ATSParser::AUDIO;
- break;
+ int64_t timeUs;
+ sp<ABuffer> accessUnit;
+ status_t finalResult;
+ while (source->hasBufferAvailable(&finalResult)
+ && source->dequeueAccessUnit(&accessUnit) == OK) {
- case LiveSession::STREAMTYPE_SUBTITLES:
- {
- ALOGE("MPEG2 Transport streams do not contain subtitles.");
- return ERROR_MALFORMED;
- break;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ if (mStartup) {
+ if (!mFirstPTSValid) {
+ mFirstTimeUs = timeUs;
+ mFirstPTSValid = true;
+ }
+ if (mStartTimeUsRelative) {
+ timeUs -= mFirstTimeUs;
+ if (timeUs < 0) {
+ timeUs = 0;
+ }
}
- default:
- TRESPASS();
+ if (timeUs < mStartTimeUs) {
+ // buffer up to the closest preceding IDR frame
+ ALOGV("timeUs %" PRId64 " us < mStartTimeUs %" PRId64 " us",
+ timeUs, mStartTimeUs);
+ const char *mime;
+ sp<MetaData> format = source->getFormat();
+ bool isAvc = false;
+ if (format != NULL && format->findCString(kKeyMIMEType, &mime)
+ && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+ isAvc = true;
+ }
+ if (isAvc && IsIDR(accessUnit)) {
+ mVideoBuffer->clear();
+ }
+ if (isAvc) {
+ mVideoBuffer->queueAccessUnit(accessUnit);
+ }
+
+ continue;
+ }
}
- sp<AnotherPacketSource> source =
- static_cast<AnotherPacketSource *>(
- mTSParser->getSource(type).get());
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+ if (mStartTimeUsNotify != NULL && timeUs > mStartTimeUs) {
+ int32_t firstSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
- if (source == NULL) {
- ALOGW("MPEG2 Transport stream does not contain %s data.",
- type == ATSParser::VIDEO ? "video" : "audio");
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+ // mStartup
+ // mStartup is true until we have queued a packet for all the streams
+ // we are fetching. We queue packets whose timestamps are greater than
+ // mStartTimeUs.
+ // mSegmentStartTimeUs >= 0
+ // mSegmentStartTimeUs is non-negative when adapting or switching tracks
+ // mSeqNumber > firstSeqNumberInPlaylist
+ // don't decrement mSeqNumber if it already points to the 1st segment
+ // timeUs - mStartTimeUs > targetDurationUs:
+ // This and the 2 above conditions should only happen when adapting in a live
+ // stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher
+ // would start fetching after timeUs, which should be greater than mStartTimeUs;
+ // the old fetcher would then continue fetching data until timeUs. We don't want
+ // timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to
+ // stop as early as possible. The definition of being "too far ahead" is
+ // arbitrary; here we use targetDurationUs as threshold.
+ if (mStartup && mSegmentStartTimeUs >= 0
+ && mSeqNumber > firstSeqNumberInPlaylist
+ && timeUs - mStartTimeUs > targetDurationUs) {
+ // we just guessed a starting timestamp that is too high when adapting in a
+ // live stream; re-adjust based on the actual timestamp extracted from the
+ // media segment; if we didn't move backward after the re-adjustment
+ // (newSeqNumber), start at least 1 segment prior.
+ int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs);
+ if (newSeqNumber >= mSeqNumber) {
+ --mSeqNumber;
+ } else {
+ mSeqNumber = newSeqNumber;
+ }
+ mStartTimeUsNotify = mNotify->dup();
+ mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
+ return -EAGAIN;
+ }
- mStreamTypeMask &= ~mPacketSources.keyAt(i);
- mPacketSources.removeItemsAt(i);
- continue;
+ int32_t seq;
+ if (!mStartTimeUsNotify->findInt32("discontinuitySeq", &seq)) {
+ mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);
+ }
+ int64_t startTimeUs;
+ if (!mStartTimeUsNotify->findInt64(key, &startTimeUs)) {
+ mStartTimeUsNotify->setInt64(key, timeUs);
+
+ uint32_t streamMask = 0;
+ mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask);
+ streamMask |= mPacketSources.keyAt(i);
+ mStartTimeUsNotify->setInt32("streamMask", streamMask);
+
+ if (streamMask == mStreamTypeMask) {
+ mStartup = false;
+ mStartTimeUsNotify->post();
+ mStartTimeUsNotify.clear();
+ }
+ }
}
- sp<ABuffer> accessUnit;
- status_t finalResult;
- while (source->hasBufferAvailable(&finalResult)
- && source->dequeueAccessUnit(&accessUnit) == OK) {
- // Note that we do NOT dequeue any discontinuities.
-
- packetSource->queueAccessUnit(accessUnit);
+ if (mStopParams != NULL) {
+ // Queue discontinuity in original stream.
+ int32_t discontinuitySeq;
+ int64_t stopTimeUs;
+ if (!mStopParams->findInt32("discontinuitySeq", &discontinuitySeq)
+ || discontinuitySeq > mDiscontinuitySeq
+ || !mStopParams->findInt64(key, &stopTimeUs)
+ || (discontinuitySeq == mDiscontinuitySeq
+ && timeUs >= stopTimeUs)) {
+ packetSource->queueAccessUnit(mSession->createFormatChangeBuffer());
+ mStreamTypeMask &= ~stream;
+ mPacketSources.removeItemsAt(i);
+ break;
+ }
}
- if (packetSource->getFormat() == NULL) {
- packetSource->setFormat(source->getFormat());
+ // Note that we do NOT dequeue any discontinuities except for format change.
+ if (stream == LiveSession::STREAMTYPE_VIDEO) {
+ const bool discard = true;
+ status_t status;
+ while (mVideoBuffer->hasBufferAvailable(&status)) {
+ sp<ABuffer> videoBuffer;
+ mVideoBuffer->dequeueAccessUnit(&videoBuffer);
+ setAccessUnitProperties(videoBuffer, source, discard);
+ packetSource->queueAccessUnit(videoBuffer);
+ }
}
+
+ setAccessUnitProperties(accessUnit, source);
+ packetSource->queueAccessUnit(accessUnit);
}
- return OK;
- } else if (buffer->size() >= 7 && !memcmp("WEBVTT\n", buffer->data(), 7)) {
+ if (err != OK) {
+ break;
+ }
+ }
+
+ if (err != OK) {
+ for (size_t i = mPacketSources.size(); i-- > 0;) {
+ sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
+ packetSource->clear();
+ }
+ return err;
+ }
+
+ if (!mStreamTypeMask) {
+ // Signal that the gap between the original and the new stream has been filled.
+ ALOGV("ERROR OUT OF RANGE");
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ return OK;
+}
+
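A numeric illustration of the re-anchoring above (all values invented): say the target
duration is 6 s, the old fetcher has buffered up to mStartTimeUs = 30 s, and the new
fetcher's guess from mSegmentStartTimeUs lands on segment 12, whose first extracted access
unit carries timeUs = 40 s. Since 40 s - 30 s > 6 s, getSeqNumberWithAnchorTime(40 s) walks
back over two 6-second segments (the anchor drops to 28 s <= 30 s) and mSeqNumber becomes 10;
extractAndQueueAccessUnitsFromTs() then returns -EAGAIN, onDownloadNext() clears the parser
and packet sources, and fetching restarts from segment 10, much closer to where the old
fetcher stopped.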
+/* static */
+bool PlaylistFetcher::bufferStartsWithWebVTTMagicSequence(
+ const sp<ABuffer> &buffer) {
+ size_t pos = 0;
+
+ // skip possible BOM
+ if (buffer->size() >= pos + 3 &&
+ !memcmp("\xef\xbb\xbf", buffer->data() + pos, 3)) {
+ pos += 3;
+ }
+
+ // accept WEBVTT followed by SPACE, TAB or (CR) LF
+ if (buffer->size() < pos + 6 ||
+ memcmp("WEBVTT", buffer->data() + pos, 6)) {
+ return false;
+ }
+ pos += 6;
+
+ if (buffer->size() == pos) {
+ return true;
+ }
+
+ uint8_t sep = buffer->data()[pos];
+ return sep == ' ' || sep == '\t' || sep == '\n' || sep == '\r';
+}
+
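A quick illustration of what the detector above accepts and rejects (the helper is a private
static, so this is a sketch rather than a compilable test):

    // "WEBVTT\n"                 -> true  (magic followed by LF)
    // "\xEF\xBB\xBFWEBVTT cues"  -> true  (UTF-8 BOM skipped, SPACE separator)
    // "WEBVTT"                   -> true  (nothing after the magic)
    // "WEBVTTS..."               -> false (no separator after the magic)
    // "webvtt\n"                 -> false (comparison is case-sensitive)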
+status_t PlaylistFetcher::extractAndQueueAccessUnits(
+ const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta) {
+ if (bufferStartsWithWebVTTMagicSequence(buffer)) {
if (mStreamTypeMask != LiveSession::STREAMTYPE_SUBTITLES) {
ALOGE("This stream only contains subtitles.");
return ERROR_MALFORMED;
@@ -810,6 +1458,9 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
CHECK(itemMeta->findInt64("durationUs", &durationUs));
buffer->meta()->setInt64("timeUs", getSegmentStartTimeUs(mSeqNumber));
buffer->meta()->setInt64("durationUs", durationUs);
+ buffer->meta()->setInt64("segmentStartTimeUs", getSegmentStartTimeUs(mSeqNumber));
+ buffer->meta()->setInt32("discontinuitySeq", mDiscontinuitySeq);
+ buffer->meta()->setInt32("subtitleGeneration", mSubtitleGeneration);
packetSource->queueAccessUnit(buffer);
return OK;
@@ -875,14 +1526,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
firstID3Tag = false;
}
- if (!mFirstPTSValid) {
- mFirstPTSValid = true;
- mFirstPTS = PTS;
- }
- PTS -= mFirstPTS;
-
- int64_t timeUs = (PTS * 100ll) / 9ll + mAbsoluteTimeAnchorUs;
-
if (mStreamTypeMask != LiveSession::STREAMTYPE_AUDIO) {
ALOGW("This stream only contains audio data!");
@@ -903,7 +1546,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
CHECK_EQ(bits.getBits(12), 0xfffu);
bits.skipBits(3); // ID, layer
- bool protection_absent = bits.getBits(1) != 0;
+ bool protection_absent __unused = bits.getBits(1) != 0;
unsigned profile = bits.getBits(2);
CHECK_NE(profile, 3u);
@@ -925,6 +1568,12 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
int32_t sampleRate;
CHECK(packetSource->getFormat()->findInt32(kKeySampleRate, &sampleRate));
+ int64_t timeUs = (PTS * 100ll) / 9ll;
+ if (!mFirstPTSValid) {
+ mFirstPTSValid = true;
+ mFirstTimeUs = timeUs;
+ }
+
size_t offset = 0;
while (offset < buffer->size()) {
const uint8_t *adtsHeader = buffer->data() + offset;
@@ -935,20 +1584,85 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
| (adtsHeader[4] << 3)
| (adtsHeader[5] >> 5);
- CHECK_LE(offset + aac_frame_length, buffer->size());
+ if (aac_frame_length == 0) {
+ const uint8_t *id3Header = adtsHeader;
+ if (!memcmp(id3Header, "ID3", 3)) {
+ ID3 id3(id3Header, buffer->size() - offset, true);
+ if (id3.isValid()) {
+ offset += id3.rawSize();
+ continue;
+ };
+ }
+ return ERROR_MALFORMED;
+ }
- sp<ABuffer> unit = new ABuffer(aac_frame_length);
- memcpy(unit->data(), adtsHeader, aac_frame_length);
+ CHECK_LE(offset + aac_frame_length, buffer->size());
int64_t unitTimeUs = timeUs + numSamples * 1000000ll / sampleRate;
- unit->meta()->setInt64("timeUs", unitTimeUs);
+ offset += aac_frame_length;
// Each AAC frame encodes 1024 samples.
numSamples += 1024;
- packetSource->queueAccessUnit(unit);
+ if (mStartup) {
+ int64_t startTimeUs = unitTimeUs;
+ if (mStartTimeUsRelative) {
+ startTimeUs -= mFirstTimeUs;
+ if (startTimeUs < 0) {
+ startTimeUs = 0;
+ }
+ }
+ if (startTimeUs < mStartTimeUs) {
+ continue;
+ }
- offset += aac_frame_length;
+ if (mStartTimeUsNotify != NULL) {
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+
+ // Duplicated logic from how we handle .ts playlists.
+ if (mStartup && mSegmentStartTimeUs >= 0
+ && timeUs - mStartTimeUs > targetDurationUs) {
+ int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs);
+ if (newSeqNumber >= mSeqNumber) {
+ --mSeqNumber;
+ } else {
+ mSeqNumber = newSeqNumber;
+ }
+ return -EAGAIN;
+ }
+
+ mStartTimeUsNotify->setInt64("timeUsAudio", timeUs);
+ mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);
+ mStartTimeUsNotify->setInt32("streamMask", LiveSession::STREAMTYPE_AUDIO);
+ mStartTimeUsNotify->post();
+ mStartTimeUsNotify.clear();
+ mStartup = false;
+ }
+ }
+
+ if (mStopParams != NULL) {
+ // Queue discontinuity in original stream.
+ int32_t discontinuitySeq;
+ int64_t stopTimeUs;
+ if (!mStopParams->findInt32("discontinuitySeq", &discontinuitySeq)
+ || discontinuitySeq > mDiscontinuitySeq
+ || !mStopParams->findInt64("timeUsAudio", &stopTimeUs)
+ || (discontinuitySeq == mDiscontinuitySeq && unitTimeUs >= stopTimeUs)) {
+ packetSource->queueAccessUnit(mSession->createFormatChangeBuffer());
+ mStreamTypeMask = 0;
+ mPacketSources.clear();
+ return ERROR_OUT_OF_RANGE;
+ }
+ }
+
+ sp<ABuffer> unit = new ABuffer(aac_frame_length);
+ memcpy(unit->data(), adtsHeader, aac_frame_length);
+
+ unit->meta()->setInt64("timeUs", unitTimeUs);
+ setAccessUnitProperties(unit, packetSource);
+ packetSource->queueAccessUnit(unit);
}
return OK;
@@ -973,4 +1687,33 @@ void PlaylistFetcher::updateDuration() {
msg->post();
}
+int64_t PlaylistFetcher::resumeThreshold(const sp<AMessage> &msg) {
+ int64_t durationUs;
+ if (msg->findInt64("durationUs", &durationUs) && durationUs > 0) {
+ return kNumSkipFrames * durationUs;
+ }
+
+ sp<RefBase> obj;
+ msg->findObject("format", &obj);
+ MetaData *format = static_cast<MetaData *>(obj.get());
+
+ const char *mime;
+ CHECK(format->findCString(kKeyMIMEType, &mime));
+ bool audio = !strncasecmp(mime, "audio/", 6);
+ if (audio) {
+ // Assumes 1000 samples per frame.
+ int32_t sampleRate;
+ CHECK(format->findInt32(kKeySampleRate, &sampleRate));
+ return kNumSkipFrames /* frames */ * 1000 /* samples */
+ * (1000000 / sampleRate) /* sample duration (us) */;
+ } else {
+ int32_t frameRate;
+ if (format->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) {
+ return kNumSkipFrames * (1000000 / frameRate);
+ }
+ }
+
+ return 500000ll;
+}
+
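A rough worked example of resumeThreshold() above (invented inputs): when the latest access
unit carries a positive durationUs the threshold is simply kNumSkipFrames * durationUs;
otherwise, for 48 kHz audio each "frame" is treated as 1000 samples, i.e.
1000 * (1000000 / 48000) = 20000 us per frame (integer division), so the threshold is
kNumSkipFrames * 20 ms; for 25 fps video it is kNumSkipFrames * 40 ms; and for non-audio
formats without a usable frame rate the fallback is a flat 500 ms.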
} // namespace android
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 1648e02..4e15f85 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -31,9 +31,12 @@ struct DataSource;
struct HTTPBase;
struct LiveDataSource;
struct M3UParser;
-struct String8;
+class String8;
struct PlaylistFetcher : public AHandler {
+ static const int64_t kMinBufferedDurationUs;
+ static const int32_t kDownloadBlockSize;
+
enum {
kWhatStarted,
kWhatPaused,
@@ -43,12 +46,14 @@ struct PlaylistFetcher : public AHandler {
kWhatTemporarilyDoneFetching,
kWhatPrepared,
kWhatPreparationFailed,
+ kWhatStartedAt,
};
PlaylistFetcher(
const sp<AMessage> &notify,
const sp<LiveSession> &session,
- const char *uri);
+ const char *uri,
+ int32_t subtitleGeneration);
sp<DataSource> getDataSource();
@@ -56,11 +61,21 @@ struct PlaylistFetcher : public AHandler {
const sp<AnotherPacketSource> &audioSource,
const sp<AnotherPacketSource> &videoSource,
const sp<AnotherPacketSource> &subtitleSource,
- int64_t startTimeUs = -1ll);
+ int64_t startTimeUs = -1ll, // starting timestamps
+ int64_t segmentStartTimeUs = -1ll, // starting position within playlist
+ // startTimeUs!=segmentStartTimeUs only when playlist is live
+ int32_t startDiscontinuitySeq = 0,
+ bool adaptive = false);
void pauseAsync();
- void stopAsync();
+ void stopAsync(bool clear = true);
+
+ void resumeUntilAsync(const sp<AMessage> &params);
+
+ uint32_t getStreamTypeMask() const {
+ return mStreamTypeMask;
+ }
protected:
virtual ~PlaylistFetcher();
@@ -76,17 +91,35 @@ private:
kWhatPause = 'paus',
kWhatStop = 'stop',
kWhatMonitorQueue = 'moni',
+ kWhatResumeUntil = 'rsme',
+ kWhatDownloadNext = 'dlnx',
};
- static const int64_t kMinBufferedDurationUs;
+ static const int64_t kMaxMonitorDelayUs;
+ static const int32_t kNumSkipFrames;
+
+ static bool bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer);
+ static bool bufferStartsWithWebVTTMagicSequence(const sp<ABuffer>& buffer);
+ // notifications to mSession
sp<AMessage> mNotify;
+ sp<AMessage> mStartTimeUsNotify;
+
sp<LiveSession> mSession;
AString mURI;
uint32_t mStreamTypeMask;
int64_t mStartTimeUs;
+ // Start time relative to the beginning of the first segment in the initial
+ // playlist. Its value is initialized to a non-negative value only when we are
+ // adapting or switching tracks.
+ int64_t mSegmentStartTimeUs;
+
+ ssize_t mDiscontinuitySeq;
+ bool mStartTimeUsRelative;
+ sp<AMessage> mStopParams; // message containing the latest timestamps we should fetch.
+
KeyedVector<LiveSession::StreamType, sp<AnotherPacketSource> >
mPacketSources;
@@ -97,9 +130,12 @@ private:
int32_t mSeqNumber;
int32_t mNumRetries;
bool mStartup;
+ bool mAdaptive;
+ bool mPrepared;
int64_t mNextPTSTimeUs;
int32_t mMonitorQueueGeneration;
+ const int32_t mSubtitleGeneration;
enum RefreshState {
INITIAL_MINIMUM_RELOAD_DELAY,
@@ -115,15 +151,33 @@ private:
bool mFirstPTSValid;
uint64_t mFirstPTS;
+ int64_t mFirstTimeUs;
int64_t mAbsoluteTimeAnchorUs;
-
+ sp<AnotherPacketSource> mVideoBuffer;
+
+ // Stores the initialization vector to decrypt the next block of cipher text, which can
+ // either be derived from the sequence number, read from the manifest, or copied from
+ // the last block of cipher text (cipher-block chaining).
+ unsigned char mAESInitVec[16];
+
+ // Set first to true if decrypting the first block of a playlist segment. When
+ // first is true, reset the initialization vector based on the available
+ // information in the manifest; otherwise, use the initialization vector as
+ // updated by the last call to AES_cbc_encrypt.
+ //
+ // For the input to decrypt correctly, decryptBuffer must be called on
+ // consecutive byte ranges on block boundaries, e.g. 0..15, 16..47, 48..63,
+ // and so on.
status_t decryptBuffer(
- size_t playlistIndex, const sp<ABuffer> &buffer);
+ size_t playlistIndex, const sp<ABuffer> &buffer,
+ bool first = true);
+ status_t checkDecryptPadding(const sp<ABuffer> &buffer);
- void postMonitorQueue(int64_t delayUs = 0);
+ void postMonitorQueue(int64_t delayUs = 0, int64_t minDelayUs = 0);
void cancelMonitorQueue();
- bool timeToRefreshPlaylist(int64_t nowUs) const;
+ int64_t delayUsToRefreshPlaylist() const;
+ status_t refreshPlaylist();
// Returns the media time in us of the segment specified by seqNumber.
// This is computed by summing the durations of all segments before it.
@@ -131,10 +185,19 @@ private:
status_t onStart(const sp<AMessage> &msg);
void onPause();
- void onStop();
+ void onStop(const sp<AMessage> &msg);
void onMonitorQueue();
void onDownloadNext();
+ // Resume a fetcher to continue until the stopping point stored in msg.
+ status_t onResumeUntil(const sp<AMessage> &msg);
+
+ const sp<ABuffer> &setAccessUnitProperties(
+ const sp<ABuffer> &accessUnit,
+ const sp<AnotherPacketSource> &source,
+ bool discard = false);
+ status_t extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &buffer);
+
status_t extractAndQueueAccessUnits(
const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta);
@@ -143,10 +206,16 @@ private:
void queueDiscontinuity(
ATSParser::DiscontinuityType type, const sp<AMessage> &extra);
+ int32_t getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const;
+ int32_t getSeqNumberForDiscontinuity(size_t discontinuitySeq) const;
int32_t getSeqNumberForTime(int64_t timeUs) const;
void updateDuration();
+ // Before resuming a fetcher in onResumeUntil, check that the remaining duration is
+ // longer than the threshold returned by resumeThreshold.
+ int64_t resumeThreshold(const sp<AMessage> &msg);
+
DISALLOW_EVIL_CONSTRUCTORS(PlaylistFetcher);
};
diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk
index bf6f7bb..2194c38 100644
--- a/media/libstagefright/id3/Android.mk
+++ b/media/libstagefright/id3/Android.mk
@@ -4,6 +4,8 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
ID3.cpp
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_id3
include $(BUILD_STATIC_LIBRARY)
@@ -15,6 +17,8 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
testid3.cpp
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libutils liblog libbinder libstagefright_foundation
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index 1ec4a40..d9491d6 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -41,9 +41,9 @@ struct MemorySource : public DataSource {
}
virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- off64_t available = (offset >= mSize) ? 0ll : mSize - offset;
+ off64_t available = (offset >= (off64_t)mSize) ? 0ll : mSize - offset;
- size_t copy = (available > size) ? size : available;
+ size_t copy = (available > (off64_t)size) ? size : available;
memcpy(data, mData + offset, copy);
return copy;
@@ -172,7 +172,7 @@ struct id3_header {
}
if (size > kMaxMetadataSize) {
- ALOGE("skipping huge ID3 metadata of size %d", size);
+ ALOGE("skipping huge ID3 metadata of size %zu", size);
return false;
}
@@ -468,49 +468,6 @@ void ID3::Iterator::getID(String8 *id) const {
}
}
-static void convertISO8859ToString8(
- const uint8_t *data, size_t size,
- String8 *s) {
- size_t utf8len = 0;
- for (size_t i = 0; i < size; ++i) {
- if (data[i] == '\0') {
- size = i;
- break;
- } else if (data[i] < 0x80) {
- ++utf8len;
- } else {
- utf8len += 2;
- }
- }
-
- if (utf8len == size) {
- // Only ASCII characters present.
-
- s->setTo((const char *)data, size);
- return;
- }
-
- char *tmp = new char[utf8len];
- char *ptr = tmp;
- for (size_t i = 0; i < size; ++i) {
- if (data[i] == '\0') {
- break;
- } else if (data[i] < 0x80) {
- *ptr++ = data[i];
- } else if (data[i] < 0xc0) {
- *ptr++ = 0xc2;
- *ptr++ = data[i];
- } else {
- *ptr++ = 0xc3;
- *ptr++ = data[i] - 64;
- }
- }
-
- s->setTo(tmp, utf8len);
-
- delete[] tmp;
- tmp = NULL;
-}
// the 2nd argument is used to get the data following the \0 in a comment field
void ID3::Iterator::getString(String8 *id, String8 *comment) const {
@@ -543,7 +500,9 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
return;
}
- convertISO8859ToString8(frameData, mFrameSize, id);
+ // this is supposed to be ISO-8859-1, but pass it up as-is to the caller, who will figure
+ // out the real encoding
+ id->setTo((const char*)frameData, mFrameSize);
return;
}
@@ -561,13 +520,13 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
}
if (encoding == 0x00) {
- // ISO 8859-1
- convertISO8859ToString8(frameData + 1, n, id);
+ // supposedly ISO 8859-1
+ id->setTo((const char*)frameData + 1, n);
} else if (encoding == 0x03) {
- // UTF-8
+ // supposedly UTF-8
id->setTo((const char *)(frameData + 1), n);
} else if (encoding == 0x02) {
- // UTF-16 BE, no byte order mark.
+ // supposedly UTF-16 BE, no byte order mark.
// API wants number of characters, not number of bytes...
int len = n / 2;
const char16_t *framedata = (const char16_t *) (frameData + 1);
@@ -583,7 +542,7 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
if (framedatacopy != NULL) {
delete[] framedatacopy;
}
- } else {
+ } else if (encoding == 0x01) {
// UCS-2
// API wants number of characters, not number of bytes...
int len = n / 2;
@@ -602,7 +561,27 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
framedata++;
len--;
}
- id->setTo(framedata, len);
+
+ // check if the resulting data consists entirely of 8-bit values
+ bool eightBit = true;
+ for (int i = 0; i < len; i++) {
+ if (framedata[i] > 0xff) {
+ eightBit = false;
+ break;
+ }
+ }
+ if (eightBit) {
+ // collapse to 8 bit, then let the media scanner client figure out the real encoding
+ char *frame8 = new char[len];
+ for (int i = 0; i < len; i++) {
+ frame8[i] = framedata[i];
+ }
+ id->setTo(frame8, len);
+ delete [] frame8;
+ } else {
+ id->setTo(framedata, len);
+ }
+
if (framedatacopy != NULL) {
delete[] framedatacopy;
}
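A small example of the collapse above (hypothetical input): a UCS-2 frame holding
"Mot\u00F6rhead" consists only of code units <= 0xFF, so it is narrowed to the 8-bit byte
string "Mot\xF6rhead" and passed up as-is, letting a higher-level encoding detector decide
whether those bytes are ISO-8859-1, a Windows code page, or something else; a frame that
contains, say, CJK characters keeps the 16-bit path.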
@@ -651,11 +630,14 @@ void ID3::Iterator::findFrame() {
| (mParent.mData[mOffset + 4] << 8)
| mParent.mData[mOffset + 5];
- mFrameSize += 6;
+ if (mFrameSize == 0) {
+ return;
+ }
+ mFrameSize += 6; // add tag id and size field
if (mOffset + mFrameSize > mParent.mSize) {
- ALOGV("partial frame at offset %d (size = %d, bytes-remaining = %d)",
- mOffset, mFrameSize, mParent.mSize - mOffset - 6);
+ ALOGV("partial frame at offset %zu (size = %zu, bytes-remaining = %zu)",
+ mOffset, mFrameSize, mParent.mSize - mOffset - (size_t)6);
return;
}
@@ -692,11 +674,15 @@ void ID3::Iterator::findFrame() {
baseSize = U32_AT(&mParent.mData[mOffset + 4]);
}
- mFrameSize = 10 + baseSize;
+ if (baseSize == 0) {
+ return;
+ }
+
+ mFrameSize = 10 + baseSize; // add tag id, size field and flags
if (mOffset + mFrameSize > mParent.mSize) {
- ALOGV("partial frame at offset %d (size = %d, bytes-remaining = %d)",
- mOffset, mFrameSize, mParent.mSize - mOffset - 10);
+ ALOGV("partial frame at offset %zu (size = %zu, bytes-remaining = %zu)",
+ mOffset, mFrameSize, mParent.mSize - mOffset - (size_t)10);
return;
}
@@ -814,8 +800,8 @@ ID3::getAlbumArt(size_t *length, String8 *mime) const {
mime->setTo((const char *)&data[1]);
size_t mimeLen = strlen((const char *)&data[1]) + 1;
- uint8_t picType = data[1 + mimeLen];
#if 0
+ uint8_t picType = data[1 + mimeLen];
if (picType != 0x03) {
// Front Cover Art
it.next();
diff --git a/media/libstagefright/include/AACEncoder.h b/media/libstagefright/include/AACEncoder.h
index 3d5fc60..52beb0e 100644
--- a/media/libstagefright/include/AACEncoder.h
+++ b/media/libstagefright/include/AACEncoder.h
@@ -25,7 +25,7 @@ struct VO_MEM_OPERATOR;
namespace android {
-struct MediaBufferGroup;
+class MediaBufferGroup;
class AACEncoder: public MediaSource {
public:
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 271df8e..77d65e0 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -32,6 +32,7 @@
namespace android {
struct AudioPlayer;
+struct ClockEstimator;
struct DataSource;
struct MediaBuffer;
struct MediaExtractor;
@@ -63,6 +64,7 @@ struct AwesomePlayer {
void setUID(uid_t uid);
status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *uri,
const KeyedVector<String8, String8> *headers = NULL);
@@ -159,6 +161,7 @@ private:
SystemTimeSource mSystemTimeSource;
TimeSource *mTimeSource;
+ sp<IMediaHTTPService> mHTTPService;
String8 mUri;
KeyedVector<String8, String8> mUriHeaders;
@@ -234,6 +237,7 @@ private:
MediaBuffer *mVideoBuffer;
+ sp<ClockEstimator> mClockEstimator;
sp<HTTPBase> mConnectingDataSource;
sp<NuCachedSource2> mCachedSource;
@@ -247,6 +251,7 @@ private:
sp<MediaExtractor> mExtractor;
status_t setDataSource_l(
+ const sp<IMediaHTTPService> &httpService,
const char *uri,
const KeyedVector<String8, String8> *headers = NULL);
@@ -293,6 +298,7 @@ private:
bool getBitrate(int64_t *bitrate);
+ int64_t estimateRealTimeUs(TimeSource *ts, int64_t systemTimeUs);
void finishSeekIfNecessary(int64_t videoTimeUs);
void ensureCacheIsFetching_l();
diff --git a/media/libstagefright/include/ChromiumHTTPDataSource.h b/media/libstagefright/include/ChromiumHTTPDataSource.h
deleted file mode 100644
index da188dd..0000000
--- a/media/libstagefright/include/ChromiumHTTPDataSource.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CHROME_HTTP_DATA_SOURCE_H_
-
-#define CHROME_HTTP_DATA_SOURCE_H_
-
-#include <media/stagefright/foundation/AString.h>
-#include <utils/threads.h>
-
-#include "HTTPBase.h"
-
-namespace android {
-
-struct SfDelegate;
-
-struct ChromiumHTTPDataSource : public HTTPBase {
- ChromiumHTTPDataSource(uint32_t flags = 0);
-
- virtual status_t connect(
- const char *uri,
- const KeyedVector<String8, String8> *headers = NULL,
- off64_t offset = 0);
-
- virtual void disconnect();
-
- virtual status_t initCheck() const;
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off64_t *size);
- virtual uint32_t flags();
-
- virtual sp<DecryptHandle> DrmInitialization(const char *mime);
-
- virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
-
- virtual String8 getUri();
-
- virtual String8 getMIMEType() const;
-
- virtual status_t reconnectAtOffset(off64_t offset);
-
- static status_t UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
-protected:
- virtual ~ChromiumHTTPDataSource();
-
-private:
- friend struct SfDelegate;
-
- enum State {
- DISCONNECTED,
- CONNECTING,
- CONNECTED,
- READING,
- DISCONNECTING
- };
-
- const uint32_t mFlags;
-
- mutable Mutex mLock;
- Condition mCondition;
-
- State mState;
-
- SfDelegate *mDelegate;
-
- AString mURI;
- KeyedVector<String8, String8> mHeaders;
-
- off64_t mCurrentOffset;
-
- // Any connection error or the result of a read operation
- // (for the lattter this is the number of bytes read, if successful).
- ssize_t mIOResult;
-
- int64_t mContentSize;
-
- String8 mContentType;
-
- sp<DecryptHandle> mDecryptHandle;
- DrmManagerClient *mDrmManagerClient;
-
- void disconnect_l();
-
- status_t connect_l(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- static void InitiateRead(
- ChromiumHTTPDataSource *me, void *data, size_t size);
-
- void initiateRead(void *data, size_t size);
-
- void onConnectionEstablished(
- int64_t contentSize, const char *contentType);
-
- void onConnectionFailed(status_t err);
- void onReadCompleted(ssize_t size);
- void onDisconnectComplete();
- void onRedirect(const char *url);
-
- void clearDRMState_l();
-
- DISALLOW_EVIL_CONSTRUCTORS(ChromiumHTTPDataSource);
-};
-
-} // namespace android
-
-#endif // CHROME_HTTP_DATA_SOURCE_H_
diff --git a/media/libstagefright/include/FragmentedMP4Parser.h b/media/libstagefright/include/FragmentedMP4Parser.h
deleted file mode 100644
index dbe02b8..0000000
--- a/media/libstagefright/include/FragmentedMP4Parser.h
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PARSER_H_
-
-#define PARSER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/DataSource.h>
-#include <utils/Vector.h>
-
-namespace android {
-
-struct ABuffer;
-
-struct FragmentedMP4Parser : public AHandler {
- struct Source : public RefBase {
- Source() {}
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
- virtual bool isSeekable() = 0;
-
- protected:
- virtual ~Source() {}
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(Source);
- };
-
- FragmentedMP4Parser();
-
- void start(const char *filename);
- void start(const sp<Source> &source);
- void start(sp<DataSource> &source);
-
- sp<AMessage> getFormat(bool audio, bool synchronous = false);
- status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit, bool synchronous = false);
- status_t seekTo(bool audio, int64_t timeUs);
- bool isSeekable() const;
-
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-protected:
- virtual ~FragmentedMP4Parser();
-
-private:
- enum {
- kWhatStart,
- kWhatProceed,
- kWhatReadMore,
- kWhatGetFormat,
- kWhatDequeueAccessUnit,
- kWhatSeekTo,
- };
-
- struct TrackFragment;
- struct DynamicTrackFragment;
- struct StaticTrackFragment;
-
- struct DispatchEntry {
- uint32_t mType;
- uint32_t mParentType;
- status_t (FragmentedMP4Parser::*mHandler)(uint32_t, size_t, uint64_t);
- };
-
- struct Container {
- uint64_t mOffset;
- uint64_t mBytesRemaining;
- uint32_t mType;
- bool mExtendsToEOF;
- };
-
- struct SampleDescription {
- uint32_t mType;
- uint16_t mDataRefIndex;
-
- sp<AMessage> mFormat;
- };
-
- struct SampleInfo {
- off64_t mOffset;
- size_t mSize;
- uint32_t mPresentationTime;
- size_t mSampleDescIndex;
- uint32_t mFlags;
- };
-
- struct MediaDataInfo {
- sp<ABuffer> mBuffer;
- off64_t mOffset;
- };
-
- struct SidxEntry {
- size_t mSize;
- uint32_t mDurationUs;
- };
-
- struct TrackInfo {
- enum Flags {
- kTrackEnabled = 0x01,
- kTrackInMovie = 0x02,
- kTrackInPreview = 0x04,
- };
-
- uint32_t mTrackID;
- uint32_t mFlags;
- uint32_t mDuration; // This is the duration in terms of movie timescale!
- uint64_t mSidxDuration; // usec, from sidx box, which can use a different timescale
-
- uint32_t mMediaTimeScale;
-
- uint32_t mMediaHandlerType;
- Vector<SampleDescription> mSampleDescs;
-
- // from track extends:
- uint32_t mDefaultSampleDescriptionIndex;
- uint32_t mDefaultSampleDuration;
- uint32_t mDefaultSampleSize;
- uint32_t mDefaultSampleFlags;
-
- uint32_t mDecodingTime;
-
- Vector<SidxEntry> mSidx;
- sp<StaticTrackFragment> mStaticFragment;
- List<sp<TrackFragment> > mFragments;
- };
-
- struct TrackFragmentHeaderInfo {
- enum Flags {
- kBaseDataOffsetPresent = 0x01,
- kSampleDescriptionIndexPresent = 0x02,
- kDefaultSampleDurationPresent = 0x08,
- kDefaultSampleSizePresent = 0x10,
- kDefaultSampleFlagsPresent = 0x20,
- kDurationIsEmpty = 0x10000,
- };
-
- uint32_t mTrackID;
- uint32_t mFlags;
- uint64_t mBaseDataOffset;
- uint32_t mSampleDescriptionIndex;
- uint32_t mDefaultSampleDuration;
- uint32_t mDefaultSampleSize;
- uint32_t mDefaultSampleFlags;
-
- uint64_t mDataOffset;
- };
-
- static const DispatchEntry kDispatchTable[];
-
- sp<Source> mSource;
- off_t mBufferPos;
- bool mSuspended;
- bool mDoneWithMoov;
- off_t mFirstMoofOffset; // used as the starting point for offsets calculated from the sidx box
- sp<ABuffer> mBuffer;
- Vector<Container> mStack;
- KeyedVector<uint32_t, TrackInfo> mTracks; // TrackInfo by trackID
- Vector<MediaDataInfo> mMediaData;
-
- uint32_t mCurrentTrackID;
-
- status_t mFinalResult;
-
- TrackFragmentHeaderInfo mTrackFragmentHeaderInfo;
-
- status_t onProceed();
- status_t onDequeueAccessUnit(size_t trackIndex, sp<ABuffer> *accessUnit);
- status_t onSeekTo(bool wantAudio, int64_t position);
-
- void enter(off64_t offset, uint32_t type, uint64_t size);
-
- uint16_t readU16(size_t offset);
- uint32_t readU32(size_t offset);
- uint64_t readU64(size_t offset);
- void skip(off_t distance);
- status_t need(size_t size);
- bool fitsContainer(uint64_t size) const;
-
- status_t parseTrackHeader(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseMediaHeader(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseMediaHandler(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseTrackExtends(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseTrackFragmentHeader(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseTrackFragmentRun(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseVisualSampleEntry(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseAudioSampleEntry(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSampleSizes(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseCompactSampleSizes(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSampleToChunk(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets64(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseAVCCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseESDSCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseMediaData(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSegmentIndex(
- uint32_t type, size_t offset, uint64_t size);
-
- TrackInfo *editTrack(uint32_t trackID, bool createIfNecessary = false);
-
- ssize_t findTrack(bool wantAudio) const;
-
- status_t makeAccessUnit(
- TrackInfo *info,
- const SampleInfo &sample,
- const MediaDataInfo &mdatInfo,
- sp<ABuffer> *accessUnit);
-
- status_t getSample(
- TrackInfo *info,
- sp<TrackFragment> *fragment,
- SampleInfo *sampleInfo);
-
- static int CompareSampleLocation(
- const SampleInfo &sample, const MediaDataInfo &mdatInfo);
-
- void resumeIfNecessary();
-
- void copyBuffer(
- sp<ABuffer> *dst,
- size_t offset, uint64_t size) const;
-
- DISALLOW_EVIL_CONSTRUCTORS(FragmentedMP4Parser);
-};
-
-} // namespace android
-
-#endif // PARSER_H_
-
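
For reference, a minimal standalone reader for the flag-gated 'tfhd' layout that the removed TrackFragmentHeaderInfo above describes: each optional field is present only when its bit is set in the box flags. The flag values match the removed enum; parseTfhd() and readU32() are hypothetical helpers written only for illustration (no bounds checking), not the parser being deleted here.

#include <cstddef>
#include <cstdint>
#include <cstdio>

enum TfhdFlags : uint32_t {
    kBaseDataOffsetPresent         = 0x01,
    kSampleDescriptionIndexPresent = 0x02,
    kDefaultSampleDurationPresent  = 0x08,
    kDefaultSampleSizePresent      = 0x10,
    kDefaultSampleFlagsPresent     = 0x20,
    kDurationIsEmpty               = 0x10000,
};

struct Tfhd {
    uint32_t trackID = 0;
    uint64_t baseDataOffset = 0;
    uint32_t defaultSampleDuration = 0;
    uint32_t defaultSampleSize = 0;
};

static uint32_t readU32(const uint8_t *p) {
    return (uint32_t)p[0] << 24 | p[1] << 16 | p[2] << 8 | p[3];
}

static Tfhd parseTfhd(const uint8_t *data) {
    Tfhd out;
    size_t off = 0;
    uint32_t flags = readU32(data + off) & 0xffffff;  // version(8) + flags(24)
    off += 4;
    out.trackID = readU32(data + off); off += 4;
    if (flags & kBaseDataOffsetPresent) {
        out.baseDataOffset =
                ((uint64_t)readU32(data + off) << 32) | readU32(data + off + 4);
        off += 8;
    }
    if (flags & kSampleDescriptionIndexPresent) off += 4;  // skipped in this sketch
    if (flags & kDefaultSampleDurationPresent) {
        out.defaultSampleDuration = readU32(data + off); off += 4;
    }
    if (flags & kDefaultSampleSizePresent) {
        out.defaultSampleSize = readU32(data + off); off += 4;
    }
    return out;
}

int main() {
    // flags = duration + size present, trackID = 1, duration = 1024, size = 4096
    const uint8_t box[] = {0,0,0,0x18, 0,0,0,1, 0,0,4,0, 0,0,16,0};
    Tfhd t = parseTfhd(box);
    printf("track %u: default duration %u, default size %u\n",
           t.trackID, t.defaultSampleDuration, t.defaultSampleSize);
    return 0;
}
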
diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h
index d4b7f9f..0c66e27 100644
--- a/media/libstagefright/include/HTTPBase.h
+++ b/media/libstagefright/include/HTTPBase.h
@@ -48,13 +48,7 @@ struct HTTPBase : public DataSource {
virtual status_t setBandwidthStatCollectFreq(int32_t freqMs);
- static status_t UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
- void setUID(uid_t uid);
- bool getUID(uid_t *uid) const;
-
- static sp<HTTPBase> Create(uint32_t flags = 0);
+ virtual void setBandwidthHistorySize(size_t numHistoryItems);
static void RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag);
static void UnRegisterSocketUserTag(int sockfd);
@@ -63,7 +57,7 @@ struct HTTPBase : public DataSource {
static void UnRegisterSocketUserMark(int sockfd);
protected:
- void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
+ virtual void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
private:
struct BandwidthEntry {
@@ -77,6 +71,7 @@ private:
size_t mNumBandwidthHistoryItems;
int64_t mTotalTransferTimeUs;
size_t mTotalTransferBytes;
+ size_t mMaxBandwidthHistoryItems;
enum {
kMinBandwidthCollectFreqMs = 1000, // 1 second
@@ -87,9 +82,6 @@ private:
int32_t mPrevEstimatedBandWidthKbps;
int32_t mBandWidthCollectFreqMs;
- bool mUIDValid;
- uid_t mUID;
-
DISALLOW_EVIL_CONSTRUCTORS(HTTPBase);
};
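
The new setBandwidthHistorySize()/mMaxBandwidthHistoryItems pair above bounds how many transfer measurements the estimator keeps. A minimal standalone sketch of such a bounded moving-average estimate follows; BandwidthEntry mirrors the private struct above, but the std::deque, the class name, and the kbps arithmetic are illustrative assumptions, not the HTTPBase implementation.

#include <cstdint>
#include <cstdio>
#include <deque>

struct BandwidthEntry {      // mirrors HTTPBase's private struct
    int64_t mDelayUs;        // how long the transfer took
    size_t  mNumBytes;       // how many bytes it moved
};

class BandwidthHistory {
public:
    explicit BandwidthHistory(size_t maxItems) : mMaxItems(maxItems) {}

    void addMeasurement(size_t numBytes, int64_t delayUs) {
        mEntries.push_back({delayUs, numBytes});
        mTotalBytes += numBytes;
        mTotalTimeUs += delayUs;
        if (mEntries.size() > mMaxItems) {       // evict the oldest sample
            mTotalBytes -= mEntries.front().mNumBytes;
            mTotalTimeUs -= mEntries.front().mDelayUs;
            mEntries.pop_front();
        }
    }

    // Bandwidth over the retained window, in kilobits per second.
    bool estimateKbps(int32_t *kbps) const {
        if (mTotalTimeUs == 0) return false;
        // bits per microsecond * 1000 == kilobits per second
        *kbps = static_cast<int32_t>((int64_t)mTotalBytes * 8 * 1000 / mTotalTimeUs);
        return true;
    }

private:
    std::deque<BandwidthEntry> mEntries;
    size_t  mMaxItems;
    size_t  mTotalBytes = 0;
    int64_t mTotalTimeUs = 0;
};

int main() {
    BandwidthHistory history(100);             // cf. setBandwidthHistorySize(100)
    history.addMeasurement(64 * 1024, 50000);  // 64 KiB in 50 ms
    int32_t kbps = 0;
    if (history.estimateKbps(&kbps)) printf("~%d kbps\n", kbps);
    return 0;
}
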
diff --git a/media/libstagefright/include/ID3.h b/media/libstagefright/include/ID3.h
index e83f3ef..c2c4a6d 100644
--- a/media/libstagefright/include/ID3.h
+++ b/media/libstagefright/include/ID3.h
@@ -22,8 +22,8 @@
namespace android {
-struct DataSource;
-struct String8;
+class DataSource;
+class String8;
struct ID3 {
enum Version {
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h
index c5e86a6..db1187d 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/libstagefright/include/MPEG2TSExtractor.h
@@ -28,7 +28,7 @@ namespace android {
struct AMessage;
struct AnotherPacketSource;
struct ATSParser;
-struct DataSource;
+class DataSource;
struct MPEG2TSSource;
struct String8;
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index 7b4bc6d..1fe6fcf 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -39,6 +39,14 @@ struct SidxEntry {
uint32_t mDurationUs;
};
+struct Trex {
+ uint32_t track_ID;
+ uint32_t default_sample_description_index;
+ uint32_t default_sample_duration;
+ uint32_t default_sample_size;
+ uint32_t default_sample_flags;
+};
+
class MPEG4Extractor : public MediaExtractor {
public:
// Extractor assumes ownership of "source".
@@ -74,11 +82,12 @@ private:
};
Vector<SidxEntry> mSidxEntries;
- uint64_t mSidxDuration;
off64_t mMoofOffset;
Vector<PsshInfo> mPssh;
+ Vector<Trex> mTrex;
+
sp<DataSource> mDataSource;
status_t mInitCheck;
bool mHasVideo;
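
The new Trex struct above carries the per-track defaults from the 'trex' box. A hedged sketch of how such defaults are typically consumed: a fragment's own tfhd/trun values win when present, otherwise the trex default applies. resolveSample() is a hypothetical helper written for illustration, not MPEG4Extractor code.

#include <cstdint>
#include <cstdio>
#include <optional>

struct Trex {
    uint32_t track_ID;
    uint32_t default_sample_description_index;
    uint32_t default_sample_duration;
    uint32_t default_sample_size;
    uint32_t default_sample_flags;
};

struct SampleFields {
    uint32_t duration;
    uint32_t size;
    uint32_t flags;
};

// Prefer values carried in the fragment; fall back to the trex defaults.
static SampleFields resolveSample(const Trex &trex,
                                  std::optional<uint32_t> fragDuration,
                                  std::optional<uint32_t> fragSize,
                                  std::optional<uint32_t> fragFlags) {
    return {
        fragDuration.value_or(trex.default_sample_duration),
        fragSize.value_or(trex.default_sample_size),
        fragFlags.value_or(trex.default_sample_flags),
    };
}

int main() {
    Trex trex = {1, 1, 1024, 0, 0x10000};   // e.g. a 1024-tick default duration
    SampleFields s = resolveSample(trex, std::nullopt, 4096, std::nullopt);
    printf("duration=%u size=%u flags=0x%x\n", s.duration, s.size, s.flags);
    return 0;
}
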
diff --git a/media/libstagefright/include/MidiExtractor.h b/media/libstagefright/include/MidiExtractor.h
new file mode 100644
index 0000000..9a2abc0
--- /dev/null
+++ b/media/libstagefright/include/MidiExtractor.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MIDI_EXTRACTOR_H_
+#define MIDI_EXTRACTOR_H_
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/MidiIoWrapper.h>
+#include <utils/String8.h>
+#include <libsonivox/eas.h>
+
+namespace android {
+
+class MidiEngine : public RefBase {
+public:
+ MidiEngine(const sp<DataSource> &dataSource,
+ const sp<MetaData> &fileMetadata,
+ const sp<MetaData> &trackMetadata);
+ ~MidiEngine();
+
+ status_t initCheck();
+
+ status_t allocateBuffers();
+ status_t releaseBuffers();
+ status_t seekTo(int64_t positionUs);
+ MediaBuffer* readBuffer();
+private:
+ sp<MidiIoWrapper> mIoWrapper;
+ MediaBufferGroup *mGroup;
+ EAS_DATA_HANDLE mEasData;
+ EAS_HANDLE mEasHandle;
+ const S_EAS_LIB_CONFIG* mEasConfig;
+ bool mIsInitialized;
+};
+
+class MidiExtractor : public MediaExtractor {
+
+public:
+ // Extractor assumes ownership of source
+ MidiExtractor(const sp<DataSource> &source);
+
+ virtual size_t countTracks();
+ virtual sp<MediaSource> getTrack(size_t index);
+ virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+
+ virtual sp<MetaData> getMetaData();
+
+protected:
+ virtual ~MidiExtractor();
+
+private:
+ sp<DataSource> mDataSource;
+ status_t mInitCheck;
+ sp<MetaData> mFileMetadata;
+
+ // There is only one track
+ sp<MetaData> mTrackMetadata;
+
+ sp<MidiEngine> mEngine;
+
+ EAS_DATA_HANDLE mEasData;
+ EAS_HANDLE mEasHandle;
+ EAS_PCM* mAudioBuffer;
+ EAS_I32 mPlayTime;
+ EAS_I32 mDuration;
+ EAS_STATE mState;
+ EAS_FILE mFileLocator;
+
+ MidiExtractor(const MidiExtractor &);
+ MidiExtractor &operator=(const MidiExtractor &);
+
+};
+
+bool SniffMidi(const sp<DataSource> &source, String8 *mimeType,
+ float *confidence, sp<AMessage> *);
+
+} // namespace android
+
+#endif // MIDI_EXTRACTOR_H_
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 5db4b4b..4252706 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -37,6 +37,8 @@ struct NuCachedSource2 : public DataSource {
virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+ virtual void disconnect();
+
virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
@@ -103,6 +105,7 @@ private:
off64_t mLastAccessPos;
sp<AMessage> mAsyncResult;
bool mFetching;
+ bool mDisconnecting;
int64_t mLastFetchTimeUs;
int32_t mNumRetriesLeft;
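
The new disconnect() entry point and mDisconnecting flag above let another thread abort a blocked read. A standalone sketch of that flag-plus-condition-variable pattern, with hypothetical names; only the wake-up mechanism is the point here, not the NuCachedSource2 internals.

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

class DisconnectableSource {
public:
    // Returns false if disconnect() was called before data became available.
    bool waitForData() {
        std::unique_lock<std::mutex> lock(mLock);
        mCondition.wait(lock, [this] { return mHaveData || mDisconnecting; });
        return !mDisconnecting;
    }
    void supplyData() {
        std::lock_guard<std::mutex> lock(mLock);
        mHaveData = true;
        mCondition.notify_all();
    }
    void disconnect() {
        std::lock_guard<std::mutex> lock(mLock);
        mDisconnecting = true;       // wakes any reader blocked in waitForData()
        mCondition.notify_all();
    }
private:
    std::mutex mLock;
    std::condition_variable mCondition;
    bool mHaveData = false;
    bool mDisconnecting = false;
};

int main() {
    DisconnectableSource source;
    std::thread reader([&] {
        printf("%s\n", source.waitForData() ? "got data" : "disconnected");
    });
    std::this_thread::sleep_for(std::chrono::milliseconds(10));
    source.disconnect();             // abort the pending read
    reader.join();
    return 0;
}
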
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 31a5077..e8c4970 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -75,6 +75,10 @@ public:
node_id node, OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 max_frame_width, OMX_U32 max_frame_height);
+ virtual status_t configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer);
@@ -134,10 +138,10 @@ public:
OMX_IN OMX_PTR pEventData);
OMX_ERRORTYPE OnEmptyBufferDone(
- node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
OMX_ERRORTYPE OnFillBufferDone(
- node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
void invalidateNodeID(node_id node);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 339179e..104dcfc 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -31,7 +31,7 @@ struct GraphicBufferSource;
struct OMXNodeInstance {
OMXNodeInstance(
- OMX *owner, const sp<IOMXObserver> &observer);
+ OMX *owner, const sp<IOMXObserver> &observer, const char *name);
void setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle);
@@ -62,6 +62,10 @@ struct OMXNodeInstance {
OMX_U32 portIndex, OMX_BOOL enable,
OMX_U32 maxFrameWidth, OMX_U32 maxFrameHeight);
+ status_t configureVideoTunnelMode(
+ OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle);
+
status_t useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer);
@@ -138,12 +142,37 @@ private:
OMX::buffer_id mID;
};
Vector<ActiveBuffer> mActiveBuffers;
+#ifdef __LP64__
+ Mutex mBufferIDLock;
+ uint32_t mBufferIDCount;
+ KeyedVector<OMX::buffer_id, OMX_BUFFERHEADERTYPE *> mBufferIDToBufferHeader;
+ KeyedVector<OMX_BUFFERHEADERTYPE *, OMX::buffer_id> mBufferHeaderToBufferID;
+#endif
+
+ // For debug support
+ char *mName;
+ int DEBUG;
+ size_t mNumPortBuffers[2]; // modified under mLock, read outside for debug
+ Mutex mDebugLock;
+ // following are modified and read under mDebugLock
+ int DEBUG_BUMP;
+ SortedVector<OMX_BUFFERHEADERTYPE *> mInputBuffersWithCodec, mOutputBuffersWithCodec;
+ size_t mDebugLevelBumpPendingBuffers[2];
+ void bumpDebugLevel_l(size_t numInputBuffers, size_t numOutputBuffers);
+ void unbumpDebugLevel_l(size_t portIndex);
~OMXNodeInstance();
void addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id);
void removeActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id);
void freeActiveBuffers();
+
+ // For buffer id management
+ OMX::buffer_id makeBufferID(OMX_BUFFERHEADERTYPE *bufferHeader);
+ OMX_BUFFERHEADERTYPE *findBufferHeader(OMX::buffer_id buffer);
+ OMX::buffer_id findBufferID(OMX_BUFFERHEADERTYPE *bufferHeader);
+ void invalidateBufferID(OMX::buffer_id buffer);
+
status_t useGraphicBuffer2_l(
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
OMX::buffer_id *buffer);
@@ -165,7 +194,13 @@ private:
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
- status_t storeMetaDataInBuffers_l(OMX_U32 portIndex, OMX_BOOL enable);
+ status_t storeMetaDataInBuffers_l(
+ OMX_U32 portIndex, OMX_BOOL enable,
+ OMX_BOOL useGraphicBuffer, OMX_BOOL *usingGraphicBufferInMeta);
+
+ status_t emptyBuffer_l(
+ OMX_BUFFERHEADERTYPE *header,
+ OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr);
sp<GraphicBufferSource> getGraphicBufferSource();
void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource);
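
The __LP64__ block and the makeBufferID()/findBufferHeader()/invalidateBufferID() declarations above exist because a 64-bit pointer no longer fits in a 32-bit buffer_id, so IDs are issued from a counter and mapped to headers both ways under a lock. A standalone sketch of that indirection; std::unordered_map and std::mutex stand in for KeyedVector and Mutex, and BufferHeader is a placeholder for OMX_BUFFERHEADERTYPE.

#include <cstdint>
#include <cstdio>
#include <mutex>
#include <unordered_map>

struct BufferHeader { int payload; };   // stand-in for OMX_BUFFERHEADERTYPE
using buffer_id = uint32_t;

class BufferIdMap {
public:
    buffer_id makeBufferID(BufferHeader *header) {
        std::lock_guard<std::mutex> lock(mLock);
        buffer_id id = ++mCount;                 // never hand out id 0
        mIdToHeader[id] = header;
        mHeaderToId[header] = id;
        return id;
    }
    BufferHeader *findBufferHeader(buffer_id id) {
        std::lock_guard<std::mutex> lock(mLock);
        auto it = mIdToHeader.find(id);
        return it == mIdToHeader.end() ? nullptr : it->second;
    }
    buffer_id findBufferID(BufferHeader *header) {
        std::lock_guard<std::mutex> lock(mLock);
        auto it = mHeaderToId.find(header);
        return it == mHeaderToId.end() ? 0 : it->second;
    }
    void invalidateBufferID(buffer_id id) {
        std::lock_guard<std::mutex> lock(mLock);
        auto it = mIdToHeader.find(id);
        if (it == mIdToHeader.end()) return;
        mHeaderToId.erase(it->second);
        mIdToHeader.erase(it);
    }
private:
    std::mutex mLock;
    uint32_t mCount = 0;
    std::unordered_map<buffer_id, BufferHeader *> mIdToHeader;
    std::unordered_map<BufferHeader *, buffer_id> mHeaderToId;
};

int main() {
    BufferIdMap map;
    BufferHeader header{42};
    buffer_id id = map.makeBufferID(&header);
    printf("id %u -> header %p (reverse lookup: %u)\n",
           id, (void *)map.findBufferHeader(id), map.findBufferID(&header));
    map.invalidateBufferID(id);
    return 0;
}
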
diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h
index ca59dc0..2c4f543 100644
--- a/media/libstagefright/include/SDPLoader.h
+++ b/media/libstagefright/include/SDPLoader.h
@@ -25,6 +25,7 @@
namespace android {
struct HTTPBase;
+struct IMediaHTTPService;
struct SDPLoader : public AHandler {
enum Flags {
@@ -34,7 +35,10 @@ struct SDPLoader : public AHandler {
enum {
kWhatSDPLoaded = 'sdpl'
};
- SDPLoader(const sp<AMessage> &notify, uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+ SDPLoader(
+ const sp<AMessage> &notify,
+ uint32_t flags,
+ const sp<IMediaHTTPService> &httpService);
void load(const char* url, const KeyedVector<String8, String8> *headers);
@@ -55,8 +59,6 @@ private:
sp<AMessage> mNotify;
const char* mUrl;
uint32_t mFlags;
- bool mUIDValid;
- uid_t mUID;
sp<ALooper> mNetLooper;
bool mCancelled;
diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h
index b5a043c..60c9e7e 100644
--- a/media/libstagefright/include/SampleIterator.h
+++ b/media/libstagefright/include/SampleIterator.h
@@ -30,6 +30,7 @@ struct SampleIterator {
off64_t getSampleOffset() const { return mCurrentSampleOffset; }
size_t getSampleSize() const { return mCurrentSampleSize; }
uint32_t getSampleTime() const { return mCurrentSampleTime; }
+ uint32_t getSampleDuration() const { return mCurrentSampleDuration; }
status_t getSampleSizeDirect(
uint32_t sampleIndex, size_t *size);
@@ -61,11 +62,12 @@ private:
off64_t mCurrentSampleOffset;
size_t mCurrentSampleSize;
uint32_t mCurrentSampleTime;
+ uint32_t mCurrentSampleDuration;
void reset();
status_t findChunkRange(uint32_t sampleIndex);
status_t getChunkOffset(uint32_t chunk, off64_t *offset);
- status_t findSampleTime(uint32_t sampleIndex, uint32_t *time);
+ status_t findSampleTimeAndDuration(uint32_t sampleIndex, uint32_t *time, uint32_t *duration);
SampleIterator(const SampleIterator &);
SampleIterator &operator=(const SampleIterator &);
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index 847dff7..d06df7b 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -66,7 +66,8 @@ public:
off64_t *offset,
size_t *size,
uint32_t *compositionTime,
- bool *isSyncSample = NULL);
+ bool *isSyncSample = NULL,
+ uint32_t *sampleDuration = NULL);
enum {
kFlagBefore,
@@ -74,7 +75,8 @@ public:
kFlagClosest
};
status_t findSampleAtTime(
- uint32_t req_time, uint32_t *sample_index, uint32_t flags);
+ uint64_t req_time, uint64_t scale_num, uint64_t scale_den,
+ uint32_t *sample_index, uint32_t flags);
status_t findSyncSampleNear(
uint32_t start_sample_index, uint32_t *sample_index,
@@ -137,6 +139,13 @@ private:
friend struct SampleIterator;
+ // normally we don't round
+ inline uint64_t getSampleTime(
+ size_t sample_index, uint64_t scale_num, uint64_t scale_den) const {
+ return (mSampleTimeEntries[sample_index].mCompositionTime
+ * scale_num) / scale_den;
+ }
+
status_t getSampleSize_l(uint32_t sample_index, size_t *sample_size);
uint32_t getCompositionTimeOffset(uint32_t sampleIndex);
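
The inline getSampleTime() above rescales a composition time with integer math: time = compositionTime * scale_num / scale_den (truncating, as the "normally we don't round" comment notes). A tiny standalone check of that arithmetic; the 90 kHz timescale and the microsecond target are illustrative values, not anything taken from SampleTable.

#include <cstdint>
#include <cstdio>

static uint64_t rescale(uint64_t compositionTime, uint64_t scale_num, uint64_t scale_den) {
    return (compositionTime * scale_num) / scale_den;   // truncates, no rounding
}

int main() {
    const uint64_t mediaTimeScale = 90000;   // 90 kHz, common for video tracks
    const uint64_t ct = 45000;               // half a second in 90 kHz ticks
    printf("%llu us\n", (unsigned long long)rescale(ct, 1000000, mediaTimeScale));  // 500000
    return 0;
}
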
diff --git a/media/libstagefright/include/SimpleSoftOMXComponent.h b/media/libstagefright/include/SimpleSoftOMXComponent.h
index f8c61eb..591b38e 100644
--- a/media/libstagefright/include/SimpleSoftOMXComponent.h
+++ b/media/libstagefright/include/SimpleSoftOMXComponent.h
@@ -58,6 +58,11 @@ protected:
} mTransition;
};
+ enum {
+ kStoreMetaDataExtensionIndex = OMX_IndexVendorStartUnused + 1,
+ kPrepareForAdaptivePlaybackIndex,
+ };
+
void addPort(const OMX_PARAM_PORTDEFINITIONTYPE &def);
virtual OMX_ERRORTYPE internalGetParameter(
diff --git a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
index d050fa6..4529007 100644
--- a/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
+++ b/media/libstagefright/include/SoftVideoDecoderOMXComponent.h
@@ -27,8 +27,6 @@
#include <utils/threads.h>
#include <utils/Vector.h>
-#define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a)))
-
namespace android {
struct SoftVideoDecoderOMXComponent : public SimpleSoftOMXComponent {
@@ -57,12 +55,32 @@ protected:
virtual OMX_ERRORTYPE getConfig(
OMX_INDEXTYPE index, OMX_PTR params);
+ virtual OMX_ERRORTYPE getExtensionIndex(
+ const char *name, OMX_INDEXTYPE *index);
+
void initPorts(OMX_U32 numInputBuffers,
OMX_U32 inputBufferSize,
OMX_U32 numOutputBuffers,
- const char *mimeType);
+ const char *mimeType,
+ OMX_U32 minCompressionRatio = 1u);
- virtual void updatePortDefinitions();
+ virtual void updatePortDefinitions(bool updateCrop = true, bool updateInputSize = false);
+
+ uint32_t outputBufferWidth();
+ uint32_t outputBufferHeight();
+
+ enum CropSettingsMode {
+ kCropUnSet = 0,
+ kCropSet,
+ kCropChanged,
+ };
+ void handlePortSettingsChange(
+ bool *portWillReset, uint32_t width, uint32_t height,
+ CropSettingsMode cropSettingsMode = kCropUnSet, bool fakeStride = false);
+
+ void copyYV12FrameToOutputBuffer(
+ uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride);
enum {
kInputPortIndex = 0,
@@ -70,6 +88,8 @@ protected:
kMaxPortIndex = 1,
};
+ bool mIsAdaptive;
+ uint32_t mAdaptiveMaxWidth, mAdaptiveMaxHeight;
uint32_t mWidth, mHeight;
uint32_t mCropLeft, mCropTop, mCropWidth, mCropHeight;
@@ -80,6 +100,9 @@ protected:
} mOutputPortSettingsChange;
private:
+ uint32_t mMinInputBufferSize;
+ uint32_t mMinCompressionRatio;
+
const char *mComponentRole;
OMX_VIDEO_CODINGTYPE mCodingType;
const CodecProfileLevel *mProfileLevels;
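
The new minCompressionRatio argument and the mMinInputBufferSize/mMinCompressionRatio fields above suggest that input (compressed) buffer sizes are derived from the output frame size with a worst-case compression bound. The exact rule is not visible in this header, so the sketch below is an assumption for illustration only: a YUV420 frame (3/2 bytes per pixel) divided by the ratio, with a fixed floor.

#include <algorithm>
#include <cstdint>
#include <cstdio>

static uint32_t minInputBufferSize(uint32_t width, uint32_t height,
                                   uint32_t minCompressionRatio,
                                   uint32_t floorSize) {
    uint32_t rawFrameBytes = (width * height * 3) / 2;          // YUV420 output frame
    return std::max(floorSize, rawFrameBytes / minCompressionRatio);
}

int main() {
    // 1080p output, assume at worst 2:1 compression, never below a 256 KiB floor.
    printf("%u bytes\n", minInputBufferSize(1920, 1080, 2, 256 * 1024));
    return 0;
}
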
diff --git a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
new file mode 100644
index 0000000..b43635d
--- /dev/null
+++ b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
+
+#define SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
+
+#include <media/IOMX.h>
+
+#include "SimpleSoftOMXComponent.h"
+#include <system/window.h>
+
+struct hw_module_t;
+
+namespace android {
+
+struct SoftVideoEncoderOMXComponent : public SimpleSoftOMXComponent {
+ SoftVideoEncoderOMXComponent(
+ const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
+ int32_t width,
+ int32_t height,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+ virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR param);
+ virtual OMX_ERRORTYPE internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params);
+
+protected:
+ void initPorts(
+ OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
+ const char *mime, OMX_U32 minCompressionRatio = 1);
+
+ static void setRawVideoSize(OMX_PARAM_PORTDEFINITIONTYPE *def);
+
+ static void ConvertFlexYUVToPlanar(
+ uint8_t *dst, size_t dstStride, size_t dstVStride,
+ struct android_ycbcr *ycbcr, int32_t width, int32_t height);
+
+ static void ConvertYUV420SemiPlanarToYUV420Planar(
+ const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height);
+
+ static void ConvertRGB32ToPlanar(
+ uint8_t *dstY, size_t dstStride, size_t dstVStride,
+ const uint8_t *src, size_t width, size_t height, size_t srcStride,
+ bool bgr);
+
+ const uint8_t *extractGraphicBuffer(
+ uint8_t *dst, size_t dstSize, const uint8_t *src, size_t srcSize,
+ size_t width, size_t height) const;
+
+ virtual OMX_ERRORTYPE getExtensionIndex(const char *name, OMX_INDEXTYPE *index);
+
+ enum {
+ kInputPortIndex = 0,
+ kOutputPortIndex = 1,
+ };
+
+ bool mInputDataIsMeta;
+ int32_t mWidth; // width of the input frames
+ int32_t mHeight; // height of the input frames
+ uint32_t mBitrate; // target bitrate set for the encoder, in bits per second
+ uint32_t mFramerate; // target framerate set for the encoder, in Q16 format
+ OMX_COLOR_FORMATTYPE mColorFormat; // Color format for the input port
+
+private:
+ void updatePortParams();
+ OMX_ERRORTYPE internalSetPortParams(const OMX_PARAM_PORTDEFINITIONTYPE* port);
+
+ static const uint32_t kInputBufferAlignment = 1;
+ static const uint32_t kOutputBufferAlignment = 2;
+
+ mutable const hw_module_t *mGrallocModule;
+
+ uint32_t mMinOutputBufferSize;
+ uint32_t mMinCompressionRatio;
+
+ const char *mComponentRole;
+ OMX_VIDEO_CODINGTYPE mCodingType;
+ const CodecProfileLevel *mProfileLevels;
+ size_t mNumProfileLevels;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftVideoEncoderOMXComponent);
+};
+
+} // namespace android
+
+#endif // SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 7ab0042..fa3ea89 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -24,17 +24,17 @@
namespace android {
-struct MetaData;
+struct AMessage;
class SoftwareRenderer {
public:
- SoftwareRenderer(
- const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta);
+ explicit SoftwareRenderer(const sp<ANativeWindow> &nativeWindow);
~SoftwareRenderer();
void render(
- const void *data, size_t size, void *platformPrivate);
+ const void *data, size_t size, int64_t timestampNs,
+ void *platformPrivate, const sp<AMessage> &format);
private:
enum YUVMode {
@@ -51,6 +51,8 @@ private:
SoftwareRenderer(const SoftwareRenderer &);
SoftwareRenderer &operator=(const SoftwareRenderer &);
+
+ void resetFormatIfChanged(const sp<AMessage> &format);
};
} // namespace android
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index b02ed0e..6632c27 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -33,6 +33,7 @@ struct StagefrightMetadataRetriever : public MediaMetadataRetrieverInterface {
virtual ~StagefrightMetadataRetriever();
virtual status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers);
diff --git a/media/libstagefright/include/TimedEventQueue.h b/media/libstagefright/include/TimedEventQueue.h
index 3e84256..2963150 100644
--- a/media/libstagefright/include/TimedEventQueue.h
+++ b/media/libstagefright/include/TimedEventQueue.h
@@ -122,7 +122,7 @@ private:
};
struct StopEvent : public TimedEventQueue::Event {
- virtual void fire(TimedEventQueue *queue, int64_t now_us) {
+ virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
queue->mStopped = true;
}
};
diff --git a/media/libstagefright/include/WVMExtractor.h b/media/libstagefright/include/WVMExtractor.h
index 8e62946..ab7e8b8 100644
--- a/media/libstagefright/include/WVMExtractor.h
+++ b/media/libstagefright/include/WVMExtractor.h
@@ -49,6 +49,7 @@ public:
virtual sp<MediaSource> getTrack(size_t index);
virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
virtual sp<MetaData> getMetaData();
+ virtual void setUID(uid_t uid);
// Return the amount of data cached from the current
// playback positiion (in us).
@@ -74,8 +75,6 @@ public:
// codec.
void setCryptoPluginMode(bool cryptoPluginMode);
- void setUID(uid_t uid);
-
static bool getVendorLibHandle();
status_t getError();
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index d517320..c270bc1 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -23,7 +23,7 @@
namespace android {
-struct ABitReader;
+class ABitReader;
enum {
kAVCProfileBaseline = 0x42,
@@ -49,7 +49,7 @@ status_t getNextNALUnit(
const uint8_t **nalStart, size_t *nalSize,
bool startCodeFollows = false);
-struct MetaData;
+class MetaData;
sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit);
bool IsIDR(const sp<ABuffer> &accessUnit);
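
getNextNALUnit() above walks an Annex-B byte stream. A simplified standalone stand-in (reduced signature; no handling of 4-byte start codes or trailing-zero stripping) showing the basic scan between 00 00 01 start codes; nextNALUnit() and findStartCode() are hypothetical helpers, not the avc_utils implementation.

#include <cstddef>
#include <cstdint>
#include <cstdio>

static bool findStartCode(const uint8_t *data, size_t size, size_t *pos) {
    for (size_t i = *pos; i + 3 <= size; ++i) {
        if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
            *pos = i;
            return true;
        }
    }
    return false;
}

// On success, [*nalStart, *nalStart + *nalSize) is the next NAL unit and the
// caller's window is advanced past it.
static bool nextNALUnit(const uint8_t **data, size_t *size,
                        const uint8_t **nalStart, size_t *nalSize) {
    size_t start = 0;
    if (!findStartCode(*data, *size, &start)) return false;
    start += 3;                                           // skip the start code
    size_t end = start;
    if (!findStartCode(*data, *size, &end)) end = *size;  // last NAL runs to the end
    *nalStart = *data + start;
    *nalSize = end - start;
    *data += end;
    *size -= end;
    return true;
}

int main() {
    const uint8_t stream[] = {0, 0, 1, 0x67, 0x42, 0, 0, 1, 0x68, 0xCE};
    const uint8_t *p = stream;
    size_t n = sizeof(stream);
    const uint8_t *nal;
    size_t nalSize;
    while (nextNALUnit(&p, &n, &nal, &nalSize)) {
        printf("NAL type %d, %zu bytes\n", nal[0] & 0x1f, nalSize);  // 7 (SPS), 8 (PPS)
    }
    return 0;
}
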
diff --git a/media/libstagefright/matroska/Android.mk b/media/libstagefright/matroska/Android.mk
index 2d8c1e1..446ff8c 100644
--- a/media/libstagefright/matroska/Android.mk
+++ b/media/libstagefright/matroska/Android.mk
@@ -8,7 +8,7 @@ LOCAL_C_INCLUDES:= \
$(TOP)/external/libvpx/libwebm \
$(TOP)/frameworks/native/include/media/openmax \
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror
LOCAL_MODULE:= libstagefright_matroska
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index d260d0f..0712bf0 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -20,8 +20,6 @@
#include "MatroskaExtractor.h"
-#include "mkvparser.hpp"
-
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/DataSource.h>
@@ -33,6 +31,8 @@
#include <media/stagefright/Utils.h>
#include <utils/String8.h>
+#include <inttypes.h>
+
namespace android {
struct DataSourceReader : public mkvparser::IMkvReader {
@@ -87,7 +87,7 @@ private:
////////////////////////////////////////////////////////////////////////////////
struct BlockIterator {
- BlockIterator(MatroskaExtractor *extractor, unsigned long trackNum);
+ BlockIterator(MatroskaExtractor *extractor, unsigned long trackNum, unsigned long index);
bool eos() const;
@@ -103,7 +103,8 @@ struct BlockIterator {
private:
MatroskaExtractor *mExtractor;
- unsigned long mTrackNum;
+ long long mTrackNum;
+ unsigned long mIndex;
const mkvparser::Cluster *mCluster;
const mkvparser::BlockEntry *mBlockEntry;
@@ -155,6 +156,53 @@ private:
MatroskaSource &operator=(const MatroskaSource &);
};
+const mkvparser::Track* MatroskaExtractor::TrackInfo::getTrack() const {
+ return mExtractor->mSegment->GetTracks()->GetTrackByNumber(mTrackNum);
+}
+
+// This function does exactly the same as mkvparser::Cues::Find, except that it
+// searches in our own track based vectors. We should not need this once mkvparser
+// adds the same functionality.
+const mkvparser::CuePoint::TrackPosition *MatroskaExtractor::TrackInfo::find(
+ long long timeNs) const {
+ ALOGV("mCuePoints.size %zu", mCuePoints.size());
+ if (mCuePoints.empty()) {
+ return NULL;
+ }
+
+ const mkvparser::CuePoint* cp = mCuePoints.itemAt(0);
+ const mkvparser::Track* track = getTrack();
+ if (timeNs <= cp->GetTime(mExtractor->mSegment)) {
+ return cp->Find(track);
+ }
+
+ // Binary searches through relevant cues; assumes cues are ordered by timecode.
+ // If we do detect out-of-order cues, return NULL.
+ size_t lo = 0;
+ size_t hi = mCuePoints.size();
+ while (lo < hi) {
+ const size_t mid = lo + (hi - lo) / 2;
+ const mkvparser::CuePoint* const midCp = mCuePoints.itemAt(mid);
+ const long long cueTimeNs = midCp->GetTime(mExtractor->mSegment);
+ if (cueTimeNs <= timeNs) {
+ lo = mid + 1;
+ } else {
+ hi = mid;
+ }
+ }
+
+ if (lo == 0) {
+ return NULL;
+ }
+
+ cp = mCuePoints.itemAt(lo - 1);
+ if (cp->GetTime(mExtractor->mSegment) > timeNs) {
+ return NULL;
+ }
+
+ return cp->Find(track);
+}
+
MatroskaSource::MatroskaSource(
const sp<MatroskaExtractor> &extractor, size_t index)
: mExtractor(extractor),
@@ -162,7 +210,8 @@ MatroskaSource::MatroskaSource(
mType(OTHER),
mIsAudio(false),
mBlockIter(mExtractor.get(),
- mExtractor->mTracks.itemAt(index).mTrackNum),
+ mExtractor->mTracks.itemAt(index).mTrackNum,
+ index),
mNALSizeLen(0) {
sp<MetaData> meta = mExtractor->mTracks.itemAt(index).mMeta;
@@ -183,7 +232,7 @@ MatroskaSource::MatroskaSource(
CHECK_GE(avccSize, 5u);
mNALSizeLen = 1 + (avcc[4] & 3);
- ALOGV("mNALSizeLen = %d", mNALSizeLen);
+ ALOGV("mNALSizeLen = %zu", mNALSizeLen);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
mType = AAC;
}
@@ -193,7 +242,7 @@ MatroskaSource::~MatroskaSource() {
clearPendingFrames();
}
-status_t MatroskaSource::start(MetaData *params) {
+status_t MatroskaSource::start(MetaData * /* params */) {
mBlockIter.reset();
return OK;
@@ -212,9 +261,10 @@ sp<MetaData> MatroskaSource::getFormat() {
////////////////////////////////////////////////////////////////////////////////
BlockIterator::BlockIterator(
- MatroskaExtractor *extractor, unsigned long trackNum)
+ MatroskaExtractor *extractor, unsigned long trackNum, unsigned long index)
: mExtractor(extractor),
mTrackNum(trackNum),
+ mIndex(index),
mCluster(NULL),
mBlockEntry(NULL),
mBlockEntryIndex(0) {
@@ -313,14 +363,14 @@ void BlockIterator::seek(
*actualFrameTimeUs = -1ll;
- const int64_t seekTimeNs = seekTimeUs * 1000ll;
+ const int64_t seekTimeNs = seekTimeUs * 1000ll - mExtractor->mSeekPreRollNs;
mkvparser::Segment* const pSegment = mExtractor->mSegment;
// Special case the 0 seek to avoid loading Cues when the application
// extraneously seeks to 0 before playing.
if (seekTimeNs <= 0) {
- ALOGV("Seek to beginning: %lld", seekTimeUs);
+ ALOGV("Seek to beginning: %" PRId64, seekTimeUs);
mCluster = pSegment->GetFirst();
mBlockEntryIndex = 0;
do {
@@ -329,7 +379,7 @@ void BlockIterator::seek(
return;
}
- ALOGV("Seeking to: %lld", seekTimeUs);
+ ALOGV("Seeking to: %" PRId64, seekTimeUs);
// If the Cues have not been located then find them.
const mkvparser::Cues* pCues = pSegment->GetCues();
@@ -362,9 +412,20 @@ void BlockIterator::seek(
}
const mkvparser::CuePoint* pCP;
+ mkvparser::Tracks const *pTracks = pSegment->GetTracks();
while (!pCues->DoneParsing()) {
pCues->LoadCuePoint();
pCP = pCues->GetLast();
+ CHECK(pCP);
+
+ size_t trackCount = mExtractor->mTracks.size();
+ for (size_t index = 0; index < trackCount; ++index) {
+ MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(index);
+ const mkvparser::Track *pTrack = pTracks->GetTrackByNumber(track.mTrackNum);
+ if (pTrack && pTrack->GetType() == 1 && pCP->Find(pTrack)) { // VIDEO_TRACK
+ track.mCuePoints.push_back(pCP);
+ }
+ }
if (pCP->GetTime(pSegment) >= seekTimeNs) {
ALOGV("Parsed past relevant Cue");
@@ -372,22 +433,26 @@ void BlockIterator::seek(
}
}
- // The Cue index is built around video keyframes
- mkvparser::Tracks const *pTracks = pSegment->GetTracks();
- const mkvparser::Track *pTrack = NULL;
- for (size_t index = 0; index < pTracks->GetTracksCount(); ++index) {
- pTrack = pTracks->GetTrackByIndex(index);
- if (pTrack && pTrack->GetType() == 1) { // VIDEO_TRACK
- ALOGV("Video track located at %d", index);
- break;
+ const mkvparser::CuePoint::TrackPosition *pTP = NULL;
+ const mkvparser::Track *thisTrack = pTracks->GetTrackByNumber(mTrackNum);
+ if (thisTrack->GetType() == 1) { // video
+ MatroskaExtractor::TrackInfo& track = mExtractor->mTracks.editItemAt(mIndex);
+ pTP = track.find(seekTimeNs);
+ } else {
+ // The Cue index is built around video keyframes
+ unsigned long int trackCount = pTracks->GetTracksCount();
+ for (size_t index = 0; index < trackCount; ++index) {
+ const mkvparser::Track *pTrack = pTracks->GetTrackByIndex(index);
+ if (pTrack && pTrack->GetType() == 1 && pCues->Find(seekTimeNs, pTrack, pCP, pTP)) {
+ ALOGV("Video track located at %zu", index);
+ break;
+ }
}
}
+
// Always *search* based on the video track, but finalize based on mTrackNum
- const mkvparser::CuePoint::TrackPosition* pTP;
- if (pTrack && pTrack->GetType() == 1) {
- pCues->Find(seekTimeNs, pTrack, pCP, pTP);
- } else {
+ if (!pTP) {
ALOGE("Did not locate the video track for seeking");
return;
}
@@ -408,10 +473,13 @@ void BlockIterator::seek(
if (isAudio || block()->IsKey()) {
// Accept the first key frame
- *actualFrameTimeUs = (block()->GetTime(mCluster) + 500LL) / 1000LL;
- ALOGV("Requested seek point: %lld actual: %lld",
- seekTimeUs, actualFrameTimeUs);
- break;
+ int64_t frameTimeUs = (block()->GetTime(mCluster) + 500LL) / 1000LL;
+ if (thisTrack->GetType() == 1 || frameTimeUs >= seekTimeUs) {
+ *actualFrameTimeUs = frameTimeUs;
+ ALOGV("Requested seek point: %" PRId64 " actual: %" PRId64,
+ seekTimeUs, *actualFrameTimeUs);
+ break;
+ }
}
}
}
@@ -432,17 +500,6 @@ static unsigned U24_AT(const uint8_t *ptr) {
return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
}
-static size_t clz(uint8_t x) {
- size_t numLeadingZeroes = 0;
-
- while (!(x & 0x80)) {
- ++numLeadingZeroes;
- x = x << 1;
- }
-
- return numLeadingZeroes;
-}
-
void MatroskaSource::clearPendingFrames() {
while (!mPendingFrames.empty()) {
MediaBuffer *frame = *mPendingFrames.begin();
@@ -628,7 +685,8 @@ MatroskaExtractor::MatroskaExtractor(const sp<DataSource> &source)
mReader(new DataSourceReader(mDataSource)),
mSegment(NULL),
mExtractedThumbnails(false),
- mIsWebm(false) {
+ mIsWebm(false),
+ mSeekPreRollNs(0) {
off64_t size;
mIsLiveStreaming =
(mDataSource->flags()
@@ -654,14 +712,22 @@ MatroskaExtractor::MatroskaExtractor(const sp<DataSource> &source)
return;
}
+ // from mkvparser::Segment::Load(), but stop at first cluster
ret = mSegment->ParseHeaders();
- CHECK_EQ(ret, 0);
-
- long len;
- ret = mSegment->LoadCluster(pos, len);
- CHECK_EQ(ret, 0);
+ if (ret == 0) {
+ long len;
+ ret = mSegment->LoadCluster(pos, len);
+ if (ret >= 1) {
+ // no more clusters
+ ret = 0;
+ }
+ } else if (ret > 0) {
+ ret = mkvparser::E_BUFFER_NOT_FULL;
+ }
if (ret < 0) {
+ ALOGW("Corrupt %s source: %s", mIsWebm ? "webm" : "matroska",
+ uriDebugString(mDataSource->getUri()).c_str());
delete mSegment;
mSegment = NULL;
return;
@@ -716,41 +782,61 @@ bool MatroskaExtractor::isLiveStreaming() const {
return mIsLiveStreaming;
}
+static int bytesForSize(size_t size) {
+ // use at most 28 bits (4 times 7)
+ CHECK(size <= 0xfffffff);
+
+ if (size > 0x1fffff) {
+ return 4;
+ } else if (size > 0x3fff) {
+ return 3;
+ } else if (size > 0x7f) {
+ return 2;
+ }
+ return 1;
+}
+
+static void storeSize(uint8_t *data, size_t &idx, size_t size) {
+ int numBytes = bytesForSize(size);
+ idx += numBytes;
+
+ data += idx;
+ size_t next = 0;
+ while (numBytes--) {
+ *--data = (size & 0x7f) | next;
+ size >>= 7;
+ next = 0x80;
+ }
+}
+
static void addESDSFromCodecPrivate(
const sp<MetaData> &meta,
bool isAudio, const void *priv, size_t privSize) {
- static const uint8_t kStaticESDS[] = {
- 0x03, 22,
- 0x00, 0x00, // ES_ID
- 0x00, // streamDependenceFlag, URL_Flag, OCRstreamFlag
-
- 0x04, 17,
- 0x40, // ObjectTypeIndication
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
- 0x00, 0x00, 0x00, 0x00,
-
- 0x05,
- // CodecSpecificInfo (with size prefix) follows
- };
- // Make sure all sizes can be coded in a single byte.
- CHECK(privSize + 22 - 2 < 128);
- size_t esdsSize = sizeof(kStaticESDS) + privSize + 1;
+ int privSizeBytesRequired = bytesForSize(privSize);
+ int esdsSize2 = 14 + privSizeBytesRequired + privSize;
+ int esdsSize2BytesRequired = bytesForSize(esdsSize2);
+ int esdsSize1 = 4 + esdsSize2BytesRequired + esdsSize2;
+ int esdsSize1BytesRequired = bytesForSize(esdsSize1);
+ size_t esdsSize = 1 + esdsSize1BytesRequired + esdsSize1;
uint8_t *esds = new uint8_t[esdsSize];
- memcpy(esds, kStaticESDS, sizeof(kStaticESDS));
- uint8_t *ptr = esds + sizeof(kStaticESDS);
- *ptr++ = privSize;
- memcpy(ptr, priv, privSize);
-
- // Increment by codecPrivateSize less 2 bytes that are accounted for
- // already in lengths of 22/17
- esds[1] += privSize - 2;
- esds[6] += privSize - 2;
- // Set ObjectTypeIndication.
- esds[7] = isAudio ? 0x40 // Audio ISO/IEC 14496-3
- : 0x20; // Visual ISO/IEC 14496-2
+ size_t idx = 0;
+ esds[idx++] = 0x03;
+ storeSize(esds, idx, esdsSize1);
+ esds[idx++] = 0x00; // ES_ID
+ esds[idx++] = 0x00; // ES_ID
+ esds[idx++] = 0x00; // streamDependenceFlag, URL_Flag, OCRstreamFlag
+ esds[idx++] = 0x04;
+ storeSize(esds, idx, esdsSize2);
+ esds[idx++] = isAudio ? 0x40 // Audio ISO/IEC 14496-3
+ : 0x20; // Visual ISO/IEC 14496-2
+ for (int i = 0; i < 12; i++) {
+ esds[idx++] = 0x00;
+ }
+ esds[idx++] = 0x05;
+ storeSize(esds, idx, privSize);
+ memcpy(esds + idx, priv, privSize);
meta->setData(kKeyESDS, 0, esds, esdsSize);
@@ -899,6 +985,12 @@ void MatroskaExtractor::addTracks() {
err = addVorbisCodecInfo(
meta, codecPrivate, codecPrivateSize);
+ } else if (!strcmp("A_OPUS", codecID)) {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+ meta->setData(kKeyOpusHeader, 0, codecPrivate, codecPrivateSize);
+ meta->setInt64(kKeyOpusCodecDelay, track->GetCodecDelay());
+ meta->setInt64(kKeyOpusSeekPreRoll, track->GetSeekPreRoll());
+ mSeekPreRollNs = track->GetSeekPreRoll();
} else if (!strcmp("A_MPEG/L3", codecID)) {
meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
} else {
@@ -927,6 +1019,7 @@ void MatroskaExtractor::addTracks() {
TrackInfo *trackInfo = &mTracks.editItemAt(mTracks.size() - 1);
trackInfo->mTrackNum = track->GetNumber();
trackInfo->mMeta = meta;
+ trackInfo->mExtractor = this;
}
}
@@ -941,7 +1034,7 @@ void MatroskaExtractor::findThumbnails() {
continue;
}
- BlockIterator iter(this, info->mTrackNum);
+ BlockIterator iter(this, info->mTrackNum, i);
int32_t j = 0;
int64_t thumbnailTimeUs = 0;
size_t maxBlockSize = 0;
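
The rewritten addESDSFromCodecPrivate() above drops the old single-byte size limit by emitting ISO/IEC 14496-1 expandable sizes: each byte carries 7 bits of the value, and every byte except the last sets its high bit, hence the 28-bit cap in bytesForSize(). A standalone round-trip check of that encoding; bytesForSize() and storeSize() follow the logic of the hunk above, while readSize() is a hypothetical decoder added here only to verify the format.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <initializer_list>

static int bytesForSize(size_t size) {
    assert(size <= 0xfffffff);          // at most 28 bits (4 times 7)
    if (size > 0x1fffff) return 4;
    if (size > 0x3fff)   return 3;
    if (size > 0x7f)     return 2;
    return 1;
}

static void storeSize(uint8_t *data, size_t &idx, size_t size) {
    int numBytes = bytesForSize(size);
    idx += numBytes;
    uint8_t *p = data + idx;
    size_t next = 0;
    while (numBytes--) {                // write the low 7-bit group first, backwards
        *--p = (size & 0x7f) | next;
        size >>= 7;
        next = 0x80;                    // every byte but the final one gets the flag
    }
}

static size_t readSize(const uint8_t *data, size_t &idx) {
    size_t size = 0;
    for (int i = 0; i < 4; ++i) {       // accumulate 7 bits per byte
        uint8_t b = data[idx++];
        size = (size << 7) | (b & 0x7f);
        if (!(b & 0x80)) break;         // high bit clear: this was the last byte
    }
    return size;
}

int main() {
    uint8_t buf[8];
    for (size_t v : {0x05ul, 0x80ul, 0x4000ul, 0x200000ul}) {
        size_t w = 0, r = 0;
        storeSize(buf, w, v);
        size_t back = readSize(buf, r);
        printf("0x%zx -> %zu byte(s) -> 0x%zx\n", v, w, back);
        assert(back == v && r == w);
    }
    return 0;
}
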
diff --git a/media/libstagefright/matroska/MatroskaExtractor.h b/media/libstagefright/matroska/MatroskaExtractor.h
index 1294b4f..db36bf8 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.h
+++ b/media/libstagefright/matroska/MatroskaExtractor.h
@@ -18,14 +18,12 @@
#define MATROSKA_EXTRACTOR_H_
+#include "mkvparser.hpp"
+
#include <media/stagefright/MediaExtractor.h>
#include <utils/Vector.h>
#include <utils/threads.h>
-namespace mkvparser {
-struct Segment;
-};
-
namespace android {
struct AMessage;
@@ -58,6 +56,11 @@ private:
struct TrackInfo {
unsigned long mTrackNum;
sp<MetaData> mMeta;
+ const MatroskaExtractor *mExtractor;
+ Vector<const mkvparser::CuePoint*> mCuePoints;
+
+ const mkvparser::Track* getTrack() const;
+ const mkvparser::CuePoint::TrackPosition *find(long long timeNs) const;
};
Mutex mLock;
@@ -69,6 +72,7 @@ private:
bool mExtractedThumbnails;
bool mIsLiveStreaming;
bool mIsWebm;
+ int64_t mSeekPreRollNs;
void addTracks();
void findThumbnails();
diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp
deleted file mode 100644
index 0102656..0000000
--- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp
+++ /dev/null
@@ -1,1993 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "FragmentedMP4Parser"
-#include <utils/Log.h>
-
-#include "include/avc_utils.h"
-#include "include/ESDS.h"
-#include "include/FragmentedMP4Parser.h"
-#include "TrackFragment.h"
-
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-
-namespace android {
-
-static const char *Fourcc2String(uint32_t fourcc) {
- static char buffer[5];
- buffer[4] = '\0';
- buffer[0] = fourcc >> 24;
- buffer[1] = (fourcc >> 16) & 0xff;
- buffer[2] = (fourcc >> 8) & 0xff;
- buffer[3] = fourcc & 0xff;
-
- return buffer;
-}
-
-static const char *IndentString(size_t n) {
- static const char kSpace[] = " ";
- return kSpace + sizeof(kSpace) - 2 * n - 1;
-}
-
-// static
-const FragmentedMP4Parser::DispatchEntry FragmentedMP4Parser::kDispatchTable[] = {
- { FOURCC('m', 'o', 'o', 'v'), 0, NULL },
- { FOURCC('t', 'r', 'a', 'k'), FOURCC('m', 'o', 'o', 'v'), NULL },
- { FOURCC('u', 'd', 't', 'a'), FOURCC('t', 'r', 'a', 'k'), NULL },
- { FOURCC('u', 'd', 't', 'a'), FOURCC('m', 'o', 'o', 'v'), NULL },
- { FOURCC('m', 'e', 't', 'a'), FOURCC('u', 'd', 't', 'a'), NULL },
- { FOURCC('i', 'l', 's', 't'), FOURCC('m', 'e', 't', 'a'), NULL },
-
- { FOURCC('t', 'k', 'h', 'd'), FOURCC('t', 'r', 'a', 'k'),
- &FragmentedMP4Parser::parseTrackHeader
- },
-
- { FOURCC('m', 'v', 'e', 'x'), FOURCC('m', 'o', 'o', 'v'), NULL },
-
- { FOURCC('t', 'r', 'e', 'x'), FOURCC('m', 'v', 'e', 'x'),
- &FragmentedMP4Parser::parseTrackExtends
- },
-
- { FOURCC('e', 'd', 't', 's'), FOURCC('t', 'r', 'a', 'k'), NULL },
- { FOURCC('m', 'd', 'i', 'a'), FOURCC('t', 'r', 'a', 'k'), NULL },
-
- { FOURCC('m', 'd', 'h', 'd'), FOURCC('m', 'd', 'i', 'a'),
- &FragmentedMP4Parser::parseMediaHeader
- },
-
- { FOURCC('h', 'd', 'l', 'r'), FOURCC('m', 'd', 'i', 'a'),
- &FragmentedMP4Parser::parseMediaHandler
- },
-
- { FOURCC('m', 'i', 'n', 'f'), FOURCC('m', 'd', 'i', 'a'), NULL },
- { FOURCC('d', 'i', 'n', 'f'), FOURCC('m', 'i', 'n', 'f'), NULL },
- { FOURCC('s', 't', 'b', 'l'), FOURCC('m', 'i', 'n', 'f'), NULL },
- { FOURCC('s', 't', 's', 'd'), FOURCC('s', 't', 'b', 'l'), NULL },
-
- { FOURCC('s', 't', 's', 'z'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseSampleSizes },
-
- { FOURCC('s', 't', 'z', '2'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseCompactSampleSizes },
-
- { FOURCC('s', 't', 's', 'c'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseSampleToChunk },
-
- { FOURCC('s', 't', 'c', 'o'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseChunkOffsets },
-
- { FOURCC('c', 'o', '6', '4'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseChunkOffsets64 },
-
- { FOURCC('a', 'v', 'c', 'C'), FOURCC('a', 'v', 'c', '1'),
- &FragmentedMP4Parser::parseAVCCodecSpecificData },
-
- { FOURCC('e', 's', 'd', 's'), FOURCC('m', 'p', '4', 'a'),
- &FragmentedMP4Parser::parseESDSCodecSpecificData },
-
- { FOURCC('e', 's', 'd', 's'), FOURCC('m', 'p', '4', 'v'),
- &FragmentedMP4Parser::parseESDSCodecSpecificData },
-
- { FOURCC('m', 'd', 'a', 't'), 0, &FragmentedMP4Parser::parseMediaData },
-
- { FOURCC('m', 'o', 'o', 'f'), 0, NULL },
- { FOURCC('t', 'r', 'a', 'f'), FOURCC('m', 'o', 'o', 'f'), NULL },
-
- { FOURCC('t', 'f', 'h', 'd'), FOURCC('t', 'r', 'a', 'f'),
- &FragmentedMP4Parser::parseTrackFragmentHeader
- },
- { FOURCC('t', 'r', 'u', 'n'), FOURCC('t', 'r', 'a', 'f'),
- &FragmentedMP4Parser::parseTrackFragmentRun
- },
-
- { FOURCC('m', 'f', 'r', 'a'), 0, NULL },
-
- { FOURCC('s', 'i', 'd', 'x'), 0, &FragmentedMP4Parser::parseSegmentIndex },
-};
-
-struct FileSource : public FragmentedMP4Parser::Source {
- FileSource(const char *filename)
- : mFile(fopen(filename, "rb")) {
- CHECK(mFile != NULL);
- }
-
- virtual ~FileSource() {
- fclose(mFile);
- }
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- fseek(mFile, offset, SEEK_SET);
- return fread(data, 1, size, mFile);
- }
-
- virtual bool isSeekable() {
- return true;
- }
-
- private:
- FILE *mFile;
-
- DISALLOW_EVIL_CONSTRUCTORS(FileSource);
-};
-
-struct ReadTracker : public RefBase {
- ReadTracker(off64_t size) {
- allocSize = 1 + size / 8192; // 1 bit per kilobyte
- bitmap = (char*) calloc(1, allocSize);
- }
- virtual ~ReadTracker() {
- dumpToLog();
- free(bitmap);
- }
- void mark(off64_t offset, size_t size) {
- int firstbit = offset / 1024;
- int lastbit = (offset + size - 1) / 1024;
- for (int i = firstbit; i <= lastbit; i++) {
- bitmap[i/8] |= (0x80 >> (i & 7));
- }
- }
-
- private:
- void dumpToLog() {
- // 96 chars per line, each char represents one kilobyte, 1 kb per bit
- int numlines = allocSize / 12;
- char buf[97];
- char *cur = bitmap;
- for (int i = 0; i < numlines; i++ && cur) {
- for (int j = 0; j < 12; j++) {
- for (int k = 0; k < 8; k++) {
- buf[(j * 8) + k] = (*cur & (0x80 >> k)) ? 'X' : '.';
- }
- cur++;
- }
- buf[96] = '\0';
- ALOGI("%5dk: %s", i * 96, buf);
- }
- }
-
- size_t allocSize;
- char *bitmap;
-};
-
-struct DataSourceSource : public FragmentedMP4Parser::Source {
- DataSourceSource(sp<DataSource> &source)
- : mDataSource(source) {
- CHECK(mDataSource != NULL);
-#if 0
- off64_t size;
- if (source->getSize(&size) == OK) {
- mReadTracker = new ReadTracker(size);
- } else {
- ALOGE("couldn't get data source size");
- }
-#endif
- }
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- if (mReadTracker != NULL) {
- mReadTracker->mark(offset, size);
- }
- return mDataSource->readAt(offset, data, size);
- }
-
- virtual bool isSeekable() {
- return true;
- }
-
- private:
- sp<DataSource> mDataSource;
- sp<ReadTracker> mReadTracker;
-
- DISALLOW_EVIL_CONSTRUCTORS(DataSourceSource);
-};
-
-FragmentedMP4Parser::FragmentedMP4Parser()
- : mBufferPos(0),
- mSuspended(false),
- mDoneWithMoov(false),
- mFirstMoofOffset(0),
- mFinalResult(OK) {
-}
-
-FragmentedMP4Parser::~FragmentedMP4Parser() {
-}
-
-void FragmentedMP4Parser::start(const char *filename) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setObject("source", new FileSource(filename));
- msg->post();
- ALOGV("Parser::start(%s)", filename);
-}
-
-void FragmentedMP4Parser::start(const sp<Source> &source) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setObject("source", source);
- msg->post();
- ALOGV("Parser::start(Source)");
-}
-
-void FragmentedMP4Parser::start(sp<DataSource> &source) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setObject("source", new DataSourceSource(source));
- msg->post();
- ALOGV("Parser::start(DataSource)");
-}
-
-sp<AMessage> FragmentedMP4Parser::getFormat(bool audio, bool synchronous) {
-
- while (true) {
- bool moovDone = mDoneWithMoov;
- sp<AMessage> msg = new AMessage(kWhatGetFormat, id());
- msg->setInt32("audio", audio);
-
- sp<AMessage> response;
- status_t err = msg->postAndAwaitResponse(&response);
-
- if (err != OK) {
- ALOGV("getFormat post failed: %d", err);
- return NULL;
- }
-
- if (response->findInt32("err", &err) && err != OK) {
- if (synchronous && err == -EWOULDBLOCK && !moovDone) {
- resumeIfNecessary();
- ALOGV("@getFormat parser not ready yet, retrying");
- usleep(10000);
- continue;
- }
- ALOGV("getFormat failed: %d", err);
- return NULL;
- }
-
- sp<AMessage> format;
- CHECK(response->findMessage("format", &format));
-
- ALOGV("returning format %s", format->debugString().c_str());
- return format;
- }
-}
-
-status_t FragmentedMP4Parser::seekTo(bool wantAudio, int64_t timeUs) {
- sp<AMessage> msg = new AMessage(kWhatSeekTo, id());
- msg->setInt32("audio", wantAudio);
- msg->setInt64("position", timeUs);
-
- sp<AMessage> response;
- status_t err = msg->postAndAwaitResponse(&response);
- return err;
-}
-
-bool FragmentedMP4Parser::isSeekable() const {
- while (mFirstMoofOffset == 0 && mFinalResult == OK) {
- usleep(10000);
- }
- bool seekable = mSource->isSeekable();
- for (size_t i = 0; seekable && i < mTracks.size(); i++) {
- const TrackInfo *info = &mTracks.valueAt(i);
- seekable &= !info->mSidx.empty();
- }
- return seekable;
-}
-
-status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) {
- status_t err = -EINVAL;
- ssize_t trackIndex = findTrack(wantAudio);
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- TrackInfo *info = &mTracks.editValueAt(trackIndex);
-
- int numSidxEntries = info->mSidx.size();
- int64_t totalTime = 0;
- off_t totalOffset = mFirstMoofOffset;
- for (int i = 0; i < numSidxEntries; i++) {
- const SidxEntry *se = &info->mSidx[i];
- if (totalTime + se->mDurationUs > position) {
- mBuffer->setRange(0,0);
- mBufferPos = totalOffset;
- if (mFinalResult == ERROR_END_OF_STREAM) {
- mFinalResult = OK;
- mSuspended = true; // force resume
- resumeIfNecessary();
- }
- info->mFragments.clear();
- info->mDecodingTime = totalTime * info->mMediaTimeScale / 1000000ll;
- return OK;
- }
- totalTime += se->mDurationUs;
- totalOffset += se->mSize;
- }
- }
- ALOGV("seekTo out of range");
- return err;
-}
-
-status_t FragmentedMP4Parser::dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit,
- bool synchronous) {
-
- while (true) {
- sp<AMessage> msg = new AMessage(kWhatDequeueAccessUnit, id());
- msg->setInt32("audio", audio);
-
- sp<AMessage> response;
- status_t err = msg->postAndAwaitResponse(&response);
-
- if (err != OK) {
- ALOGV("dequeue fail 1: %d", err);
- return err;
- }
-
- if (response->findInt32("err", &err) && err != OK) {
- if (synchronous && err == -EWOULDBLOCK) {
- resumeIfNecessary();
- ALOGV("Parser not ready yet, retrying");
- usleep(10000);
- continue;
- }
- ALOGV("dequeue fail 2: %d, %d", err, synchronous);
- return err;
- }
-
- CHECK(response->findBuffer("accessUnit", accessUnit));
-
- return OK;
- }
-}
-
-ssize_t FragmentedMP4Parser::findTrack(bool wantAudio) const {
- for (size_t i = 0; i < mTracks.size(); ++i) {
- const TrackInfo *info = &mTracks.valueAt(i);
-
- bool isAudio =
- info->mMediaHandlerType == FOURCC('s', 'o', 'u', 'n');
-
- bool isVideo =
- info->mMediaHandlerType == FOURCC('v', 'i', 'd', 'e');
-
- if ((wantAudio && isAudio) || (!wantAudio && !isAudio)) {
- if (info->mSampleDescs.empty()) {
- break;
- }
-
- return i;
- }
- }
-
- return -EWOULDBLOCK;
-}
-
-void FragmentedMP4Parser::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatStart:
- {
- sp<RefBase> obj;
- CHECK(msg->findObject("source", &obj));
-
- mSource = static_cast<Source *>(obj.get());
-
- mBuffer = new ABuffer(512 * 1024);
- mBuffer->setRange(0, 0);
-
- enter(0ll, 0, 0);
-
- (new AMessage(kWhatProceed, id()))->post();
- break;
- }
-
- case kWhatProceed:
- {
- CHECK(!mSuspended);
-
- status_t err = onProceed();
-
- if (err == OK) {
- if (!mSuspended) {
- msg->post();
- }
- } else if (err != -EAGAIN) {
- ALOGE("onProceed returned error %d", err);
- }
-
- break;
- }
-
- case kWhatReadMore:
- {
- size_t needed;
- CHECK(msg->findSize("needed", &needed));
-
- memmove(mBuffer->base(), mBuffer->data(), mBuffer->size());
- mBufferPos += mBuffer->offset();
- mBuffer->setRange(0, mBuffer->size());
-
- size_t maxBytesToRead = mBuffer->capacity() - mBuffer->size();
-
- if (maxBytesToRead < needed) {
- ALOGV("resizing buffer.");
-
- sp<ABuffer> newBuffer =
- new ABuffer((mBuffer->size() + needed + 1023) & ~1023);
- memcpy(newBuffer->data(), mBuffer->data(), mBuffer->size());
- newBuffer->setRange(0, mBuffer->size());
-
- mBuffer = newBuffer;
- maxBytesToRead = mBuffer->capacity() - mBuffer->size();
- }
-
- CHECK_GE(maxBytesToRead, needed);
-
- ssize_t n = mSource->readAt(
- mBufferPos + mBuffer->size(),
- mBuffer->data() + mBuffer->size(), needed);
-
- if (n < (ssize_t)needed) {
- ALOGV("Reached EOF when reading %d @ %d + %d", needed, mBufferPos, mBuffer->size());
- if (n < 0) {
- mFinalResult = n;
- } else if (n == 0) {
- mFinalResult = ERROR_END_OF_STREAM;
- } else {
- mFinalResult = ERROR_IO;
- }
- } else {
- mBuffer->setRange(0, mBuffer->size() + n);
- (new AMessage(kWhatProceed, id()))->post();
- }
-
- break;
- }
-
- case kWhatGetFormat:
- {
- int32_t wantAudio;
- CHECK(msg->findInt32("audio", &wantAudio));
-
- status_t err = -EWOULDBLOCK;
- sp<AMessage> response = new AMessage;
-
- ssize_t trackIndex = findTrack(wantAudio);
-
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- TrackInfo *info = &mTracks.editValueAt(trackIndex);
-
- sp<AMessage> format = info->mSampleDescs.itemAt(0).mFormat;
- if (info->mSidxDuration) {
- format->setInt64("durationUs", info->mSidxDuration);
- } else {
- // this is probably going to be zero. Oh well...
- format->setInt64("durationUs",
- 1000000ll * info->mDuration / info->mMediaTimeScale);
- }
- response->setMessage(
- "format", format);
-
- err = OK;
- }
-
- response->setInt32("err", err);
-
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
- response->postReply(replyID);
- break;
- }
-
- case kWhatDequeueAccessUnit:
- {
- int32_t wantAudio;
- CHECK(msg->findInt32("audio", &wantAudio));
-
- status_t err = -EWOULDBLOCK;
- sp<AMessage> response = new AMessage;
-
- ssize_t trackIndex = findTrack(wantAudio);
-
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- sp<ABuffer> accessUnit;
- err = onDequeueAccessUnit(trackIndex, &accessUnit);
-
- if (err == OK) {
- response->setBuffer("accessUnit", accessUnit);
- }
- }
-
- response->setInt32("err", err);
-
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
- response->postReply(replyID);
- break;
- }
-
- case kWhatSeekTo:
- {
- ALOGV("kWhatSeekTo");
- int32_t wantAudio;
- CHECK(msg->findInt32("audio", &wantAudio));
- int64_t position;
- CHECK(msg->findInt64("position", &position));
-
- status_t err = -EWOULDBLOCK;
- sp<AMessage> response = new AMessage;
-
- ssize_t trackIndex = findTrack(wantAudio);
-
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- err = onSeekTo(wantAudio, position);
- }
- response->setInt32("err", err);
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
- response->postReply(replyID);
- break;
- }
- default:
- TRESPASS();
- }
-}
-
-status_t FragmentedMP4Parser::onProceed() {
- status_t err;
-
- if ((err = need(8)) != OK) {
- return err;
- }
-
- uint64_t size = readU32(0);
- uint32_t type = readU32(4);
-
- size_t offset = 8;
-
- if (size == 1) {
- if ((err = need(16)) != OK) {
- return err;
- }
-
- size = readU64(offset);
- offset += 8;
- }
-
- uint8_t userType[16];
-
- if (type == FOURCC('u', 'u', 'i', 'd')) {
- if ((err = need(offset + 16)) != OK) {
- return err;
- }
-
- memcpy(userType, mBuffer->data() + offset, 16);
- offset += 16;
- }
-
- CHECK(!mStack.isEmpty());
- uint32_t ptype = mStack.itemAt(mStack.size() - 1).mType;
-
- static const size_t kNumDispatchers =
- sizeof(kDispatchTable) / sizeof(kDispatchTable[0]);
-
- size_t i;
- for (i = 0; i < kNumDispatchers; ++i) {
- if (kDispatchTable[i].mType == type
- && kDispatchTable[i].mParentType == ptype) {
- break;
- }
- }
-
- // SampleEntry boxes are container boxes that start with a variable
- // amount of data depending on the media handler type.
- // We don't look inside 'hint' type SampleEntry boxes.
-
- bool isSampleEntryBox =
- (ptype == FOURCC('s', 't', 's', 'd'))
- && editTrack(mCurrentTrackID)->mMediaHandlerType
- != FOURCC('h', 'i', 'n', 't');
-
- if ((i < kNumDispatchers && kDispatchTable[i].mHandler == 0)
- || isSampleEntryBox || ptype == FOURCC('i', 'l', 's', 't')) {
- // This is a container box.
- if (type == FOURCC('m', 'o', 'o', 'f')) {
- if (mFirstMoofOffset == 0) {
- ALOGV("first moof @ %08x", mBufferPos + offset);
- mFirstMoofOffset = mBufferPos + offset - 8; // point at the size
- }
- }
- if (type == FOURCC('m', 'e', 't', 'a')) {
- if ((err = need(offset + 4)) < OK) {
- return err;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- offset += 4;
- } else if (type == FOURCC('s', 't', 's', 'd')) {
- if ((err = need(offset + 8)) < OK) {
- return err;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- if (readU32(offset + 4) == 0) {
- // We need at least some entries.
- return -EINVAL;
- }
-
- offset += 8;
- } else if (isSampleEntryBox) {
- size_t headerSize;
-
- switch (editTrack(mCurrentTrackID)->mMediaHandlerType) {
- case FOURCC('v', 'i', 'd', 'e'):
- {
- // 8 bytes SampleEntry + 70 bytes VisualSampleEntry
- headerSize = 78;
- break;
- }
-
- case FOURCC('s', 'o', 'u', 'n'):
- {
- // 8 bytes SampleEntry + 20 bytes AudioSampleEntry
- headerSize = 28;
- break;
- }
-
- case FOURCC('m', 'e', 't', 'a'):
- {
- headerSize = 8; // 8 bytes SampleEntry
- break;
- }
-
- default:
- TRESPASS();
- }
-
- if (offset + headerSize > size) {
- return -EINVAL;
- }
-
- if ((err = need(offset + headerSize)) != OK) {
- return err;
- }
-
- switch (editTrack(mCurrentTrackID)->mMediaHandlerType) {
- case FOURCC('v', 'i', 'd', 'e'):
- {
- err = parseVisualSampleEntry(
- type, offset, offset + headerSize);
- break;
- }
-
- case FOURCC('s', 'o', 'u', 'n'):
- {
- err = parseAudioSampleEntry(
- type, offset, offset + headerSize);
- break;
- }
-
- case FOURCC('m', 'e', 't', 'a'):
- {
- err = OK;
- break;
- }
-
- default:
- TRESPASS();
- }
-
- if (err != OK) {
- return err;
- }
-
- offset += headerSize;
- }
-
- skip(offset);
-
- ALOGV("%sentering box of type '%s'",
- IndentString(mStack.size()), Fourcc2String(type));
-
- enter(mBufferPos - offset, type, size - offset);
- } else {
- if (!fitsContainer(size)) {
- return -EINVAL;
- }
-
- if (i < kNumDispatchers && kDispatchTable[i].mHandler != 0) {
- // We have a handler for this box type.
-
- if ((err = need(size)) != OK) {
- return err;
- }
-
- ALOGV("%sparsing box of type '%s'",
- IndentString(mStack.size()), Fourcc2String(type));
-
- if ((err = (this->*kDispatchTable[i].mHandler)(
- type, offset, size)) != OK) {
- return err;
- }
- } else {
- // Unknown box type
-
- ALOGV("%sskipping box of type '%s', size %llu",
- IndentString(mStack.size()),
- Fourcc2String(type), size);
-
- }
-
- skip(size);
- }
-
- return OK;
-}
-
-// static
-int FragmentedMP4Parser::CompareSampleLocation(
- const SampleInfo &sample, const MediaDataInfo &mdatInfo) {
- if (sample.mOffset + sample.mSize < mdatInfo.mOffset) {
- return -1;
- }
-
- if (sample.mOffset >= mdatInfo.mOffset + mdatInfo.mBuffer->size()) {
- return 1;
- }
-
- // Otherwise make sure the sample is completely contained within this
- // media data block.
-
- CHECK_GE(sample.mOffset, mdatInfo.mOffset);
-
- CHECK_LE(sample.mOffset + sample.mSize,
- mdatInfo.mOffset + mdatInfo.mBuffer->size());
-
- return 0;
-}
-
-void FragmentedMP4Parser::resumeIfNecessary() {
- if (!mSuspended) {
- return;
- }
-
- ALOGV("resuming.");
-
- mSuspended = false;
- (new AMessage(kWhatProceed, id()))->post();
-}
-
-status_t FragmentedMP4Parser::getSample(
- TrackInfo *info, sp<TrackFragment> *fragment, SampleInfo *sampleInfo) {
- for (;;) {
- if (info->mFragments.empty()) {
- if (mFinalResult != OK) {
- return mFinalResult;
- }
-
- resumeIfNecessary();
- return -EWOULDBLOCK;
- }
-
- *fragment = *info->mFragments.begin();
-
- status_t err = (*fragment)->getSample(sampleInfo);
-
- if (err == OK) {
- return OK;
- } else if (err != ERROR_END_OF_STREAM) {
- return err;
- }
-
- // Really, end of this fragment...
-
- info->mFragments.erase(info->mFragments.begin());
- }
-}
-
-status_t FragmentedMP4Parser::onDequeueAccessUnit(
- size_t trackIndex, sp<ABuffer> *accessUnit) {
- TrackInfo *info = &mTracks.editValueAt(trackIndex);
-
- sp<TrackFragment> fragment;
- SampleInfo sampleInfo;
- status_t err = getSample(info, &fragment, &sampleInfo);
-
- if (err == -EWOULDBLOCK) {
- resumeIfNecessary();
- return err;
- } else if (err != OK) {
- return err;
- }
-
- err = -EWOULDBLOCK;
-
- bool checkDroppable = false;
-
- for (size_t i = 0; i < mMediaData.size(); ++i) {
- const MediaDataInfo &mdatInfo = mMediaData.itemAt(i);
-
- int cmp = CompareSampleLocation(sampleInfo, mdatInfo);
-
- if (cmp < 0 && !mSource->isSeekable()) {
- return -EPIPE;
- } else if (cmp == 0) {
- if (i > 0) {
- checkDroppable = true;
- }
-
- err = makeAccessUnit(info, sampleInfo, mdatInfo, accessUnit);
- break;
- }
- }
-
- if (err != OK) {
- return err;
- }
-
- fragment->advance();
-
- if (!mMediaData.empty() && checkDroppable) {
- size_t numDroppable = 0;
- bool done = false;
-
-        // XXX FIXME: if one of the tracks is not advanced (e.g. if you play an audio+video
-        // file with sf2), then mMediaData will not be pruned and will keep growing
- for (size_t i = 0; !done && i < mMediaData.size(); ++i) {
- const MediaDataInfo &mdatInfo = mMediaData.itemAt(i);
-
- for (size_t j = 0; j < mTracks.size(); ++j) {
- TrackInfo *info = &mTracks.editValueAt(j);
-
- sp<TrackFragment> fragment;
- SampleInfo sampleInfo;
- err = getSample(info, &fragment, &sampleInfo);
-
- if (err != OK) {
- done = true;
- break;
- }
-
- int cmp = CompareSampleLocation(sampleInfo, mdatInfo);
-
- if (cmp <= 0) {
- done = true;
- break;
- }
- }
-
- if (!done) {
- ++numDroppable;
- }
- }
-
- if (numDroppable > 0) {
- mMediaData.removeItemsAt(0, numDroppable);
-
- if (mMediaData.size() < 5) {
- resumeIfNecessary();
- }
- }
- }
-
- return err;
-}
-
-static size_t parseNALSize(size_t nalLengthSize, const uint8_t *data) {
- switch (nalLengthSize) {
- case 1:
- return *data;
- case 2:
- return U16_AT(data);
- case 3:
- return ((size_t)data[0] << 16) | U16_AT(&data[1]);
- case 4:
- return U32_AT(data);
- }
-
-    // This cannot happen: mNALLengthSize is derived by adding 1 to a
-    // 2-bit field, so it is always in the range [1, 4].
- TRESPASS();
-
- return 0;
-}
-
-status_t FragmentedMP4Parser::makeAccessUnit(
- TrackInfo *info,
- const SampleInfo &sample,
- const MediaDataInfo &mdatInfo,
- sp<ABuffer> *accessUnit) {
- if (sample.mSampleDescIndex < 1
- || sample.mSampleDescIndex > info->mSampleDescs.size()) {
- return ERROR_MALFORMED;
- }
-
- int64_t presentationTimeUs =
- 1000000ll * sample.mPresentationTime / info->mMediaTimeScale;
-
- const SampleDescription &sampleDesc =
- info->mSampleDescs.itemAt(sample.mSampleDescIndex - 1);
-
- size_t nalLengthSize;
- if (!sampleDesc.mFormat->findSize("nal-length-size", &nalLengthSize)) {
- *accessUnit = new ABuffer(sample.mSize);
-
- memcpy((*accessUnit)->data(),
- mdatInfo.mBuffer->data() + (sample.mOffset - mdatInfo.mOffset),
- sample.mSize);
-
- (*accessUnit)->meta()->setInt64("timeUs", presentationTimeUs);
- if (IsIDR(*accessUnit)) {
- (*accessUnit)->meta()->setInt32("is-sync-frame", 1);
- }
-
- return OK;
- }
-
- const uint8_t *srcPtr =
- mdatInfo.mBuffer->data() + (sample.mOffset - mdatInfo.mOffset);
-
- for (int i = 0; i < 2 ; ++i) {
- size_t srcOffset = 0;
- size_t dstOffset = 0;
-
- while (srcOffset < sample.mSize) {
- if (srcOffset + nalLengthSize > sample.mSize) {
- return ERROR_MALFORMED;
- }
-
- size_t nalSize = parseNALSize(nalLengthSize, &srcPtr[srcOffset]);
- srcOffset += nalLengthSize;
-
- if (srcOffset + nalSize > sample.mSize) {
- return ERROR_MALFORMED;
- }
-
- if (i == 1) {
- memcpy((*accessUnit)->data() + dstOffset,
- "\x00\x00\x00\x01",
- 4);
-
- memcpy((*accessUnit)->data() + dstOffset + 4,
- srcPtr + srcOffset,
- nalSize);
- }
-
- srcOffset += nalSize;
- dstOffset += nalSize + 4;
- }
-
- if (i == 0) {
- (*accessUnit) = new ABuffer(dstOffset);
- (*accessUnit)->meta()->setInt64(
- "timeUs", presentationTimeUs);
- }
- }
- if (IsIDR(*accessUnit)) {
- (*accessUnit)->meta()->setInt32("is-sync-frame", 1);
- }
-
- return OK;
-}
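
makeAccessUnit() above rewrites a length-prefixed AVC sample into Annex-B form in two passes: the first pass only measures the output, the second copies start codes and NAL payloads into the allocated buffer. Below is a minimal single-pass sketch of the same idea, assuming a fixed 4-byte NAL length field; toAnnexB is a hypothetical helper, not part of these sources.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Convert one length-prefixed AVC sample to Annex-B (start-code) form.
    // Assumes a 4-byte NAL length field; returns an empty vector on truncation.
    static std::vector<uint8_t> toAnnexB(const uint8_t *src, size_t size) {
        static const uint8_t kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
        std::vector<uint8_t> out;
        size_t offset = 0;
        while (offset + 4 <= size) {
            uint32_t nalSize = ((uint32_t)src[offset] << 24) | (src[offset + 1] << 16)
                    | (src[offset + 2] << 8) | src[offset + 3];
            offset += 4;
            if (nalSize > size - offset) {
                return std::vector<uint8_t>();  // truncated NAL unit
            }
            out.insert(out.end(), kStartCode, kStartCode + 4);
            out.insert(out.end(), src + offset, src + offset + nalSize);
            offset += nalSize;
        }
        return out;
    }
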
-
-status_t FragmentedMP4Parser::need(size_t size) {
- if (!fitsContainer(size)) {
- return -EINVAL;
- }
-
- if (size <= mBuffer->size()) {
- return OK;
- }
-
- sp<AMessage> msg = new AMessage(kWhatReadMore, id());
- msg->setSize("needed", size - mBuffer->size());
- msg->post();
-
- // ALOGV("need(%d) returning -EAGAIN, only have %d", size, mBuffer->size());
-
- return -EAGAIN;
-}
-
-void FragmentedMP4Parser::enter(off64_t offset, uint32_t type, uint64_t size) {
- Container container;
- container.mOffset = offset;
- container.mType = type;
- container.mExtendsToEOF = (size == 0);
- container.mBytesRemaining = size;
-
- mStack.push(container);
-}
-
-bool FragmentedMP4Parser::fitsContainer(uint64_t size) const {
- CHECK(!mStack.isEmpty());
- const Container &container = mStack.itemAt(mStack.size() - 1);
-
- return container.mExtendsToEOF || size <= container.mBytesRemaining;
-}
-
-uint16_t FragmentedMP4Parser::readU16(size_t offset) {
- CHECK_LE(offset + 2, mBuffer->size());
-
- const uint8_t *ptr = mBuffer->data() + offset;
- return (ptr[0] << 8) | ptr[1];
-}
-
-uint32_t FragmentedMP4Parser::readU32(size_t offset) {
- CHECK_LE(offset + 4, mBuffer->size());
-
- const uint8_t *ptr = mBuffer->data() + offset;
- return (ptr[0] << 24) | (ptr[1] << 16) | (ptr[2] << 8) | ptr[3];
-}
-
-uint64_t FragmentedMP4Parser::readU64(size_t offset) {
- return (((uint64_t)readU32(offset)) << 32) | readU32(offset + 4);
-}
-
-void FragmentedMP4Parser::skip(off_t distance) {
- CHECK(!mStack.isEmpty());
- for (size_t i = mStack.size(); i-- > 0;) {
- Container *container = &mStack.editItemAt(i);
- if (!container->mExtendsToEOF) {
- CHECK_LE(distance, (off_t)container->mBytesRemaining);
-
- container->mBytesRemaining -= distance;
-
- if (container->mBytesRemaining == 0) {
- ALOGV("%sleaving box of type '%s'",
- IndentString(mStack.size() - 1),
- Fourcc2String(container->mType));
-
-#if 0
- if (container->mType == FOURCC('s', 't', 's', 'd')) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
- for (size_t i = 0;
- i < trackInfo->mSampleDescs.size(); ++i) {
- ALOGI("format #%d: %s",
- i,
- trackInfo->mSampleDescs.itemAt(i)
- .mFormat->debugString().c_str());
- }
- }
-#endif
-
- if (container->mType == FOURCC('s', 't', 'b', 'l')) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- trackInfo->mStaticFragment->signalCompletion();
-
- CHECK(trackInfo->mFragments.empty());
- trackInfo->mFragments.push_back(trackInfo->mStaticFragment);
- trackInfo->mStaticFragment.clear();
- } else if (container->mType == FOURCC('t', 'r', 'a', 'f')) {
- TrackInfo *trackInfo =
- editTrack(mTrackFragmentHeaderInfo.mTrackID);
-
- const sp<TrackFragment> &fragment =
- *--trackInfo->mFragments.end();
-
- static_cast<DynamicTrackFragment *>(
- fragment.get())->signalCompletion();
- } else if (container->mType == FOURCC('m', 'o', 'o', 'v')) {
- mDoneWithMoov = true;
- }
-
- container = NULL;
- mStack.removeItemsAt(i);
- }
- }
- }
-
- if (distance < (off_t)mBuffer->size()) {
- mBuffer->setRange(mBuffer->offset() + distance, mBuffer->size() - distance);
- mBufferPos += distance;
- return;
- }
-
- mBuffer->setRange(0, 0);
- mBufferPos += distance;
-}
-
-status_t FragmentedMP4Parser::parseTrackHeader(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- uint32_t flags = readU32(offset);
-
- uint32_t version = flags >> 24;
- flags &= 0xffffff;
-
- uint32_t trackID;
- uint64_t duration;
-
- if (version == 1) {
- if (offset + 36 > size) {
- return -EINVAL;
- }
-
- trackID = readU32(offset + 20);
- duration = readU64(offset + 28);
-
- offset += 36;
- } else if (version == 0) {
- if (offset + 24 > size) {
- return -EINVAL;
- }
-
- trackID = readU32(offset + 12);
- duration = readU32(offset + 20);
-
- offset += 24;
- } else {
- return -EINVAL;
- }
-
- TrackInfo *info = editTrack(trackID, true /* createIfNecessary */);
- info->mFlags = flags;
- info->mDuration = duration;
- if (info->mDuration == 0xffffffff) {
- // ffmpeg sets this to -1, which is incorrect.
- info->mDuration = 0;
- }
-
- info->mStaticFragment = new StaticTrackFragment;
-
- mCurrentTrackID = trackID;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseMediaHeader(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- uint32_t versionAndFlags = readU32(offset);
-
- if (versionAndFlags & 0xffffff) {
- return ERROR_MALFORMED;
- }
-
- uint32_t version = versionAndFlags >> 24;
-
- TrackInfo *info = editTrack(mCurrentTrackID);
-
- if (version == 1) {
- if (offset + 4 + 32 > size) {
- return -EINVAL;
- }
- info->mMediaTimeScale = U32_AT(mBuffer->data() + offset + 20);
- } else if (version == 0) {
- if (offset + 4 + 20 > size) {
- return -EINVAL;
- }
- info->mMediaTimeScale = U32_AT(mBuffer->data() + offset + 12);
- } else {
- return ERROR_MALFORMED;
- }
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseMediaHandler(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 12 > size) {
- return -EINVAL;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- uint32_t handlerType = readU32(offset + 8);
-
- switch (handlerType) {
- case FOURCC('v', 'i', 'd', 'e'):
- case FOURCC('s', 'o', 'u', 'n'):
- case FOURCC('h', 'i', 'n', 't'):
- case FOURCC('m', 'e', 't', 'a'):
- break;
-
- default:
- return -EINVAL;
- }
-
- editTrack(mCurrentTrackID)->mMediaHandlerType = handlerType;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseVisualSampleEntry(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 78 > size) {
- return -EINVAL;
- }
-
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- trackInfo->mSampleDescs.push();
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- sampleDesc->mType = type;
- sampleDesc->mDataRefIndex = readU16(offset + 6);
-
- sp<AMessage> format = new AMessage;
-
- switch (type) {
- case FOURCC('a', 'v', 'c', '1'):
- format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
- break;
- case FOURCC('m', 'p', '4', 'v'):
- format->setString("mime", MEDIA_MIMETYPE_VIDEO_MPEG4);
- break;
- case FOURCC('s', '2', '6', '3'):
- case FOURCC('h', '2', '6', '3'):
- case FOURCC('H', '2', '6', '3'):
- format->setString("mime", MEDIA_MIMETYPE_VIDEO_H263);
- break;
- default:
- format->setString("mime", "application/octet-stream");
- break;
- }
-
- format->setInt32("width", readU16(offset + 8 + 16));
- format->setInt32("height", readU16(offset + 8 + 18));
-
- sampleDesc->mFormat = format;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseAudioSampleEntry(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 28 > size) {
- return -EINVAL;
- }
-
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- trackInfo->mSampleDescs.push();
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- sampleDesc->mType = type;
- sampleDesc->mDataRefIndex = readU16(offset + 6);
-
- sp<AMessage> format = new AMessage;
-
- format->setInt32("channel-count", readU16(offset + 8 + 8));
- format->setInt32("sample-size", readU16(offset + 8 + 10));
- format->setInt32("sample-rate", readU32(offset + 8 + 16) / 65536.0f);
-
- switch (type) {
- case FOURCC('m', 'p', '4', 'a'):
- format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
- break;
-
- case FOURCC('s', 'a', 'm', 'r'):
- format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
- format->setInt32("channel-count", 1);
- format->setInt32("sample-rate", 8000);
- break;
-
- case FOURCC('s', 'a', 'w', 'b'):
- format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
- format->setInt32("channel-count", 1);
- format->setInt32("sample-rate", 16000);
- break;
- default:
- format->setString("mime", "application/octet-stream");
- break;
- }
-
- sampleDesc->mFormat = format;
-
- return OK;
-}
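
Note on the sample-rate line above: the AudioSampleEntry stores the rate as a 16.16 fixed-point value, so dividing by 65536 (equivalently, shifting right by 16) yields the integer rate handed to setInt32. A quick worked example with an assumed raw value:

    // readU32(offset + 8 + 16) == 0xAC440000
    // 0xAC440000 / 65536 = 0xAC44 = 44100 Hz
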
-
-static void addCodecSpecificData(
- const sp<AMessage> &format, int32_t index,
- const void *data, size_t size,
- bool insertStartCode = false) {
- sp<ABuffer> csd = new ABuffer(insertStartCode ? size + 4 : size);
-
- memcpy(csd->data() + (insertStartCode ? 4 : 0), data, size);
-
- if (insertStartCode) {
- memcpy(csd->data(), "\x00\x00\x00\x01", 4);
- }
-
- csd->meta()->setInt32("csd", true);
- csd->meta()->setInt64("timeUs", 0ll);
-
- format->setBuffer(StringPrintf("csd-%d", index).c_str(), csd);
-}
-
-status_t FragmentedMP4Parser::parseSampleSizes(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseSampleSizes(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseCompactSampleSizes(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseCompactSampleSizes(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseSampleToChunk(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseSampleToChunk(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseChunkOffsets(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseChunkOffsets(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseChunkOffsets64(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseChunkOffsets64(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseAVCCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- if (sampleDesc->mType != FOURCC('a', 'v', 'c', '1')) {
- return -EINVAL;
- }
-
- const uint8_t *ptr = mBuffer->data() + offset;
-
- size -= offset;
- offset = 0;
-
- if (size < 7 || ptr[0] != 0x01) {
- return ERROR_MALFORMED;
- }
-
- sampleDesc->mFormat->setSize("nal-length-size", 1 + (ptr[4] & 3));
-
- size_t numSPS = ptr[5] & 31;
-
- ptr += 6;
- size -= 6;
-
- for (size_t i = 0; i < numSPS; ++i) {
- if (size < 2) {
- return ERROR_MALFORMED;
- }
-
- size_t length = U16_AT(ptr);
-
- ptr += 2;
- size -= 2;
-
- if (size < length) {
- return ERROR_MALFORMED;
- }
-
- addCodecSpecificData(
- sampleDesc->mFormat, i, ptr, length,
- true /* insertStartCode */);
-
- ptr += length;
- size -= length;
- }
-
- if (size < 1) {
- return ERROR_MALFORMED;
- }
-
- size_t numPPS = *ptr;
- ++ptr;
- --size;
-
- for (size_t i = 0; i < numPPS; ++i) {
- if (size < 2) {
- return ERROR_MALFORMED;
- }
-
- size_t length = U16_AT(ptr);
-
- ptr += 2;
- size -= 2;
-
- if (size < length) {
- return ERROR_MALFORMED;
- }
-
- addCodecSpecificData(
- sampleDesc->mFormat, numSPS + i, ptr, length,
- true /* insertStartCode */);
-
- ptr += length;
- size -= length;
- }
-
- return OK;
-}
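
For reference, the AVCDecoderConfigurationRecord ('avcC') walked above is laid out as: configurationVersion, profile, compatibility and level bytes, a byte whose low 2 bits hold lengthSizeMinusOne, a byte whose low 5 bits hold the SPS count, the length-prefixed SPS blobs, then a PPS count byte and the length-prefixed PPS blobs. A hedged standalone sketch of the same walk; countAvcParameterSets is a hypothetical helper, not part of these sources.

    #include <cstddef>
    #include <cstdint>

    // Returns the total number of SPS + PPS entries, or -1 on malformed data.
    static int countAvcParameterSets(const uint8_t *data, size_t size) {
        if (size < 7 || data[0] != 0x01) {
            return -1;  // not a version-1 avcC record
        }
        size_t numSPS = data[5] & 0x1f;
        size_t offset = 6;
        for (size_t i = 0; i < numSPS; ++i) {
            if (offset + 2 > size) return -1;
            size_t len = (data[offset] << 8) | data[offset + 1];
            offset += 2;
            if (len > size - offset) return -1;
            offset += len;
        }
        if (offset >= size) return -1;
        size_t numPPS = data[offset++];
        for (size_t i = 0; i < numPPS; ++i) {
            if (offset + 2 > size) return -1;
            size_t len = (data[offset] << 8) | data[offset + 1];
            offset += 2;
            if (len > size - offset) return -1;
            offset += len;
        }
        return (int)(numSPS + numPPS);
    }
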
-
-status_t FragmentedMP4Parser::parseESDSCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- if (sampleDesc->mType != FOURCC('m', 'p', '4', 'a')
- && sampleDesc->mType != FOURCC('m', 'p', '4', 'v')) {
- return -EINVAL;
- }
-
- const uint8_t *ptr = mBuffer->data() + offset;
-
- size -= offset;
- offset = 0;
-
- if (size < 4) {
- return -EINVAL;
- }
-
- if (U32_AT(ptr) != 0) {
- return -EINVAL;
- }
-
- ptr += 4;
-    size -= 4;
-
- ESDS esds(ptr, size);
-
- uint8_t objectTypeIndication;
- if (esds.getObjectTypeIndication(&objectTypeIndication) != OK) {
- return ERROR_MALFORMED;
- }
-
- const uint8_t *csd;
- size_t csd_size;
- if (esds.getCodecSpecificInfo(
- (const void **)&csd, &csd_size) != OK) {
- return ERROR_MALFORMED;
- }
-
- addCodecSpecificData(sampleDesc->mFormat, 0, csd, csd_size);
-
- if (sampleDesc->mType != FOURCC('m', 'p', '4', 'a')) {
- return OK;
- }
-
- if (csd_size == 0) {
- // There's no further information, i.e. no codec specific data
- // Let's assume that the information provided in the mpeg4 headers
- // is accurate and hope for the best.
-
- return OK;
- }
-
- if (csd_size < 2) {
- return ERROR_MALFORMED;
- }
-
- uint32_t objectType = csd[0] >> 3;
-
- if (objectType == 31) {
- return ERROR_UNSUPPORTED;
- }
-
- uint32_t freqIndex = (csd[0] & 7) << 1 | (csd[1] >> 7);
- int32_t sampleRate = 0;
- int32_t numChannels = 0;
- if (freqIndex == 15) {
- if (csd_size < 5) {
- return ERROR_MALFORMED;
- }
-
- sampleRate = (csd[1] & 0x7f) << 17
- | csd[2] << 9
- | csd[3] << 1
- | (csd[4] >> 7);
-
- numChannels = (csd[4] >> 3) & 15;
- } else {
- static uint32_t kSamplingRate[] = {
- 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
- 16000, 12000, 11025, 8000, 7350
- };
-
- if (freqIndex == 13 || freqIndex == 14) {
- return ERROR_MALFORMED;
- }
-
- sampleRate = kSamplingRate[freqIndex];
- numChannels = (csd[1] >> 3) & 15;
- }
-
- if (numChannels == 0) {
- return ERROR_UNSUPPORTED;
- }
-
- sampleDesc->mFormat->setInt32("sample-rate", sampleRate);
- sampleDesc->mFormat->setInt32("channel-count", numChannels);
-
- return OK;
-}
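
The bit extraction above follows the AAC AudioSpecificConfig layout: 5 bits of audioObjectType, 4 bits of samplingFrequencyIndex (index 15 means a 24-bit explicit rate follows), then 4 bits of channelConfiguration. A small hedged sketch of the common two-byte case with a worked example; decodeAscHeader is a hypothetical name.

    #include <cstdint>

    struct AacConfig {
        uint32_t objectType;
        uint32_t freqIndex;
        uint32_t channels;
    };

    // Decode only the simple two-byte AudioSpecificConfig header; callers must
    // still handle freqIndex == 15 (explicit rate) and objectType == 31.
    static AacConfig decodeAscHeader(uint8_t b0, uint8_t b1) {
        AacConfig cfg;
        cfg.objectType = b0 >> 3;                      // 5 bits
        cfg.freqIndex  = ((b0 & 7) << 1) | (b1 >> 7);  // 4 bits
        cfg.channels   = (b1 >> 3) & 15;               // 4 bits
        return cfg;
    }
    // Example: bytes 0x12 0x10 -> objectType 2 (AAC-LC), freqIndex 4 (44100 Hz),
    // channelConfiguration 2.
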
-
-status_t FragmentedMP4Parser::parseMediaData(
- uint32_t type, size_t offset, uint64_t size) {
- ALOGV("skipping 'mdat' chunk at offsets 0x%08lx-0x%08llx.",
- mBufferPos + offset, mBufferPos + size);
-
- sp<ABuffer> buffer = new ABuffer(size - offset);
- memcpy(buffer->data(), mBuffer->data() + offset, size - offset);
-
- mMediaData.push();
- MediaDataInfo *info = &mMediaData.editItemAt(mMediaData.size() - 1);
- info->mBuffer = buffer;
- info->mOffset = mBufferPos + offset;
-
- if (mMediaData.size() > 10) {
- ALOGV("suspending for now.");
- mSuspended = true;
- }
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseSegmentIndex(
- uint32_t type, size_t offset, uint64_t size) {
- ALOGV("sidx box type %d, offset %d, size %d", type, int(offset), int(size));
-// AString sidxstr;
-// hexdump(mBuffer->data() + offset, size, 0 /* indent */, &sidxstr);
-// ALOGV("raw sidx:");
-// ALOGV("%s", sidxstr.c_str());
- if (offset + 12 > size) {
- return -EINVAL;
- }
-
- uint32_t flags = readU32(offset);
-
- uint32_t version = flags >> 24;
- flags &= 0xffffff;
-
- ALOGV("sidx version %d", version);
-
- uint32_t referenceId = readU32(offset + 4);
- uint32_t timeScale = readU32(offset + 8);
- ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale);
-
- uint64_t earliestPresentationTime;
- uint64_t firstOffset;
-
- offset += 12;
-
- if (version == 0) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
- earliestPresentationTime = readU32(offset);
- firstOffset = readU32(offset + 4);
- offset += 8;
- } else {
- if (offset + 16 > size) {
- return -EINVAL;
- }
- earliestPresentationTime = readU64(offset);
- firstOffset = readU64(offset + 8);
- offset += 16;
- }
- ALOGV("sidx pres/off: %Ld/%Ld", earliestPresentationTime, firstOffset);
-
- if (offset + 4 > size) {
- return -EINVAL;
- }
- if (readU16(offset) != 0) { // reserved
- return -EINVAL;
- }
- int32_t referenceCount = readU16(offset + 2);
- offset += 4;
- ALOGV("refcount: %d", referenceCount);
-
- if (offset + referenceCount * 12 > size) {
- return -EINVAL;
- }
-
- TrackInfo *info = editTrack(mCurrentTrackID);
- uint64_t total_duration = 0;
- for (int i = 0; i < referenceCount; i++) {
- uint32_t d1 = readU32(offset);
- uint32_t d2 = readU32(offset + 4);
- uint32_t d3 = readU32(offset + 8);
-
- if (d1 & 0x80000000) {
- ALOGW("sub-sidx boxes not supported yet");
- }
- bool sap = d3 & 0x80000000;
-        uint32_t saptype = (d3 >> 28) & 7;
- if (!sap || saptype > 2) {
- ALOGW("not a stream access point, or unsupported type");
- }
- total_duration += d2;
- offset += 12;
- ALOGV(" item %d, %08x %08x %08x", i, d1, d2, d3);
- SidxEntry se;
- se.mSize = d1 & 0x7fffffff;
- se.mDurationUs = 1000000LL * d2 / timeScale;
- info->mSidx.add(se);
- }
-
- info->mSidxDuration = total_duration * 1000000 / timeScale;
- ALOGV("duration: %lld", info->mSidxDuration);
- return OK;
-}
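
A quick worked example of the duration scaling above, using assumed numbers:

    // timeScale = 90000, d2 (reference duration) = 270000
    //   se.mDurationUs = 1000000 * 270000 / 90000 = 3000000 us (3 s)
    //   mSidxDuration applies the same scaling to total_duration.
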
-
-status_t FragmentedMP4Parser::parseTrackExtends(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 24 > size) {
- return -EINVAL;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- uint32_t trackID = readU32(offset + 4);
-
- TrackInfo *info = editTrack(trackID, true /* createIfNecessary */);
- info->mDefaultSampleDescriptionIndex = readU32(offset + 8);
- info->mDefaultSampleDuration = readU32(offset + 12);
- info->mDefaultSampleSize = readU32(offset + 16);
- info->mDefaultSampleFlags = readU32(offset + 20);
-
- return OK;
-}
-
-FragmentedMP4Parser::TrackInfo *FragmentedMP4Parser::editTrack(
- uint32_t trackID, bool createIfNecessary) {
- ssize_t i = mTracks.indexOfKey(trackID);
-
- if (i >= 0) {
- return &mTracks.editValueAt(i);
- }
-
- if (!createIfNecessary) {
- return NULL;
- }
-
- TrackInfo info;
- info.mTrackID = trackID;
- info.mFlags = 0;
- info.mDuration = 0xffffffff;
- info.mSidxDuration = 0;
- info.mMediaTimeScale = 0;
- info.mMediaHandlerType = 0;
- info.mDefaultSampleDescriptionIndex = 0;
- info.mDefaultSampleDuration = 0;
- info.mDefaultSampleSize = 0;
- info.mDefaultSampleFlags = 0;
-
- info.mDecodingTime = 0;
-
- mTracks.add(trackID, info);
- return &mTracks.editValueAt(mTracks.indexOfKey(trackID));
-}
-
-status_t FragmentedMP4Parser::parseTrackFragmentHeader(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
-
- uint32_t flags = readU32(offset);
-
- if (flags & 0xff000000) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mFlags = flags;
-
- mTrackFragmentHeaderInfo.mTrackID = readU32(offset + 4);
- offset += 8;
-
- if (flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mBaseDataOffset = readU64(offset);
- offset += 8;
- }
-
- if (flags & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mSampleDescriptionIndex = readU32(offset);
- offset += 4;
- }
-
- if (flags & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mDefaultSampleDuration = readU32(offset);
- offset += 4;
- }
-
- if (flags & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mDefaultSampleSize = readU32(offset);
- offset += 4;
- }
-
- if (flags & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mDefaultSampleFlags = readU32(offset);
- offset += 4;
- }
-
- if (!(flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent)) {
- // This should point to the position of the first byte of the
- // enclosing 'moof' container for the first track and
- // the end of the data of the preceding fragment for subsequent
- // tracks.
-
- CHECK_GE(mStack.size(), 2u);
-
- mTrackFragmentHeaderInfo.mBaseDataOffset =
- mStack.itemAt(mStack.size() - 2).mOffset;
-
- // XXX TODO: This does not do the right thing for the 2nd and
- // subsequent tracks yet.
- }
-
- mTrackFragmentHeaderInfo.mDataOffset =
- mTrackFragmentHeaderInfo.mBaseDataOffset;
-
- TrackInfo *trackInfo = editTrack(mTrackFragmentHeaderInfo.mTrackID);
-
- if (trackInfo->mFragments.empty()
- || (*trackInfo->mFragments.begin())->complete()) {
- trackInfo->mFragments.push_back(new DynamicTrackFragment);
- }
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseTrackFragmentRun(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
-
- enum {
- kDataOffsetPresent = 0x01,
- kFirstSampleFlagsPresent = 0x04,
- kSampleDurationPresent = 0x100,
- kSampleSizePresent = 0x200,
- kSampleFlagsPresent = 0x400,
- kSampleCompositionTimeOffsetPresent = 0x800,
- };
-
- uint32_t flags = readU32(offset);
-
- if (flags & 0xff000000) {
- return -EINVAL;
- }
-
- if ((flags & kFirstSampleFlagsPresent) && (flags & kSampleFlagsPresent)) {
- // These two shall not be used together.
- return -EINVAL;
- }
-
- uint32_t sampleCount = readU32(offset + 4);
- offset += 8;
-
- uint64_t dataOffset = mTrackFragmentHeaderInfo.mDataOffset;
-
- uint32_t firstSampleFlags = 0;
-
- if (flags & kDataOffsetPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- int32_t dataOffsetDelta = (int32_t)readU32(offset);
-
- dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta;
-
- offset += 4;
- }
-
- if (flags & kFirstSampleFlagsPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- firstSampleFlags = readU32(offset);
- offset += 4;
- }
-
- TrackInfo *info = editTrack(mTrackFragmentHeaderInfo.mTrackID);
-
- if (info == NULL) {
- return -EINVAL;
- }
-
- uint32_t sampleDuration = 0, sampleSize = 0, sampleFlags = 0,
- sampleCtsOffset = 0;
-
- size_t bytesPerSample = 0;
- if (flags & kSampleDurationPresent) {
- bytesPerSample += 4;
- } else if (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
- sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
- } else {
- sampleDuration = info->mDefaultSampleDuration;
- }
-
- if (flags & kSampleSizePresent) {
- bytesPerSample += 4;
- } else if (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) {
- sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize;
- } else {
- sampleSize = info->mDefaultSampleSize;
- }
-
- if (flags & kSampleFlagsPresent) {
- bytesPerSample += 4;
- } else if (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) {
- sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags;
- } else {
- sampleFlags = info->mDefaultSampleFlags;
- }
-
- if (flags & kSampleCompositionTimeOffsetPresent) {
- bytesPerSample += 4;
- } else {
- sampleCtsOffset = 0;
- }
-
- if (offset + sampleCount * bytesPerSample > size) {
- return -EINVAL;
- }
-
- uint32_t sampleDescIndex =
- (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent)
- ? mTrackFragmentHeaderInfo.mSampleDescriptionIndex
- : info->mDefaultSampleDescriptionIndex;
-
- for (uint32_t i = 0; i < sampleCount; ++i) {
- if (flags & kSampleDurationPresent) {
- sampleDuration = readU32(offset);
- offset += 4;
- }
-
- if (flags & kSampleSizePresent) {
- sampleSize = readU32(offset);
- offset += 4;
- }
-
- if (flags & kSampleFlagsPresent) {
- sampleFlags = readU32(offset);
- offset += 4;
- }
-
- if (flags & kSampleCompositionTimeOffsetPresent) {
- sampleCtsOffset = readU32(offset);
- offset += 4;
- }
-
- ALOGV("adding sample at offset 0x%08llx, size %u, duration %u, "
- "sampleDescIndex=%u, flags 0x%08x",
- dataOffset, sampleSize, sampleDuration,
- sampleDescIndex,
- (flags & kFirstSampleFlagsPresent) && i == 0
- ? firstSampleFlags : sampleFlags);
-
- const sp<TrackFragment> &fragment = *--info->mFragments.end();
-
- uint32_t decodingTime = info->mDecodingTime;
- info->mDecodingTime += sampleDuration;
- uint32_t presentationTime = decodingTime + sampleCtsOffset;
-
- static_cast<DynamicTrackFragment *>(
- fragment.get())->addSample(
- dataOffset,
- sampleSize,
- presentationTime,
- sampleDescIndex,
- ((flags & kFirstSampleFlagsPresent) && i == 0)
- ? firstSampleFlags : sampleFlags);
-
- dataOffset += sampleSize;
- }
-
- mTrackFragmentHeaderInfo.mDataOffset = dataOffset;
-
- return OK;
-}
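
As the presence checks above imply, every optional per-sample field in a 'trun' record occupies 4 bytes, which is what the bytesPerSample bound check relies on. A hedged helper sketch; trunBytesPerSample is hypothetical, not part of these sources.

    #include <cstddef>
    #include <cstdint>

    // Bytes occupied by one 'trun' sample entry, given the trun flags.
    static size_t trunBytesPerSample(uint32_t flags) {
        size_t bytes = 0;
        if (flags & 0x100) bytes += 4;  // sample_duration present
        if (flags & 0x200) bytes += 4;  // sample_size present
        if (flags & 0x400) bytes += 4;  // sample_flags present
        if (flags & 0x800) bytes += 4;  // sample_composition_time_offset present
        return bytes;
    }
    // e.g. flags = 0x000701 -> duration, size and flags present -> 12 bytes/sample.
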
-
-void FragmentedMP4Parser::copyBuffer(
- sp<ABuffer> *dst, size_t offset, uint64_t size) const {
- sp<ABuffer> buf = new ABuffer(size);
- memcpy(buf->data(), mBuffer->data() + offset, size);
-
- *dst = buf;
-}
-
-} // namespace android
diff --git a/media/libstagefright/mp4/TrackFragment.cpp b/media/libstagefright/mp4/TrackFragment.cpp
deleted file mode 100644
index 3699038..0000000
--- a/media/libstagefright/mp4/TrackFragment.cpp
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "TrackFragment"
-#include <utils/Log.h>
-
-#include "TrackFragment.h"
-
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-
-namespace android {
-
-FragmentedMP4Parser::DynamicTrackFragment::DynamicTrackFragment()
- : mComplete(false),
- mSampleIndex(0) {
-}
-
-FragmentedMP4Parser::DynamicTrackFragment::~DynamicTrackFragment() {
-}
-
-status_t FragmentedMP4Parser::DynamicTrackFragment::getSample(SampleInfo *info) {
- if (mSampleIndex >= mSamples.size()) {
- return mComplete ? ERROR_END_OF_STREAM : -EWOULDBLOCK;
- }
-
- *info = mSamples.itemAt(mSampleIndex);
-
- return OK;
-}
-
-void FragmentedMP4Parser::DynamicTrackFragment::advance() {
- ++mSampleIndex;
-}
-
-void FragmentedMP4Parser::DynamicTrackFragment::addSample(
- off64_t dataOffset, size_t sampleSize,
- uint32_t presentationTime,
- size_t sampleDescIndex,
- uint32_t flags) {
- mSamples.push();
- SampleInfo *sampleInfo = &mSamples.editItemAt(mSamples.size() - 1);
-
- sampleInfo->mOffset = dataOffset;
- sampleInfo->mSize = sampleSize;
- sampleInfo->mPresentationTime = presentationTime;
- sampleInfo->mSampleDescIndex = sampleDescIndex;
- sampleInfo->mFlags = flags;
-}
-
-status_t FragmentedMP4Parser::DynamicTrackFragment::signalCompletion() {
- mComplete = true;
-
- return OK;
-}
-
-bool FragmentedMP4Parser::DynamicTrackFragment::complete() const {
- return mComplete;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-FragmentedMP4Parser::StaticTrackFragment::StaticTrackFragment()
- : mSampleIndex(0),
- mSampleCount(0),
- mChunkIndex(0),
- mSampleToChunkIndex(-1),
- mSampleToChunkRemaining(0),
- mPrevChunkIndex(0xffffffff),
- mNextSampleOffset(0) {
-}
-
-FragmentedMP4Parser::StaticTrackFragment::~StaticTrackFragment() {
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::getSample(SampleInfo *info) {
- if (mSampleIndex >= mSampleCount) {
- return ERROR_END_OF_STREAM;
- }
-
- *info = mSampleInfo;
-
- ALOGV("returning sample %d at [0x%08llx, 0x%08llx)",
- mSampleIndex,
- info->mOffset, info->mOffset + info->mSize);
-
- return OK;
-}
-
-void FragmentedMP4Parser::StaticTrackFragment::updateSampleInfo() {
- if (mSampleIndex >= mSampleCount) {
- return;
- }
-
- if (mSampleSizes != NULL) {
- uint32_t defaultSampleSize = U32_AT(mSampleSizes->data() + 4);
- if (defaultSampleSize > 0) {
- mSampleInfo.mSize = defaultSampleSize;
- } else {
- mSampleInfo.mSize= U32_AT(mSampleSizes->data() + 12 + 4 * mSampleIndex);
- }
- } else {
- CHECK(mCompactSampleSizes != NULL);
-
- uint32_t fieldSize = U32_AT(mCompactSampleSizes->data() + 4);
-
- switch (fieldSize) {
- case 4:
- {
- unsigned byte = mCompactSampleSizes->data()[12 + mSampleIndex / 2];
- mSampleInfo.mSize = (mSampleIndex & 1) ? byte & 0x0f : byte >> 4;
- break;
- }
-
- case 8:
- {
- mSampleInfo.mSize = mCompactSampleSizes->data()[12 + mSampleIndex];
- break;
- }
-
- default:
- {
- CHECK_EQ(fieldSize, 16);
- mSampleInfo.mSize =
- U16_AT(mCompactSampleSizes->data() + 12 + mSampleIndex * 2);
- break;
- }
- }
- }
-
- CHECK_GT(mSampleToChunkRemaining, 0);
-
- // The sample desc index is 1-based... XXX
- mSampleInfo.mSampleDescIndex =
- U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 8);
-
- if (mChunkIndex != mPrevChunkIndex) {
- mPrevChunkIndex = mChunkIndex;
-
- if (mChunkOffsets != NULL) {
- uint32_t entryCount = U32_AT(mChunkOffsets->data() + 4);
-
- if (mChunkIndex >= entryCount) {
- mSampleIndex = mSampleCount;
- return;
- }
-
- mNextSampleOffset =
- U32_AT(mChunkOffsets->data() + 8 + 4 * mChunkIndex);
- } else {
- CHECK(mChunkOffsets64 != NULL);
-
- uint32_t entryCount = U32_AT(mChunkOffsets64->data() + 4);
-
- if (mChunkIndex >= entryCount) {
- mSampleIndex = mSampleCount;
- return;
- }
-
- mNextSampleOffset =
- U64_AT(mChunkOffsets64->data() + 8 + 8 * mChunkIndex);
- }
- }
-
- mSampleInfo.mOffset = mNextSampleOffset;
-
- mSampleInfo.mPresentationTime = 0;
- mSampleInfo.mFlags = 0;
-}
-
-void FragmentedMP4Parser::StaticTrackFragment::advance() {
- mNextSampleOffset += mSampleInfo.mSize;
-
- ++mSampleIndex;
- if (--mSampleToChunkRemaining == 0) {
- ++mChunkIndex;
-
- uint32_t entryCount = U32_AT(mSampleToChunk->data() + 4);
-
- // If this is the last entry in the sample to chunk table, we will
- // stay on this entry.
- if ((uint32_t)(mSampleToChunkIndex + 1) < entryCount) {
- uint32_t nextChunkIndex =
- U32_AT(mSampleToChunk->data() + 8 + 12 * (mSampleToChunkIndex + 1));
-
- CHECK_GE(nextChunkIndex, 1u);
- --nextChunkIndex;
-
- if (mChunkIndex >= nextChunkIndex) {
- CHECK_EQ(mChunkIndex, nextChunkIndex);
- ++mSampleToChunkIndex;
- }
- }
-
- mSampleToChunkRemaining =
- U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 4);
- }
-
- updateSampleInfo();
-}
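
advance() above walks the 'stsc' table incrementally: each entry (first_chunk, samples_per_chunk, sample_description_index) applies to every chunk up to, but not including, the next entry's first_chunk. A hedged, non-incremental sketch of the same mapping; StscEntry and sampleToChunk are hypothetical and not part of these sources.

    #include <cstddef>
    #include <cstdint>

    struct StscEntry {
        uint32_t firstChunk;       // 1-based, as stored in the box
        uint32_t samplesPerChunk;
        uint32_t sampleDescIndex;  // 1-based
    };

    // Map a 0-based sample index to its 0-based chunk index.
    // Returns UINT32_MAX if the index lies past the end of the table.
    static uint32_t sampleToChunk(
            const StscEntry *entries, size_t count, uint32_t sampleIndex) {
        uint64_t consumed = 0;
        for (size_t i = 0; i < count; ++i) {
            if (entries[i].samplesPerChunk == 0) {
                continue;  // malformed entry; skip defensively
            }
            uint32_t firstChunk = entries[i].firstChunk - 1;  // make 0-based
            uint64_t chunkSpan = (i + 1 < count)
                    ? entries[i + 1].firstChunk - entries[i].firstChunk
                    : (uint64_t)UINT32_MAX - firstChunk;      // last entry runs to the end
            uint64_t samplesInRun = chunkSpan * entries[i].samplesPerChunk;
            if (sampleIndex < consumed + samplesInRun) {
                return firstChunk
                        + (uint32_t)((sampleIndex - consumed) / entries[i].samplesPerChunk);
            }
            consumed += samplesInRun;
        }
        return UINT32_MAX;
    }
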
-
-static void setU32At(uint8_t *ptr, uint32_t x) {
- ptr[0] = x >> 24;
- ptr[1] = (x >> 16) & 0xff;
- ptr[2] = (x >> 8) & 0xff;
- ptr[3] = x & 0xff;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::signalCompletion() {
- mSampleToChunkIndex = 0;
-
- mSampleToChunkRemaining =
- (mSampleToChunk == NULL)
- ? 0
- : U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 4);
-
- updateSampleInfo();
-
- return OK;
-}
-
-bool FragmentedMP4Parser::StaticTrackFragment::complete() const {
- return true;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 12 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t sampleSize = parser->readU32(offset + 4);
- uint32_t sampleCount = parser->readU32(offset + 8);
-
- if (sampleSize == 0 && offset + 12 + sampleCount * 4 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mSampleSizes, offset, size);
-
- mSampleCount = sampleCount;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseCompactSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 12 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t fieldSize = parser->readU32(offset + 4);
-
- if (fieldSize != 4 && fieldSize != 8 && fieldSize != 16) {
- return ERROR_MALFORMED;
- }
-
- uint32_t sampleCount = parser->readU32(offset + 8);
-
- if (offset + 12 + (sampleCount * fieldSize + 4) / 8 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mCompactSampleSizes, offset, size);
-
- mSampleCount = sampleCount;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseSampleToChunk(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t entryCount = parser->readU32(offset + 4);
-
- if (entryCount == 0) {
- return OK;
- }
-
- if (offset + 8 + entryCount * 12 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mSampleToChunk, offset, size);
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseChunkOffsets(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t entryCount = parser->readU32(offset + 4);
-
- if (offset + 8 + entryCount * 4 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mChunkOffsets, offset, size);
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseChunkOffsets64(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t entryCount = parser->readU32(offset + 4);
-
- if (offset + 8 + entryCount * 8 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mChunkOffsets64, offset, size);
-
- return OK;
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/mp4/TrackFragment.h b/media/libstagefright/mp4/TrackFragment.h
deleted file mode 100644
index e1ad46e..0000000
--- a/media/libstagefright/mp4/TrackFragment.h
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TRACK_FRAGMENT_H_
-
-#define TRACK_FRAGMENT_H_
-
-#include "include/FragmentedMP4Parser.h"
-
-namespace android {
-
-struct FragmentedMP4Parser::TrackFragment : public RefBase {
- TrackFragment() {}
-
- virtual status_t getSample(SampleInfo *info) = 0;
- virtual void advance() = 0;
-
- virtual status_t signalCompletion() = 0;
- virtual bool complete() const = 0;
-
-protected:
- virtual ~TrackFragment() {}
-
-private:
- DISALLOW_EVIL_CONSTRUCTORS(TrackFragment);
-};
-
-struct FragmentedMP4Parser::DynamicTrackFragment : public FragmentedMP4Parser::TrackFragment {
- DynamicTrackFragment();
-
- virtual status_t getSample(SampleInfo *info);
- virtual void advance();
-
- void addSample(
- off64_t dataOffset, size_t sampleSize,
- uint32_t presentationTime,
- size_t sampleDescIndex,
- uint32_t flags);
-
- // No more samples will be added to this fragment.
- virtual status_t signalCompletion();
-
- virtual bool complete() const;
-
-protected:
- virtual ~DynamicTrackFragment();
-
-private:
- bool mComplete;
- size_t mSampleIndex;
- Vector<SampleInfo> mSamples;
-
- DISALLOW_EVIL_CONSTRUCTORS(DynamicTrackFragment);
-};
-
-struct FragmentedMP4Parser::StaticTrackFragment : public FragmentedMP4Parser::TrackFragment {
- StaticTrackFragment();
-
- virtual status_t getSample(SampleInfo *info);
- virtual void advance();
-
- virtual status_t signalCompletion();
- virtual bool complete() const;
-
- status_t parseSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseCompactSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSampleToChunk(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets64(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
-protected:
- virtual ~StaticTrackFragment();
-
-private:
- size_t mSampleIndex;
- size_t mSampleCount;
- uint32_t mChunkIndex;
-
- SampleInfo mSampleInfo;
-
- sp<ABuffer> mSampleSizes;
- sp<ABuffer> mCompactSampleSizes;
-
- sp<ABuffer> mSampleToChunk;
- ssize_t mSampleToChunkIndex;
- size_t mSampleToChunkRemaining;
-
- sp<ABuffer> mChunkOffsets;
- sp<ABuffer> mChunkOffsets64;
- uint32_t mPrevChunkIndex;
- uint64_t mNextSampleOffset;
-
- void updateSampleInfo();
-
- DISALLOW_EVIL_CONSTRUCTORS(StaticTrackFragment);
-};
-
-} // namespace android
-
-#endif // TRACK_FRAGMENT_H_
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 175a263..482ccff 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -36,6 +36,8 @@
#include <media/IStreamSource.h>
#include <utils/KeyedVector.h>
+#include <inttypes.h>
+
namespace android {
// I want the expression "y" evaluated even if verbose logging is off.
@@ -61,6 +63,7 @@ struct ATSParser::Program : public RefBase {
void signalEOS(status_t finalResult);
sp<MediaSource> getSource(SourceType type);
+ bool hasSource(SourceType type) const;
int64_t convertPTSToTimestamp(uint64_t PTS);
@@ -117,6 +120,9 @@ struct ATSParser::Stream : public RefBase {
sp<MediaSource> getSource(SourceType type);
+ bool isAudio() const;
+ bool isVideo() const;
+
protected:
virtual ~Stream();
@@ -144,9 +150,6 @@ private:
void extractAACFrames(const sp<ABuffer> &buffer);
- bool isAudio() const;
- bool isVideo() const;
-
DISALLOW_EVIL_CONSTRUCTORS(Stream);
};
@@ -242,11 +245,16 @@ struct StreamInfo {
status_t ATSParser::Program::parseProgramMap(ABitReader *br) {
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
- CHECK_EQ(table_id, 0x02u);
-
+ if (table_id != 0x02u) {
+ ALOGE("PMT data error!");
+ return ERROR_MALFORMED;
+ }
unsigned section_syntax_indicator = br->getBits(1);
ALOGV(" section_syntax_indicator = %u", section_syntax_indicator);
- CHECK_EQ(section_syntax_indicator, 1u);
+ if (section_syntax_indicator != 1u) {
+ ALOGE("PMT data error!");
+ return ERROR_MALFORMED;
+ }
CHECK_EQ(br->getBits(1), 0u);
MY_LOGV(" reserved = %u", br->getBits(2));
@@ -433,6 +441,19 @@ sp<MediaSource> ATSParser::Program::getSource(SourceType type) {
return NULL;
}
+bool ATSParser::Program::hasSource(SourceType type) const {
+ for (size_t i = 0; i < mStreams.size(); ++i) {
+ const sp<Stream> &stream = mStreams.valueAt(i);
+ if (type == AUDIO && stream->isAudio()) {
+ return true;
+ } else if (type == VIDEO && stream->isVideo()) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) {
if (!(mParser->mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)) {
if (!mFirstPTSValid) {
@@ -501,9 +522,10 @@ ATSParser::Stream::Stream(
ElementaryStreamQueue::MPEG4_VIDEO);
break;
- case STREAMTYPE_PCM_AUDIO:
+ case STREAMTYPE_LPCM_AC3:
+ case STREAMTYPE_AC3:
mQueue = new ElementaryStreamQueue(
- ElementaryStreamQueue::PCM_AUDIO);
+ ElementaryStreamQueue::AC3);
break;
default:
@@ -548,7 +570,9 @@ status_t ATSParser::Stream::parse(
}
#endif
- return OK;
+ if (!payload_unit_start_indicator) {
+ return OK;
+ }
}
mExpectedContinuityCounter = (continuity_counter + 1) & 0x0f;
@@ -581,7 +605,7 @@ status_t ATSParser::Stream::parse(
// Increment in multiples of 64K.
neededSize = (neededSize + 65535) & ~65535;
- ALOGI("resizing buffer to %d bytes", neededSize);
+ ALOGI("resizing buffer to %zu bytes", neededSize);
sp<ABuffer> newBuffer = new ABuffer(neededSize);
memcpy(newBuffer->data(), mBuffer->data(), mBuffer->size());
@@ -613,7 +637,8 @@ bool ATSParser::Stream::isAudio() const {
case STREAMTYPE_MPEG1_AUDIO:
case STREAMTYPE_MPEG2_AUDIO:
case STREAMTYPE_MPEG2_AUDIO_ADTS:
- case STREAMTYPE_PCM_AUDIO:
+ case STREAMTYPE_LPCM_AC3:
+ case STREAMTYPE_AC3:
return true;
default:
@@ -654,12 +679,12 @@ void ATSParser::Stream::signalDiscontinuity(
int64_t resumeAtMediaTimeUs =
mProgram->convertPTSToTimestamp(resumeAtPTS);
- extra->setInt64("resume-at-mediatimeUs", resumeAtMediaTimeUs);
+ extra->setInt64("resume-at-mediaTimeUs", resumeAtMediaTimeUs);
}
}
if (mSource != NULL) {
- mSource->queueDiscontinuity(type, extra);
+ mSource->queueDiscontinuity(type, extra, true);
}
}
@@ -733,8 +758,10 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) {
CHECK_GE(optional_bytes_remaining, 5u);
- CHECK_EQ(br->getBits(4), PTS_DTS_flags);
-
+ if (br->getBits(4) != PTS_DTS_flags) {
+ ALOGE("PES data Error!");
+ return ERROR_MALFORMED;
+ }
PTS = ((uint64_t)br->getBits(3)) << 30;
CHECK_EQ(br->getBits(1), 1u);
PTS |= ((uint64_t)br->getBits(15)) << 15;
@@ -742,7 +769,7 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
PTS |= br->getBits(15);
CHECK_EQ(br->getBits(1), 1u);
- ALOGV("PTS = 0x%016llx (%.2f)", PTS, PTS / 90000.0);
+ ALOGV("PTS = 0x%016" PRIx64 " (%.2f)", PTS, PTS / 90000.0);
optional_bytes_remaining -= 5;
@@ -758,7 +785,7 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
DTS |= br->getBits(15);
CHECK_EQ(br->getBits(1), 1u);
- ALOGV("DTS = %llu", DTS);
+ ALOGV("DTS = %" PRIu64, DTS);
optional_bytes_remaining -= 5;
}
@@ -776,7 +803,7 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
ESCR |= br->getBits(15);
CHECK_EQ(br->getBits(1), 1u);
- ALOGV("ESCR = %llu", ESCR);
+ ALOGV("ESCR = %" PRIu64, ESCR);
MY_LOGV("ESCR_extension = %u", br->getBits(9));
CHECK_EQ(br->getBits(1), 1u);
@@ -806,7 +833,7 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
if (br->numBitsLeft() < dataLength * 8) {
ALOGE("PES packet does not carry enough data to contain "
- "payload. (numBitsLeft = %d, required = %d)",
+ "payload. (numBitsLeft = %zu, required = %u)",
br->numBitsLeft(), dataLength * 8);
return ERROR_MALFORMED;
@@ -826,7 +853,7 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {
size_t payloadSizeBits = br->numBitsLeft();
CHECK_EQ(payloadSizeBits % 8, 0u);
- ALOGV("There's %d bytes of payload.", payloadSizeBits / 8);
+ ALOGV("There's %zu bytes of payload.", payloadSizeBits / 8);
}
} else if (stream_id == 0xbe) { // padding_stream
CHECK_NE(PES_packet_length, 0u);
@@ -844,7 +871,7 @@ status_t ATSParser::Stream::flush() {
return OK;
}
- ALOGV("flushing stream 0x%04x size = %d", mElementaryPID, mBuffer->size());
+ ALOGV("flushing stream 0x%04x size = %zu", mElementaryPID, mBuffer->size());
ABitReader br(mBuffer->data(), mBuffer->size());
@@ -856,7 +883,7 @@ status_t ATSParser::Stream::flush() {
}
void ATSParser::Stream::onPayloadData(
- unsigned PTS_DTS_flags, uint64_t PTS, uint64_t DTS,
+ unsigned PTS_DTS_flags, uint64_t PTS, uint64_t /* DTS */,
const uint8_t *data, size_t size) {
#if 0
ALOGI("payload streamType 0x%02x, PTS = 0x%016llx, dPTS = %lld",
@@ -888,6 +915,12 @@ void ATSParser::Stream::onPayloadData(
ALOGV("Stream PID 0x%08x of type 0x%02x now has data.",
mElementaryPID, mStreamType);
+ const char *mime;
+ if (meta->findCString(kKeyMIMEType, &mime)
+ && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
+ && !IsIDR(accessUnit)) {
+ continue;
+ }
mSource = new AnotherPacketSource(meta);
mSource->queueAccessUnit(accessUnit);
}
@@ -991,8 +1024,10 @@ void ATSParser::signalEOS(status_t finalResult) {
void ATSParser::parseProgramAssociationTable(ABitReader *br) {
unsigned table_id = br->getBits(8);
ALOGV(" table_id = %u", table_id);
- CHECK_EQ(table_id, 0x00u);
-
+ if (table_id != 0x00u) {
+ ALOGE("PAT data error!");
+ return ;
+ }
unsigned section_syntax_indictor = br->getBits(1);
ALOGV(" section_syntax_indictor = %u", section_syntax_indictor);
CHECK_EQ(section_syntax_indictor, 1u);
@@ -1062,7 +1097,9 @@ status_t ATSParser::parsePID(
sp<PSISection> section = mPSISections.valueAt(sectionIndex);
if (payload_unit_start_indicator) {
- CHECK(section->isEmpty());
+ if (!section->isEmpty()) {
+ return ERROR_UNSUPPORTED;
+ }
unsigned skip = br->getBits(8);
br->skipBits(skip * 8);
@@ -1166,7 +1203,7 @@ void ATSParser::parseAdaptationField(ABitReader *br, unsigned PID) {
uint64_t PCR = PCR_base * 300 + PCR_ext;
- ALOGV("PID 0x%04x: PCR = 0x%016llx (%.2f)",
+ ALOGV("PID 0x%04x: PCR = 0x%016" PRIx64 " (%.2f)",
PID, PCR, PCR / 27E6);
// The number of bytes received by this parser up to and
@@ -1191,7 +1228,10 @@ status_t ATSParser::parseTS(ABitReader *br) {
ALOGV("---");
unsigned sync_byte = br->getBits(8);
- CHECK_EQ(sync_byte, 0x47u);
+ if (sync_byte != 0x47u) {
+ ALOGE("[error] parseTS: return error as sync_byte=0x%x", sync_byte);
+ return BAD_VALUE;
+ }
if (br->getBits(1)) { // transport_error_indicator
// silently ignore.
@@ -1252,6 +1292,17 @@ sp<MediaSource> ATSParser::getSource(SourceType type) {
return NULL;
}
+bool ATSParser::hasSource(SourceType type) const {
+ for (size_t i = 0; i < mPrograms.size(); ++i) {
+ const sp<Program> &program = mPrograms.itemAt(i);
+ if (program->hasSource(type)) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
bool ATSParser::PTSTimeDeltaEstablished() {
if (mPrograms.isEmpty()) {
return false;
@@ -1261,8 +1312,8 @@ bool ATSParser::PTSTimeDeltaEstablished() {
}
void ATSParser::updatePCR(
- unsigned PID, uint64_t PCR, size_t byteOffsetFromStart) {
- ALOGV("PCR 0x%016llx @ %d", PCR, byteOffsetFromStart);
+ unsigned /* PID */, uint64_t PCR, size_t byteOffsetFromStart) {
+ ALOGV("PCR 0x%016" PRIx64 " @ %zu", PCR, byteOffsetFromStart);
if (mNumPCRs == 2) {
mPCR[0] = mPCR[1];
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index a10edc9..75d76dc 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -28,7 +28,7 @@
namespace android {
-struct ABitReader;
+class ABitReader;
struct ABuffer;
struct MediaSource;
@@ -41,8 +41,6 @@ struct ATSParser : public RefBase {
DISCONTINUITY_ABSOLUTE_TIME = 8,
DISCONTINUITY_TIME_OFFSET = 16,
- DISCONTINUITY_SEEK = DISCONTINUITY_TIME,
-
// For legacy reasons this also implies a time discontinuity.
DISCONTINUITY_FORMATCHANGE =
DISCONTINUITY_AUDIO_FORMAT
@@ -71,10 +69,12 @@ struct ATSParser : public RefBase {
void signalEOS(status_t finalResult);
enum SourceType {
- VIDEO,
- AUDIO
+ VIDEO = 0,
+ AUDIO = 1,
+ NUM_SOURCE_TYPES = 2
};
sp<MediaSource> getSource(SourceType type);
+ bool hasSource(SourceType type) const;
bool PTSTimeDeltaEstablished();
@@ -88,7 +88,13 @@ struct ATSParser : public RefBase {
STREAMTYPE_MPEG2_AUDIO_ADTS = 0x0f,
STREAMTYPE_MPEG4_VIDEO = 0x10,
STREAMTYPE_H264 = 0x1b,
- STREAMTYPE_PCM_AUDIO = 0x83,
+
+ // From ATSC A/53 Part 3:2009, 6.7.1
+ STREAMTYPE_AC3 = 0x81,
+
+ // Stream type 0x83 is non-standard,
+ // it could be LPCM or TrueHD AC3
+ STREAMTYPE_LPCM_AC3 = 0x83,
};
protected:
diff --git a/media/libstagefright/mpeg2ts/Android.mk b/media/libstagefright/mpeg2ts/Android.mk
index c1a7a9d..c17a0b7 100644
--- a/media/libstagefright/mpeg2ts/Android.mk
+++ b/media/libstagefright/mpeg2ts/Android.mk
@@ -13,6 +13,8 @@ LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE:= libstagefright_mpeg2ts
ifeq ($(TARGET_ARCH),arm)
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 3153c8b..f266fe7 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -14,6 +14,9 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AnotherPacketSource"
+
#include "AnotherPacketSource.h"
#include <media/stagefright/foundation/ABuffer.h>
@@ -26,15 +29,21 @@
#include <media/stagefright/MetaData.h>
#include <utils/Vector.h>
+#include <inttypes.h>
+
namespace android {
const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs
AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)
: mIsAudio(false),
+ mIsVideo(false),
mFormat(NULL),
mLastQueuedTimeUs(0),
- mEOSResult(OK) {
+ mEOSResult(OK),
+ mLatestEnqueuedMeta(NULL),
+ mLatestDequeuedMeta(NULL),
+ mQueuedDiscontinuityCount(0) {
setFormat(meta);
}
@@ -42,6 +51,7 @@ void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
CHECK(mFormat == NULL);
mIsAudio = false;
+ mIsVideo = false;
if (meta == NULL) {
return;
@@ -53,15 +63,17 @@ void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
if (!strncasecmp("audio/", mime, 6)) {
mIsAudio = true;
+ } else if (!strncasecmp("video/", mime, 6)) {
+ mIsVideo = true;
} else {
- CHECK(!strncasecmp("video/", mime, 6));
+ CHECK(!strncasecmp("text/", mime, 5));
}
}
AnotherPacketSource::~AnotherPacketSource() {
}
-status_t AnotherPacketSource::start(MetaData *params) {
+status_t AnotherPacketSource::start(MetaData * /* params */) {
return OK;
}
@@ -70,7 +82,27 @@ status_t AnotherPacketSource::stop() {
}
sp<MetaData> AnotherPacketSource::getFormat() {
- return mFormat;
+ Mutex::Autolock autoLock(mLock);
+ if (mFormat != NULL) {
+ return mFormat;
+ }
+
+ List<sp<ABuffer> >::iterator it = mBuffers.begin();
+ while (it != mBuffers.end()) {
+ sp<ABuffer> buffer = *it;
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ break;
+ }
+
+ sp<RefBase> object;
+ if (buffer->meta()->findObject("format", &object)) {
+ return mFormat = static_cast<MetaData*>(object.get());
+ }
+
+ ++it;
+ }
+ return NULL;
}
status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {
@@ -91,9 +123,17 @@ status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {
mFormat.clear();
}
+ --mQueuedDiscontinuityCount;
return INFO_DISCONTINUITY;
}
+ mLatestDequeuedMeta = (*buffer)->meta()->dup();
+
+ sp<RefBase> object;
+ if ((*buffer)->meta()->findObject("format", &object)) {
+ mFormat = static_cast<MetaData*>(object.get());
+ }
+
return OK;
}
@@ -110,8 +150,10 @@ status_t AnotherPacketSource::read(
}
if (!mBuffers.empty()) {
+
const sp<ABuffer> buffer = *mBuffers.begin();
mBuffers.erase(mBuffers.begin());
+ mLatestDequeuedMeta = buffer->meta()->dup();
int32_t discontinuity;
if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
@@ -120,17 +162,22 @@ status_t AnotherPacketSource::read(
}
return INFO_DISCONTINUITY;
- } else {
- int64_t timeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ }
- MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
+ sp<RefBase> object;
+ if (buffer->meta()->findObject("format", &object)) {
+ mFormat = static_cast<MetaData*>(object.get());
+ }
- mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
+ int64_t timeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- *out = mediaBuffer;
- return OK;
- }
+ MediaBuffer *mediaBuffer = new MediaBuffer(buffer);
+
+ mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs);
+
+ *out = mediaBuffer;
+ return OK;
}
return mEOSResult;
@@ -142,7 +189,11 @@ bool AnotherPacketSource::wasFormatChange(
return (discontinuityType & ATSParser::DISCONTINUITY_AUDIO_FORMAT) != 0;
}
- return (discontinuityType & ATSParser::DISCONTINUITY_VIDEO_FORMAT) != 0;
+ if (mIsVideo) {
+ return (discontinuityType & ATSParser::DISCONTINUITY_VIDEO_FORMAT) != 0;
+ }
+
+ return false;
}
void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
@@ -152,12 +203,36 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
return;
}
- CHECK(buffer->meta()->findInt64("timeUs", &mLastQueuedTimeUs));
- ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
+ int64_t lastQueuedTimeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs));
+ mLastQueuedTimeUs = lastQueuedTimeUs;
+ ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
Mutex::Autolock autoLock(mLock);
mBuffers.push_back(buffer);
mCondition.signal();
+
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ ++mQueuedDiscontinuityCount;
+ }
+
+ if (mLatestEnqueuedMeta == NULL) {
+ mLatestEnqueuedMeta = buffer->meta()->dup();
+ } else {
+ int64_t latestTimeUs = 0;
+ int64_t frameDeltaUs = 0;
+ CHECK(mLatestEnqueuedMeta->findInt64("timeUs", &latestTimeUs));
+ if (lastQueuedTimeUs > latestTimeUs) {
+ mLatestEnqueuedMeta = buffer->meta()->dup();
+ frameDeltaUs = lastQueuedTimeUs - latestTimeUs;
+ mLatestEnqueuedMeta->setInt64("durationUs", frameDeltaUs);
+ } else if (!mLatestEnqueuedMeta->findInt64("durationUs", &frameDeltaUs)) {
+ // For B frames
+ frameDeltaUs = latestTimeUs - lastQueuedTimeUs;
+ mLatestEnqueuedMeta->setInt64("durationUs", frameDeltaUs);
+ }
+ }
}
void AnotherPacketSource::clear() {
@@ -165,33 +240,44 @@ void AnotherPacketSource::clear() {
mBuffers.clear();
mEOSResult = OK;
+ mQueuedDiscontinuityCount = 0;
mFormat = NULL;
+ mLatestEnqueuedMeta = NULL;
}
void AnotherPacketSource::queueDiscontinuity(
ATSParser::DiscontinuityType type,
- const sp<AMessage> &extra) {
+ const sp<AMessage> &extra,
+ bool discard) {
Mutex::Autolock autoLock(mLock);
- // Leave only discontinuities in the queue.
- List<sp<ABuffer> >::iterator it = mBuffers.begin();
- while (it != mBuffers.end()) {
- sp<ABuffer> oldBuffer = *it;
+ if (discard) {
+ // Leave only discontinuities in the queue.
+ List<sp<ABuffer> >::iterator it = mBuffers.begin();
+ while (it != mBuffers.end()) {
+ sp<ABuffer> oldBuffer = *it;
+
+ int32_t oldDiscontinuityType;
+ if (!oldBuffer->meta()->findInt32(
+ "discontinuity", &oldDiscontinuityType)) {
+ it = mBuffers.erase(it);
+ continue;
+ }
- int32_t oldDiscontinuityType;
- if (!oldBuffer->meta()->findInt32(
- "discontinuity", &oldDiscontinuityType)) {
- it = mBuffers.erase(it);
- continue;
+ ++it;
}
-
- ++it;
}
mEOSResult = OK;
mLastQueuedTimeUs = 0;
+ mLatestEnqueuedMeta = NULL;
+ if (type == ATSParser::DISCONTINUITY_NONE) {
+ return;
+ }
+
+ ++mQueuedDiscontinuityCount;
sp<ABuffer> buffer = new ABuffer(0);
buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));
buffer->meta()->setMessage("extra", extra);
@@ -220,7 +306,10 @@ bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {
int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) {
Mutex::Autolock autoLock(mLock);
+ return getBufferedDurationUs_l(finalResult);
+}
+int64_t AnotherPacketSource::getBufferedDurationUs_l(status_t *finalResult) {
*finalResult = mEOSResult;
if (mBuffers.empty()) {
@@ -229,6 +318,7 @@ int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) {
int64_t time1 = -1;
int64_t time2 = -1;
+ int64_t durationUs = 0;
List<sp<ABuffer> >::iterator it = mBuffers.begin();
while (it != mBuffers.end()) {
@@ -236,20 +326,64 @@ int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) {
int64_t timeUs;
if (buffer->meta()->findInt64("timeUs", &timeUs)) {
- if (time1 < 0) {
+ if (time1 < 0 || timeUs < time1) {
time1 = timeUs;
}
- time2 = timeUs;
+ if (time2 < 0 || timeUs > time2) {
+ time2 = timeUs;
+ }
} else {
// This is a discontinuity, reset everything.
+ durationUs += time2 - time1;
time1 = time2 = -1;
}
++it;
}
- return time2 - time1;
+ return durationUs + (time2 - time1);
+}
+
+// A cheaper but less precise version of getBufferedDurationUs that we would like to use in
+// LiveSession::dequeueAccessUnit to trigger downwards adaptation.
+int64_t AnotherPacketSource::getEstimatedDurationUs() {
+ Mutex::Autolock autoLock(mLock);
+ if (mBuffers.empty()) {
+ return 0;
+ }
+
+ if (mQueuedDiscontinuityCount > 0) {
+ status_t finalResult;
+ return getBufferedDurationUs_l(&finalResult);
+ }
+
+ List<sp<ABuffer> >::iterator it = mBuffers.begin();
+ sp<ABuffer> buffer = *it;
+
+ int64_t startTimeUs;
+ buffer->meta()->findInt64("timeUs", &startTimeUs);
+ if (startTimeUs < 0) {
+ return 0;
+ }
+
+ it = mBuffers.end();
+ --it;
+ buffer = *it;
+
+ int64_t endTimeUs;
+ buffer->meta()->findInt64("timeUs", &endTimeUs);
+ if (endTimeUs < 0) {
+ return 0;
+ }
+
+ int64_t diffUs;
+ if (endTimeUs > startTimeUs) {
+ diffUs = endTimeUs - startTimeUs;
+ } else {
+ diffUs = startTimeUs - endTimeUs;
+ }
+ return diffUs;
}
status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) {
@@ -278,4 +412,14 @@ bool AnotherPacketSource::isFinished(int64_t duration) const {
return (mEOSResult != OK);
}
+sp<AMessage> AnotherPacketSource::getLatestEnqueuedMeta() {
+ Mutex::Autolock autoLock(mLock);
+ return mLatestEnqueuedMeta;
+}
+
+sp<AMessage> AnotherPacketSource::getLatestDequeuedMeta() {
+ Mutex::Autolock autoLock(mLock);
+ return mLatestDequeuedMeta;
+}
+
} // namespace android
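
With this change getBufferedDurationUs_l() no longer assumes monotonic timestamps: each discontinuity closes a segment whose contribution is (max pts - min pts), and the still-open final segment is added on return. A standalone sketch of that accumulation (the vector harness and the kDiscontinuity sentinel are illustrative only):

    #include <cstdint>
    #include <vector>

    static const int64_t kDiscontinuity = -1;   // stands in for a buffer with no "timeUs"

    static int64_t bufferedDurationUs(const std::vector<int64_t> &entries) {
        int64_t durationUs = 0;
        int64_t time1 = -1, time2 = -1;
        for (int64_t t : entries) {
            if (t == kDiscontinuity) {
                durationUs += time2 - time1;         // close the current segment
                time1 = time2 = -1;
                continue;
            }
            if (time1 < 0 || t < time1) time1 = t;   // earliest pts in this segment
            if (time2 < 0 || t > time2) time2 = t;   // latest pts in this segment
        }
        return durationUs + (time2 - time1);         // plus the still-open segment
    }
    // e.g. {0, 40000, 20000, kDiscontinuity, 1000000, 1040000} -> 40000 + 40000 = 80000 us
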
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index e16cf78..809a858 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -49,12 +49,16 @@ struct AnotherPacketSource : public MediaSource {
// presentation timestamps since the last discontinuity (if any).
int64_t getBufferedDurationUs(status_t *finalResult);
+ int64_t getEstimatedDurationUs();
+
status_t nextBufferTime(int64_t *timeUs);
void queueAccessUnit(const sp<ABuffer> &buffer);
void queueDiscontinuity(
- ATSParser::DiscontinuityType type, const sp<AMessage> &extra);
+ ATSParser::DiscontinuityType type,
+ const sp<AMessage> &extra,
+ bool discard);
void signalEOS(status_t result);
@@ -62,6 +66,9 @@ struct AnotherPacketSource : public MediaSource {
bool isFinished(int64_t duration) const;
+ sp<AMessage> getLatestEnqueuedMeta();
+ sp<AMessage> getLatestDequeuedMeta();
+
protected:
virtual ~AnotherPacketSource();
@@ -70,12 +77,18 @@ private:
Condition mCondition;
bool mIsAudio;
+ bool mIsVideo;
sp<MetaData> mFormat;
int64_t mLastQueuedTimeUs;
List<sp<ABuffer> > mBuffers;
status_t mEOSResult;
+ sp<AMessage> mLatestEnqueuedMeta;
+ sp<AMessage> mLatestDequeuedMeta;
+
+ size_t mQueuedDiscontinuityCount;
bool wasFormatChange(int32_t discontinuityType) const;
+ int64_t getBufferedDurationUs_l(status_t *finalResult);
DISALLOW_EVIL_CONSTRUCTORS(AnotherPacketSource);
};
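
The header changes above widen the API: queueDiscontinuity() now takes an explicit discard flag, and callers can poll the newest enqueued/dequeued meta as well as a cheap duration estimate. A hypothetical caller-side sketch (the helper is illustrative and only compiles inside the framework tree):

    static void onSeekRequested(const sp<AnotherPacketSource> &source) {
        // The newest enqueued meta carries "timeUs" and, after this patch, a
        // best-effort "durationUs" for the most recent access unit.
        sp<AMessage> meta = source->getLatestEnqueuedMeta();
        int64_t timeUs;
        if (meta != NULL && meta->findInt64("timeUs", &timeUs)) {
            ALOGV("latest enqueued pts: %lld us", (long long)timeUs);
        }

        // Drop everything except already-queued discontinuities (discard = true),
        // then mark the time jump for downstream consumers.
        source->queueDiscontinuity(
                ATSParser::DISCONTINUITY_TIME, /* extra */ NULL, /* discard */ true);
    }
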
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index e0ff0d1..f257289 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -31,6 +31,7 @@
#include "include/avc_utils.h"
+#include <inttypes.h>
#include <netinet/in.h>
namespace android {
@@ -56,8 +57,122 @@ void ElementaryStreamQueue::clear(bool clearFormat) {
}
}
-static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) {
- if (size < 3) {
+// Parse the AC3 header assuming the current ptr is at the start of a syncframe,
+// update the metadata only if applicable, and return the payload size.
+static unsigned parseAC3SyncFrame(
+ const uint8_t *ptr, size_t size, sp<MetaData> *metaData) {
+ static const unsigned channelCountTable[] = {2, 1, 2, 3, 3, 4, 4, 5};
+ static const unsigned samplingRateTable[] = {48000, 44100, 32000};
+
+ static const unsigned frameSizeTable[19][3] = {
+ { 64, 69, 96 },
+ { 80, 87, 120 },
+ { 96, 104, 144 },
+ { 112, 121, 168 },
+ { 128, 139, 192 },
+ { 160, 174, 240 },
+ { 192, 208, 288 },
+ { 224, 243, 336 },
+ { 256, 278, 384 },
+ { 320, 348, 480 },
+ { 384, 417, 576 },
+ { 448, 487, 672 },
+ { 512, 557, 768 },
+ { 640, 696, 960 },
+ { 768, 835, 1152 },
+ { 896, 975, 1344 },
+ { 1024, 1114, 1536 },
+ { 1152, 1253, 1728 },
+ { 1280, 1393, 1920 },
+ };
+
+ ABitReader bits(ptr, size);
+ if (bits.numBitsLeft() < 16) {
+ return 0;
+ }
+ if (bits.getBits(16) != 0x0B77) {
+ return 0;
+ }
+
+ if (bits.numBitsLeft() < 16 + 2 + 6 + 5 + 3 + 3) {
+ ALOGV("Not enough bits left for further parsing");
+ return 0;
+ }
+ bits.skipBits(16); // crc1
+
+ unsigned fscod = bits.getBits(2);
+ if (fscod == 3) {
+ ALOGW("Incorrect fscod in AC3 header");
+ return 0;
+ }
+
+ unsigned frmsizecod = bits.getBits(6);
+ if (frmsizecod > 37) {
+ ALOGW("Incorrect frmsizecod in AC3 header");
+ return 0;
+ }
+
+ unsigned bsid = bits.getBits(5);
+ if (bsid > 8) {
+ ALOGW("Incorrect bsid in AC3 header. Possibly E-AC-3?");
+ return 0;
+ }
+
+ unsigned bsmod __unused = bits.getBits(3);
+ unsigned acmod = bits.getBits(3);
+ unsigned cmixlev __unused = 0;
+ unsigned surmixlev __unused = 0;
+ unsigned dsurmod __unused = 0;
+
+ if ((acmod & 1) > 0 && acmod != 1) {
+ if (bits.numBitsLeft() < 2) {
+ return 0;
+ }
+ cmixlev = bits.getBits(2);
+ }
+ if ((acmod & 4) > 0) {
+ if (bits.numBitsLeft() < 2) {
+ return 0;
+ }
+ surmixlev = bits.getBits(2);
+ }
+ if (acmod == 2) {
+ if (bits.numBitsLeft() < 2) {
+ return 0;
+ }
+ dsurmod = bits.getBits(2);
+ }
+
+ if (bits.numBitsLeft() < 1) {
+ return 0;
+ }
+ unsigned lfeon = bits.getBits(1);
+
+ unsigned samplingRate = samplingRateTable[fscod];
+ unsigned payloadSize = frameSizeTable[frmsizecod >> 1][fscod];
+ if (fscod == 1) {
+ payloadSize += frmsizecod & 1;
+ }
+ payloadSize <<= 1; // convert from 16-bit words to bytes
+
+ unsigned channelCount = channelCountTable[acmod] + lfeon;
+
+ if (metaData != NULL) {
+ (*metaData)->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AC3);
+ (*metaData)->setInt32(kKeyChannelCount, channelCount);
+ (*metaData)->setInt32(kKeySampleRate, samplingRate);
+ }
+
+ return payloadSize;
+}
+
+static bool IsSeeminglyValidAC3Header(const uint8_t *ptr, size_t size) {
+ return parseAC3SyncFrame(ptr, size, NULL) > 0;
+}
+
+static bool IsSeeminglyValidADTSHeader(
+ const uint8_t *ptr, size_t size, size_t *frameLength) {
+ if (size < 7) {
// Not enough data to verify header.
return false;
}
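
Worked example of the lookup parseAC3SyncFrame() above performs, assuming a 44.1 kHz stream (fscod == 1) with frmsizecod == 14: the table row is frmsizecod >> 1 == 7, i.e. {224, 243, 336} 16-bit words, with no padding word since frmsizecod is even, so the syncframe is 486 bytes and carries the fixed 1536 PCM samples (~34.8 ms). A standalone check of that arithmetic (the values are illustrative):

    #include <cstdio>

    int main() {
        const unsigned frameSizeWords = 243;   // frameSizeTable[14 >> 1][1]
        const unsigned frmsizecod = 14;
        const unsigned fscod = 1;              // 44.1 kHz
        unsigned words = frameSizeWords + ((fscod == 1) ? (frmsizecod & 1) : 0);
        unsigned payloadBytes = words * 2;     // 16-bit words -> bytes
        double frameMs = 1536.0 * 1000.0 / 44100.0;
        printf("payload = %u bytes, duration ~ %.2f ms\n", payloadBytes, frameMs);
        return 0;                              // payload = 486 bytes, duration ~ 34.83 ms
    }
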
@@ -80,6 +195,13 @@ static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) {
return false;
}
+ size_t frameLengthInHeader =
+ ((ptr[3] & 3) << 11) + (ptr[4] << 3) + ((ptr[5] >> 5) & 7);
+ if (frameLengthInHeader > size) {
+ return false;
+ }
+
+ *frameLength = frameLengthInHeader;
return true;
}
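
The 13-bit aac_frame_length read above spans the low 2 bits of header byte 3, all of byte 4, and the top 3 bits of byte 5. A small self-check of that extraction (the header bytes are hypothetical):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    static size_t adtsFrameLength(const uint8_t *hdr) {
        return ((hdr[3] & 3) << 11) + (hdr[4] << 3) + ((hdr[5] >> 5) & 7);
    }

    int main() {
        // Hypothetical bytes encoding a 291-byte frame: 291 = (0 << 11) + (36 << 3) + 3.
        const uint8_t hdr[7] = { 0xFF, 0xF1, 0x50, 0x80, 36, 3 << 5, 0xFC };
        assert(adtsFrameLength(hdr) == 291);
        return 0;
    }
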
@@ -135,8 +257,8 @@ status_t ElementaryStreamQueue::appendData(
uint8_t *ptr = (uint8_t *)data;
ssize_t startOffset = -1;
- for (size_t i = 0; i + 3 < size; ++i) {
- if (!memcmp("\x00\x00\x00\x01", &ptr[i], 4)) {
+ for (size_t i = 0; i + 2 < size; ++i) {
+ if (!memcmp("\x00\x00\x01", &ptr[i], 3)) {
startOffset = i;
break;
}
@@ -148,7 +270,7 @@ status_t ElementaryStreamQueue::appendData(
if (startOffset > 0) {
ALOGI("found something resembling an H.264/MPEG syncword "
- "at offset %d",
+ "at offset %zd",
startOffset);
}
@@ -181,7 +303,7 @@ status_t ElementaryStreamQueue::appendData(
if (startOffset > 0) {
ALOGI("found something resembling an H.264/MPEG syncword "
- "at offset %d",
+ "at offset %zd",
startOffset);
}
@@ -201,8 +323,10 @@ status_t ElementaryStreamQueue::appendData(
}
#else
ssize_t startOffset = -1;
+ size_t frameLength;
for (size_t i = 0; i < size; ++i) {
- if (IsSeeminglyValidADTSHeader(&ptr[i], size - i)) {
+ if (IsSeeminglyValidADTSHeader(
+ &ptr[i], size - i, &frameLength)) {
startOffset = i;
break;
}
@@ -214,16 +338,49 @@ status_t ElementaryStreamQueue::appendData(
if (startOffset > 0) {
ALOGI("found something resembling an AAC syncword at "
- "offset %d",
+ "offset %zd",
startOffset);
}
+ if (frameLength != size - startOffset) {
+ ALOGV("First ADTS AAC frame length is %zd bytes, "
+ "while the buffer size is %zd bytes.",
+ frameLength, size - startOffset);
+ }
+
data = &ptr[startOffset];
size -= startOffset;
#endif
break;
}
+ case AC3:
+ {
+ uint8_t *ptr = (uint8_t *)data;
+
+ ssize_t startOffset = -1;
+ for (size_t i = 0; i < size; ++i) {
+ if (IsSeeminglyValidAC3Header(&ptr[i], size - i)) {
+ startOffset = i;
+ break;
+ }
+ }
+
+ if (startOffset < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ if (startOffset > 0) {
+ ALOGI("found something resembling an AC3 syncword at "
+ "offset %zd",
+ startOffset);
+ }
+
+ data = &ptr[startOffset];
+ size -= startOffset;
+ break;
+ }
+
case MPEG_AUDIO:
{
uint8_t *ptr = (uint8_t *)data;
@@ -242,7 +399,7 @@ status_t ElementaryStreamQueue::appendData(
if (startOffset > 0) {
ALOGI("found something resembling an MPEG audio "
- "syncword at offset %d",
+ "syncword at offset %zd",
startOffset);
}
@@ -266,7 +423,7 @@ status_t ElementaryStreamQueue::appendData(
if (mBuffer == NULL || neededSize > mBuffer->capacity()) {
neededSize = (neededSize + 65535) & ~65535;
- ALOGV("resizing buffer to size %d", neededSize);
+ ALOGV("resizing buffer to size %zu", neededSize);
sp<ABuffer> buffer = new ABuffer(neededSize);
if (mBuffer != NULL) {
@@ -289,7 +446,7 @@ status_t ElementaryStreamQueue::appendData(
#if 0
if (mMode == AAC) {
- ALOGI("size = %d, timeUs = %.2f secs", size, timeUs / 1E6);
+ ALOGI("size = %zu, timeUs = %.2f secs", size, timeUs / 1E6);
hexdump(data, size);
}
#endif
@@ -328,6 +485,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnit() {
return dequeueAccessUnitH264();
case AAC:
return dequeueAccessUnitAAC();
+ case AC3:
+ return dequeueAccessUnitAC3();
case MPEG_VIDEO:
return dequeueAccessUnitMPEGVideo();
case MPEG4_VIDEO:
@@ -340,6 +499,51 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnit() {
}
}
+sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAC3() {
+ unsigned syncStartPos = 0; // in bytes
+ unsigned payloadSize = 0;
+ sp<MetaData> format = new MetaData;
+ while (true) {
+ if (syncStartPos + 2 >= mBuffer->size()) {
+ return NULL;
+ }
+
+ payloadSize = parseAC3SyncFrame(
+ mBuffer->data() + syncStartPos,
+ mBuffer->size() - syncStartPos,
+ &format);
+ if (payloadSize > 0) {
+ break;
+ }
+ ++syncStartPos;
+ }
+
+ if (mBuffer->size() < syncStartPos + payloadSize) {
+ ALOGV("Not enough buffer size for AC3");
+ return NULL;
+ }
+
+ if (mFormat == NULL) {
+ mFormat = format;
+ }
+
+ sp<ABuffer> accessUnit = new ABuffer(syncStartPos + payloadSize);
+ memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);
+
+ int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize);
+ CHECK_GE(timeUs, 0ll);
+ accessUnit->meta()->setInt64("timeUs", timeUs);
+
+ memmove(
+ mBuffer->data(),
+ mBuffer->data() + syncStartPos + payloadSize,
+ mBuffer->size() - syncStartPos - payloadSize);
+
+ mBuffer->setRange(0, mBuffer->size() - syncStartPos - payloadSize);
+
+ return accessUnit;
+}
+
sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitPCMAudio() {
if (mBuffer->size() < 4) {
return NULL;
@@ -349,7 +553,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitPCMAudio() {
CHECK_EQ(bits.getBits(8), 0xa0);
unsigned numAUs = bits.getBits(8);
bits.skipBits(8);
- unsigned quantization_word_length = bits.getBits(2);
+ unsigned quantization_word_length __unused = bits.getBits(2);
unsigned audio_sampling_frequency = bits.getBits(3);
unsigned num_channels = bits.getBits(3);
@@ -413,6 +617,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
// having to interpolate.
// The final AAC frame may well extend into the next RangeInfo but
// that's ok.
+ // TODO: the logic commented above is skipped because the codec cannot take
+ // arbitrary-sized input buffers.
size_t offset = 0;
while (offset < info.mLength) {
if (offset + 7 > mBuffer->size()) {
@@ -425,7 +631,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
CHECK_EQ(bits.getBits(12), 0xfffu);
bits.skipBits(3); // ID, layer
- bool protection_absent = bits.getBits(1) != 0;
+ bool protection_absent __unused = bits.getBits(1) != 0;
if (mFormat == NULL) {
unsigned profile = bits.getBits(2);
@@ -474,12 +680,15 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
return NULL;
}
- size_t headerSize = protection_absent ? 7 : 9;
+ size_t headerSize __unused = protection_absent ? 7 : 9;
offset += aac_frame_length;
+ // TODO: move back to concatenation when the codec can support arbitrary input buffers.
+ // For now only queue a single buffer
+ break;
}
- int64_t timeUs = fetchTimestamp(offset);
+ int64_t timeUs = fetchTimestampAAC(offset);
sp<ABuffer> accessUnit = new ABuffer(offset);
memcpy(accessUnit->data(), mBuffer->data(), offset);
@@ -526,6 +735,45 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {
return timeUs;
}
+// TODO: avoid interpolating timestamps once the codec supports arbitrary-sized input buffers
+int64_t ElementaryStreamQueue::fetchTimestampAAC(size_t size) {
+ int64_t timeUs = -1;
+ bool first = true;
+
+ size_t samplesize = size;
+ while (size > 0) {
+ CHECK(!mRangeInfos.empty());
+
+ RangeInfo *info = &*mRangeInfos.begin();
+
+ if (first) {
+ timeUs = info->mTimestampUs;
+ first = false;
+ }
+
+ if (info->mLength > size) {
+ int32_t sampleRate;
+ CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate));
+ info->mLength -= size;
+ size_t numSamples = 1024 * size / samplesize;
+ info->mTimestampUs += numSamples * 1000000ll / sampleRate;
+ size = 0;
+ } else {
+ size -= info->mLength;
+
+ mRangeInfos.erase(mRangeInfos.begin());
+ info = NULL;
+ }
+
+ }
+
+ if (timeUs == 0ll) {
+ ALOGV("Returning 0 timestamp");
+ }
+
+ return timeUs;
+}
+
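
fetchTimestampAAC() above interpolates within a PES range by scaling the consumed bytes to AAC samples (1024 per frame) before advancing the range's timestamp. A standalone worked example of that step (the frame size and sample rate are illustrative):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    int main() {
        const int32_t sampleRate = 48000;
        const size_t frameBytes = 512;      // size of the first dequeued ADTS frame
        const size_t consumed = 512;        // bytes removed from the range
        int64_t numSamples = 1024 * consumed / frameBytes;
        int64_t advanceUs = numSamples * 1000000ll / sampleRate;
        printf("advance by %lld us\n", (long long)advanceUs);   // 21333 us at 48 kHz
        return 0;
    }
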
struct NALPosition {
size_t nalOffset;
size_t nalSize;
@@ -586,6 +834,12 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
unsigned nalType = mBuffer->data()[pos.nalOffset] & 0x1f;
+ if (nalType == 6) {
+ sp<ABuffer> sei = new ABuffer(pos.nalSize);
+ memcpy(sei->data(), mBuffer->data() + pos.nalOffset, pos.nalSize);
+ accessUnit->meta()->setBuffer("sei", sei);
+ }
+
#if !LOG_NDEBUG
char tmp[128];
sprintf(tmp, "0x%02x", nalType);
@@ -837,7 +1091,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {
accessUnit->meta()->setInt64("timeUs", timeUs);
- ALOGV("returning MPEG video access unit at time %lld us",
+ ALOGV("returning MPEG video access unit at time %" PRId64 " us",
timeUs);
// hexdump(accessUnit->data(), accessUnit->size());
@@ -996,7 +1250,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEG4Video() {
accessUnit->meta()->setInt64("timeUs", timeUs);
- ALOGV("returning MPEG4 video access unit at time %lld us",
+ ALOGV("returning MPEG4 video access unit at time %" PRId64 " us",
timeUs);
// hexdump(accessUnit->data(), accessUnit->size());
diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h
index 66a8087..eb4b1c9 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.h
+++ b/media/libstagefright/mpeg2ts/ESQueue.h
@@ -26,12 +26,13 @@
namespace android {
struct ABuffer;
-struct MetaData;
+class MetaData;
struct ElementaryStreamQueue {
enum Mode {
H264,
AAC,
+ AC3,
MPEG_AUDIO,
MPEG_VIDEO,
MPEG4_VIDEO,
@@ -67,6 +68,7 @@ private:
sp<ABuffer> dequeueAccessUnitH264();
sp<ABuffer> dequeueAccessUnitAAC();
+ sp<ABuffer> dequeueAccessUnitAC3();
sp<ABuffer> dequeueAccessUnitMPEGAudio();
sp<ABuffer> dequeueAccessUnitMPEGVideo();
sp<ABuffer> dequeueAccessUnitMPEG4Video();
@@ -75,6 +77,7 @@ private:
// consume a logical (compressed) access unit of size "size",
// returns its timestamp in us (or -1 if no time information).
int64_t fetchTimestamp(size_t size);
+ int64_t fetchTimestampAAC(size_t size);
DISALLOW_EVIL_CONSTRUCTORS(ElementaryStreamQueue);
};
diff --git a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
index dd714c9..85859f7 100644
--- a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
+++ b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp
@@ -36,6 +36,8 @@
#include <media/stagefright/Utils.h>
#include <utils/String8.h>
+#include <inttypes.h>
+
namespace android {
struct MPEG2PSExtractor::Track : public MediaSource {
@@ -130,7 +132,8 @@ sp<MediaSource> MPEG2PSExtractor::getTrack(size_t index) {
return new WrappedTrack(this, mTracks.valueAt(index));
}
-sp<MetaData> MPEG2PSExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+sp<MetaData> MPEG2PSExtractor::getTrackMetaData(
+ size_t index, uint32_t /* flags */) {
if (index >= mTracks.size()) {
return NULL;
}
@@ -408,7 +411,7 @@ ssize_t MPEG2PSExtractor::dequeuePES() {
PTS |= br.getBits(15);
CHECK_EQ(br.getBits(1), 1u);
- ALOGV("PTS = %llu", PTS);
+ ALOGV("PTS = %" PRIu64, PTS);
// ALOGI("PTS = %.2f secs", PTS / 90000.0f);
optional_bytes_remaining -= 5;
@@ -425,7 +428,7 @@ ssize_t MPEG2PSExtractor::dequeuePES() {
DTS |= br.getBits(15);
CHECK_EQ(br.getBits(1), 1u);
- ALOGV("DTS = %llu", DTS);
+ ALOGV("DTS = %" PRIu64, DTS);
optional_bytes_remaining -= 5;
}
@@ -443,7 +446,7 @@ ssize_t MPEG2PSExtractor::dequeuePES() {
ESCR |= br.getBits(15);
CHECK_EQ(br.getBits(1), 1u);
- ALOGV("ESCR = %llu", ESCR);
+ ALOGV("ESCR = %" PRIu64, ESCR);
/* unsigned ESCR_extension = */br.getBits(9);
CHECK_EQ(br.getBits(1), 1u);
@@ -472,7 +475,7 @@ ssize_t MPEG2PSExtractor::dequeuePES() {
if (br.numBitsLeft() < dataLength * 8) {
ALOGE("PES packet does not carry enough data to contain "
- "payload. (numBitsLeft = %d, required = %d)",
+ "payload. (numBitsLeft = %zu, required = %u)",
br.numBitsLeft(), dataLength * 8);
return ERROR_MALFORMED;
@@ -625,7 +628,7 @@ status_t MPEG2PSExtractor::Track::read(
status_t MPEG2PSExtractor::Track::appendPESData(
unsigned PTS_DTS_flags,
- uint64_t PTS, uint64_t DTS,
+ uint64_t PTS, uint64_t /* DTS */,
const uint8_t *data, size_t size) {
if (mQueue == NULL) {
return OK;
diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
index d449c34..33cfd1d 100644
--- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
+++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp
@@ -85,12 +85,6 @@ status_t MPEG2TSSource::read(
MediaBuffer **out, const ReadOptions *options) {
*out = NULL;
- int64_t seekTimeUs;
- ReadOptions::SeekMode seekMode;
- if (mSeekable && options && options->getSeekTo(&seekTimeUs, &seekMode)) {
- return ERROR_UNSUPPORTED;
- }
-
status_t finalResult;
while (!mImpl->hasBufferAvailable(&finalResult)) {
if (finalResult != OK) {
@@ -103,6 +97,17 @@ status_t MPEG2TSSource::read(
}
}
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode seekMode;
+ if (mSeekable && options && options->getSeekTo(&seekTimeUs, &seekMode)) {
+ // A seek was requested, but we don't actually support seeking and so can only "seek" to
+ // the current position
+ int64_t nextBufTimeUs;
+ if (mImpl->nextBufferTime(&nextBufTimeUs) != OK || seekTimeUs != nextBufTimeUs) {
+ return ERROR_UNSUPPORTED;
+ }
+ }
+
return mImpl->read(out, options);
}
@@ -141,7 +146,7 @@ sp<MediaSource> MPEG2TSExtractor::getTrack(size_t index) {
}
sp<MetaData> MPEG2TSExtractor::getTrackMetaData(
- size_t index, uint32_t flags) {
+ size_t index, uint32_t /* flags */) {
return index < mSourceImpls.size()
? mSourceImpls.editItemAt(index)->getFormat() : NULL;
}
@@ -159,7 +164,6 @@ void MPEG2TSExtractor::init() {
int numPacketsParsed = 0;
while (feedMore() == OK) {
- ATSParser::SourceType type;
if (haveAudio && haveVideo) {
break;
}
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index cd912e7..aaa8334 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -1,6 +1,10 @@
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
+ifeq ($(TARGET_DEVICE), manta)
+ LOCAL_CFLAGS += -DSURFACE_IS_BGR32
+endif
+
LOCAL_SRC_FILES:= \
GraphicBufferSource.cpp \
OMX.cpp \
@@ -10,6 +14,7 @@ LOCAL_SRC_FILES:= \
SoftOMXComponent.cpp \
SoftOMXPlugin.cpp \
SoftVideoDecoderOMXComponent.cpp \
+ SoftVideoEncoderOMXComponent.cpp \
LOCAL_C_INCLUDES += \
$(TOP)/frameworks/av/media/libstagefright \
@@ -18,6 +23,7 @@ LOCAL_C_INCLUDES += \
LOCAL_SHARED_LIBRARIES := \
libbinder \
+ libhardware \
libmedia \
libutils \
liblog \
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index b8970ad..2945644 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
#define LOG_TAG "GraphicBufferSource"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
@@ -26,6 +28,9 @@
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
+#include <gui/BufferItem.h>
+
+#include <inttypes.h>
namespace android {
@@ -33,7 +38,8 @@ static const bool EXTRA_CHECK = true;
GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
- uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount) :
+ uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount,
+ bool useGraphicBufferInMeta) :
mInitCheck(UNKNOWN_ERROR),
mNodeInstance(nodeInstance),
mExecuting(false),
@@ -41,12 +47,22 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
mNumFramesAvailable(0),
mEndOfStream(false),
mEndOfStreamSent(false),
+ mMaxTimestampGapUs(-1ll),
+ mPrevOriginalTimeUs(-1ll),
+ mPrevModifiedTimeUs(-1ll),
+ mSkipFramesBeforeNs(-1ll),
mRepeatAfterUs(-1ll),
mRepeatLastFrameGeneration(0),
+ mRepeatLastFrameTimestamp(-1ll),
mLatestSubmittedBufferId(-1),
mLatestSubmittedBufferFrameNum(0),
mLatestSubmittedBufferUseCount(0),
- mRepeatBufferDeferred(false) {
+ mRepeatBufferDeferred(false),
+ mTimePerCaptureUs(-1ll),
+ mTimePerFrameUs(-1ll),
+ mPrevCaptureUs(-1ll),
+ mPrevFrameUs(-1ll),
+ mUseGraphicBufferInMeta(useGraphicBufferInMeta) {
ALOGV("GraphicBufferSource w=%u h=%u c=%u",
bufferWidth, bufferHeight, bufferCount);
@@ -59,13 +75,12 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
String8 name("GraphicBufferSource");
- mBufferQueue = new BufferQueue();
- mBufferQueue->setConsumerName(name);
- mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
- mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
- GRALLOC_USAGE_HW_TEXTURE);
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setConsumerName(name);
+ mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER);
- mInitCheck = mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+ mInitCheck = mConsumer->setMaxAcquiredBufferCount(bufferCount);
if (mInitCheck != NO_ERROR) {
ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
bufferCount, mInitCheck);
@@ -79,7 +94,7 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this);
sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
- mInitCheck = mBufferQueue->consumerConnect(proxy, false);
+ mInitCheck = mConsumer->consumerConnect(proxy, false);
if (mInitCheck != NO_ERROR) {
ALOGE("Error connecting to BufferQueue: %s (%d)",
strerror(-mInitCheck), mInitCheck);
@@ -91,8 +106,8 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
GraphicBufferSource::~GraphicBufferSource() {
ALOGV("~GraphicBufferSource");
- if (mBufferQueue != NULL) {
- status_t err = mBufferQueue->consumerDisconnect();
+ if (mConsumer != NULL) {
+ status_t err = mConsumer->consumerDisconnect();
if (err != NO_ERROR) {
ALOGW("consumerDisconnect failed: %d", err);
}
@@ -101,7 +116,7 @@ GraphicBufferSource::~GraphicBufferSource() {
void GraphicBufferSource::omxExecuting() {
Mutex::Autolock autoLock(mMutex);
- ALOGV("--> executing; avail=%d, codec vec size=%zd",
+ ALOGV("--> executing; avail=%zu, codec vec size=%zd",
mNumFramesAvailable, mCodecBuffers.size());
CHECK(!mExecuting);
mExecuting = true;
@@ -123,7 +138,7 @@ void GraphicBufferSource::omxExecuting() {
}
}
- ALOGV("done loading initial frames, avail=%d", mNumFramesAvailable);
+ ALOGV("done loading initial frames, avail=%zu", mNumFramesAvailable);
// If EOS has already been signaled, and there are no more frames to
// submit, try to send EOS now as well.
@@ -175,7 +190,7 @@ void GraphicBufferSource::omxLoaded(){
mLooper.clear();
}
- ALOGV("--> loaded; avail=%d eos=%d eosSent=%d",
+ ALOGV("--> loaded; avail=%zu eos=%d eosSent=%d",
mNumFramesAvailable, mEndOfStream, mEndOfStreamSent);
// Codec is no longer executing. Discard all codec-related state.
@@ -196,7 +211,7 @@ void GraphicBufferSource::addCodecBuffer(OMX_BUFFERHEADERTYPE* header) {
return;
}
- ALOGV("addCodecBuffer h=%p size=%lu p=%p",
+ ALOGV("addCodecBuffer h=%p size=%" PRIu32 " p=%p",
header, header->nAllocLen, header->pBuffer);
CodecBuffer codecBuffer;
codecBuffer.mHeader = header;
@@ -217,7 +232,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
return;
}
- ALOGV("codecBufferEmptied h=%p size=%lu filled=%lu p=%p",
+ ALOGV("codecBufferEmptied h=%p size=%" PRIu32 " filled=%" PRIu32 " p=%p",
header, header->nAllocLen, header->nFilledLen,
header->pBuffer);
CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
@@ -242,13 +257,25 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
// Pull the graphic buffer handle back out of the buffer, and confirm
// that it matches expectations.
OMX_U8* data = header->pBuffer;
- buffer_handle_t bufferHandle;
- memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t));
- if (bufferHandle != codecBuffer.mGraphicBuffer->handle) {
- // should never happen
- ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p",
- bufferHandle, codecBuffer.mGraphicBuffer->handle);
- CHECK(!"codecBufferEmptied: mismatched buffer");
+ MetadataBufferType type = *(MetadataBufferType *)data;
+ if (type == kMetadataBufferTypeGrallocSource) {
+ buffer_handle_t bufferHandle;
+ memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t));
+ if (bufferHandle != codecBuffer.mGraphicBuffer->handle) {
+ // should never happen
+ ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p",
+ bufferHandle, codecBuffer.mGraphicBuffer->handle);
+ CHECK(!"codecBufferEmptied: mismatched buffer");
+ }
+ } else if (type == kMetadataBufferTypeGraphicBuffer) {
+ GraphicBuffer *buffer;
+ memcpy(&buffer, data + 4, sizeof(buffer));
+ if (buffer != codecBuffer.mGraphicBuffer.get()) {
+ // should never happen
+ ALOGE("codecBufferEmptied: buffer is %p, expected %p",
+ buffer, codecBuffer.mGraphicBuffer.get());
+ CHECK(!"codecBufferEmptied: mismatched buffer");
+ }
}
}
@@ -264,7 +291,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
if (id == mLatestSubmittedBufferId) {
CHECK_GT(mLatestSubmittedBufferUseCount--, 0);
} else {
- mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber,
+ mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
}
} else {
@@ -278,7 +305,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
if (mNumFramesAvailable) {
// Fill this codec buffer.
CHECK(!mEndOfStreamSent);
- ALOGV("buffer freed, %d frames avail (eos=%d)",
+ ALOGV("buffer freed, %zu frames avail (eos=%d)",
mNumFramesAvailable, mEndOfStream);
fillCodecBuffer_l();
} else if (mEndOfStream) {
@@ -299,6 +326,33 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
return;
}
+void GraphicBufferSource::codecBufferFilled(OMX_BUFFERHEADERTYPE* header) {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mMaxTimestampGapUs > 0ll
+ && !(header->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ ssize_t index = mOriginalTimeUs.indexOfKey(header->nTimeStamp);
+ if (index >= 0) {
+ ALOGV("OUT timestamp: %lld -> %lld",
+ static_cast<long long>(header->nTimeStamp),
+ static_cast<long long>(mOriginalTimeUs[index]));
+ header->nTimeStamp = mOriginalTimeUs[index];
+ mOriginalTimeUs.removeItemsAt(index);
+ } else {
+ // giving up the effort as encoder doesn't appear to preserve pts
+ ALOGW("giving up limiting timestamp gap (pts = %lld)",
+ header->nTimeStamp);
+ mMaxTimestampGapUs = -1ll;
+ }
+ if (mOriginalTimeUs.size() > BufferQueue::NUM_BUFFER_SLOTS) {
+ // something terribly wrong must have happened, giving up...
+ ALOGE("mOriginalTimeUs has too many entries (%zu)",
+ mOriginalTimeUs.size());
+ mMaxTimestampGapUs = -1ll;
+ }
+ }
+}
+
void GraphicBufferSource::suspend(bool suspend) {
Mutex::Autolock autoLock(mMutex);
@@ -306,8 +360,8 @@ void GraphicBufferSource::suspend(bool suspend) {
mSuspended = true;
while (mNumFramesAvailable > 0) {
- BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ BufferItem item;
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// shouldn't happen.
@@ -320,7 +374,7 @@ void GraphicBufferSource::suspend(bool suspend) {
--mNumFramesAvailable;
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
}
return;
@@ -349,15 +403,15 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
int cbi = findAvailableCodecBuffer_l();
if (cbi < 0) {
// No buffers available, bail.
- ALOGV("fillCodecBuffer_l: no codec buffers, avail now %d",
+ ALOGV("fillCodecBuffer_l: no codec buffers, avail now %zu",
mNumFramesAvailable);
return false;
}
- ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d",
+ ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%zu",
mNumFramesAvailable);
- BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ BufferItem item;
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// shouldn't happen
ALOGW("fillCodecBuffer_l: frame was not available");
@@ -384,10 +438,21 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
mBufferSlot[item.mBuf] = item.mGraphicBuffer;
}
- err = submitBuffer_l(item, cbi);
+ err = UNKNOWN_ERROR;
+
+ // only submit sample if start time is unspecified, or sample
+ // is queued after the specified start time
+ if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) {
+ // if start time is set, offset time stamp by start time
+ if (mSkipFramesBeforeNs > 0) {
+ item.mTimestamp -= mSkipFramesBeforeNs;
+ }
+ err = submitBuffer_l(item, cbi);
+ }
+
if (err != OK) {
ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf);
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
} else {
ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
@@ -410,7 +475,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
//
// To be on the safe side we try to release the buffer.
ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL");
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
mLatestSubmittedBufferId,
mLatestSubmittedBufferFrameNum,
EGL_NO_DISPLAY,
@@ -428,9 +493,10 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
return false;
}
- BufferQueue::BufferItem item;
+ BufferItem item;
item.mBuf = mLatestSubmittedBufferId;
item.mFrameNumber = mLatestSubmittedBufferFrameNum;
+ item.mTimestamp = mRepeatLastFrameTimestamp;
status_t err = submitBuffer_l(item, cbi);
@@ -440,16 +506,30 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
++mLatestSubmittedBufferUseCount;
+ /* Repeat the last frame up to kRepeatLastFrameCount times.
+ * In the case of a static scene, a single repeat might not get rid of encoder
+ * ghosting completely, so refresh a couple more times to get better quality.
+ */
+ if (--mRepeatLastFrameCount > 0) {
+ mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;
+
+ if (mReflector != NULL) {
+ sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id());
+ msg->setInt32("generation", ++mRepeatLastFrameGeneration);
+ msg->post(mRepeatAfterUs);
+ }
+ }
+
return true;
}
void GraphicBufferSource::setLatestSubmittedBuffer_l(
- const BufferQueue::BufferItem &item) {
+ const BufferItem &item) {
ALOGV("setLatestSubmittedBuffer_l");
if (mLatestSubmittedBufferId >= 0) {
if (mLatestSubmittedBufferUseCount == 0) {
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
mLatestSubmittedBufferId,
mLatestSubmittedBufferFrameNum,
EGL_NO_DISPLAY,
@@ -460,8 +540,11 @@ void GraphicBufferSource::setLatestSubmittedBuffer_l(
mLatestSubmittedBufferId = item.mBuf;
mLatestSubmittedBufferFrameNum = item.mFrameNumber;
+ mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;
+
mLatestSubmittedBufferUseCount = 1;
mRepeatBufferDeferred = false;
+ mRepeatLastFrameCount = kRepeatLastFrameCount;
if (mReflector != NULL) {
sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id());
@@ -472,7 +555,7 @@ void GraphicBufferSource::setLatestSubmittedBuffer_l(
status_t GraphicBufferSource::signalEndOfInputStream() {
Mutex::Autolock autoLock(mMutex);
- ALOGV("signalEndOfInputStream: exec=%d avail=%d eos=%d",
+ ALOGV("signalEndOfInputStream: exec=%d avail=%zu eos=%d",
mExecuting, mNumFramesAvailable, mEndOfStream);
if (mEndOfStream) {
@@ -497,9 +580,75 @@ status_t GraphicBufferSource::signalEndOfInputStream() {
return OK;
}
+int64_t GraphicBufferSource::getTimestamp(const BufferItem &item) {
+ int64_t timeUs = item.mTimestamp / 1000;
+
+ if (mTimePerCaptureUs > 0ll) {
+ // Time lapse or slow motion mode
+ if (mPrevCaptureUs < 0ll) {
+ // first capture
+ mPrevCaptureUs = timeUs;
+ mPrevFrameUs = timeUs;
+ } else {
+ // snap to nearest capture point
+ int64_t nFrames = (timeUs + mTimePerCaptureUs / 2 - mPrevCaptureUs)
+ / mTimePerCaptureUs;
+ if (nFrames <= 0) {
+ // skip this frame as it's too close to previous capture
+ ALOGV("skipping frame, timeUs %lld", static_cast<long long>(timeUs));
+ return -1;
+ }
+ mPrevCaptureUs = mPrevCaptureUs + nFrames * mTimePerCaptureUs;
+ mPrevFrameUs += mTimePerFrameUs * nFrames;
+ }
+
+ ALOGV("timeUs %lld, captureUs %lld, frameUs %lld",
+ static_cast<long long>(timeUs),
+ static_cast<long long>(mPrevCaptureUs),
+ static_cast<long long>(mPrevFrameUs));
+
+ return mPrevFrameUs;
+ } else if (mMaxTimestampGapUs > 0ll) {
+ /* Cap the timestamp gap between adjacent frames to the specified max.
+ *
+ * In the scenario of cast mirroring, encoding could be suspended for
+ * prolonged periods. Limit the pts gap to work around the problem
+ * where the encoder's rate control logic produces huge frames after a
+ * long period of suspension.
+ */
+
+ int64_t originalTimeUs = timeUs;
+ if (mPrevOriginalTimeUs >= 0ll) {
+ if (originalTimeUs < mPrevOriginalTimeUs) {
+ // Drop the frame if it's going backward in time. Bad timestamp
+ // could disrupt encoder's rate control completely.
+ ALOGW("Dropping frame that's going backward in time");
+ return -1;
+ }
+ int64_t timestampGapUs = originalTimeUs - mPrevOriginalTimeUs;
+ timeUs = (timestampGapUs < mMaxTimestampGapUs ?
+ timestampGapUs : mMaxTimestampGapUs) + mPrevModifiedTimeUs;
+ }
+ mPrevOriginalTimeUs = originalTimeUs;
+ mPrevModifiedTimeUs = timeUs;
+ mOriginalTimeUs.add(timeUs, originalTimeUs);
+ ALOGV("IN timestamp: %lld -> %lld",
+ static_cast<long long>(originalTimeUs),
+ static_cast<long long>(timeUs));
+ }
+
+ return timeUs;
+}
+
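
getTimestamp() above now implements two policies: time-lapse/slow-motion capture snaps each frame to the nearest multiple of the capture interval and emits output timestamps one playback interval apart, while max-gap capping keeps the pts fed to the encoder from jumping by more than maxGapUs (the original pts is remembered and later restored by codecBufferFilled()). A standalone sketch of both rules (names and state handling are illustrative, not the class members):

    #include <cstdint>

    // Time lapse / slow motion: snap to the nearest capture point and emit frames
    // one playback interval apart; returns -1 when the frame should be dropped.
    static int64_t timeLapsePts(int64_t timeUs, int64_t timePerCaptureUs,
                                int64_t timePerFrameUs,
                                int64_t &prevCaptureUs, int64_t &prevFrameUs) {
        if (prevCaptureUs < 0) {            // first capture
            prevCaptureUs = timeUs;
            prevFrameUs = timeUs;
            return prevFrameUs;
        }
        int64_t nFrames =
                (timeUs + timePerCaptureUs / 2 - prevCaptureUs) / timePerCaptureUs;
        if (nFrames <= 0) {
            return -1;                      // too close to the previous capture point
        }
        prevCaptureUs += nFrames * timePerCaptureUs;
        prevFrameUs += nFrames * timePerFrameUs;
        return prevFrameUs;
    }

    // Max-gap capping: the pts handed to the encoder never advances by more than
    // maxGapUs; the caller remembers originalUs so it can be restored on output.
    static int64_t cappedPts(int64_t originalUs, int64_t maxGapUs,
                             int64_t &prevOriginalUs, int64_t &prevModifiedUs) {
        int64_t modifiedUs = originalUs;
        if (prevOriginalUs >= 0) {
            int64_t gapUs = originalUs - prevOriginalUs;
            modifiedUs = (gapUs < maxGapUs ? gapUs : maxGapUs) + prevModifiedUs;
        }
        prevOriginalUs = originalUs;
        prevModifiedUs = modifiedUs;
        return modifiedUs;
    }
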
status_t GraphicBufferSource::submitBuffer_l(
- const BufferQueue::BufferItem &item, int cbi) {
+ const BufferItem &item, int cbi) {
ALOGV("submitBuffer_l cbi=%d", cbi);
+
+ int64_t timeUs = getTimestamp(item);
+ if (timeUs < 0ll) {
+ return UNKNOWN_ERROR;
+ }
+
CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
codecBuffer.mGraphicBuffer = mBufferSlot[item.mBuf];
codecBuffer.mBuf = item.mBuf;
@@ -508,14 +657,26 @@ status_t GraphicBufferSource::submitBuffer_l(
OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t));
OMX_U8* data = header->pBuffer;
- const OMX_U32 type = kMetadataBufferTypeGrallocSource;
- buffer_handle_t handle = codecBuffer.mGraphicBuffer->handle;
- memcpy(data, &type, 4);
- memcpy(data + 4, &handle, sizeof(buffer_handle_t));
+ buffer_handle_t handle;
+ if (!mUseGraphicBufferInMeta) {
+ const OMX_U32 type = kMetadataBufferTypeGrallocSource;
+ handle = codecBuffer.mGraphicBuffer->handle;
+ memcpy(data, &type, 4);
+ memcpy(data + 4, &handle, sizeof(buffer_handle_t));
+ } else {
+ // codecBuffer holds a reference to the GraphicBuffer, so
+ // it is valid while it is with the OMX component
+ const OMX_U32 type = kMetadataBufferTypeGraphicBuffer;
+ memcpy(data, &type, 4);
+ // passing a non-reference-counted graphicBuffer
+ GraphicBuffer *buffer = codecBuffer.mGraphicBuffer.get();
+ handle = buffer->handle;
+ memcpy(data + 4, &buffer, sizeof(buffer));
+ }
status_t err = mNodeInstance->emptyDirectBuffer(header, 0,
4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME,
- item.mTimestamp / 1000);
+ timeUs);
if (err != OK) {
ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err);
codecBuffer.mGraphicBuffer = NULL;
@@ -590,10 +751,10 @@ int GraphicBufferSource::findMatchingCodecBuffer_l(
}
// BufferQueue::ConsumerListener callback
-void GraphicBufferSource::onFrameAvailable() {
+void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock autoLock(mMutex);
- ALOGV("onFrameAvailable exec=%d avail=%d",
+ ALOGV("onFrameAvailable exec=%d avail=%zu",
mExecuting, mNumFramesAvailable);
if (mEndOfStream || mSuspended) {
@@ -606,10 +767,16 @@ void GraphicBufferSource::onFrameAvailable() {
ALOGV("onFrameAvailable: suspended, ignoring frame");
}
- BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ BufferItem item;
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == OK) {
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ // If this is the first time we're seeing this buffer, add it to our
+ // slot table.
+ if (item.mGraphicBuffer != NULL) {
+ ALOGV("onFrameAvailable: setting mBufferSlot %d", item.mBuf);
+ mBufferSlot[item.mBuf] = item.mGraphicBuffer;
+ }
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
}
return;
@@ -629,13 +796,13 @@ void GraphicBufferSource::onFrameAvailable() {
void GraphicBufferSource::onBuffersReleased() {
Mutex::Autolock lock(mMutex);
- uint32_t slotMask;
- if (mBufferQueue->getReleasedBuffers(&slotMask) != NO_ERROR) {
+ uint64_t slotMask;
+ if (mConsumer->getReleasedBuffers(&slotMask) != NO_ERROR) {
ALOGW("onBuffersReleased: unable to get released buffer set");
- slotMask = 0xffffffff;
+ slotMask = 0xffffffffffffffffULL;
}
- ALOGV("onBuffersReleased: 0x%08x", slotMask);
+ ALOGV("onBuffersReleased: 0x%016" PRIx64, slotMask);
for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
if ((slotMask & 0x01) != 0) {
@@ -645,6 +812,11 @@ void GraphicBufferSource::onBuffersReleased() {
}
}
+// BufferQueue::ConsumerListener callback
+void GraphicBufferSource::onSidebandStreamChanged() {
+ ALOG_ASSERT(false, "GraphicBufferSource can't consume sideband streams");
+}
+
status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs(
int64_t repeatAfterUs) {
Mutex::Autolock autoLock(mMutex);
@@ -658,6 +830,38 @@ status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs(
return OK;
}
+status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mExecuting || maxGapUs <= 0ll) {
+ return INVALID_OPERATION;
+ }
+
+ mMaxTimestampGapUs = maxGapUs;
+
+ return OK;
+}
+
+void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mSkipFramesBeforeNs =
+ (skipFramesBeforeUs > 0) ? (skipFramesBeforeUs * 1000) : -1ll;
+}
+
+status_t GraphicBufferSource::setTimeLapseUs(int64_t* data) {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mExecuting || data[0] <= 0ll || data[1] <= 0ll) {
+ return INVALID_OPERATION;
+ }
+
+ mTimePerFrameUs = data[0];
+ mTimePerCaptureUs = data[1];
+
+ return OK;
+}
+
void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatRepeatLastFrame:
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 9e5eee6..401bbc3 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -49,7 +49,8 @@ namespace android {
class GraphicBufferSource : public BufferQueue::ConsumerListener {
public:
GraphicBufferSource(OMXNodeInstance* nodeInstance,
- uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount);
+ uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount,
+ bool useGraphicBufferInMeta = false);
virtual ~GraphicBufferSource();
// We can't throw an exception if the constructor fails, so we just set
@@ -61,7 +62,7 @@ public:
// Returns the handle to the producer side of the BufferQueue. Buffers
// queued on this will be received by GraphicBufferSource.
sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
- return mBufferQueue;
+ return mProducer;
}
// This is called when OMX transitions to OMX_StateExecuting, which means
@@ -87,6 +88,10 @@ public:
// fill it with a new frame of data; otherwise, just mark it as available.
void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header);
+ // Called when omx_message::FILL_BUFFER_DONE is received. (Currently the
+ // buffer source will fix timestamp in the header if needed.)
+ void codecBufferFilled(OMX_BUFFERHEADERTYPE* header);
+
// This is called after the last input frame has been submitted. We
// need to submit an empty buffer with the EOS flag set. If we don't
// have a codec buffer ready, we just set the mEndOfStream flag.
@@ -105,6 +110,26 @@ public:
// state and once this behaviour is specified it cannot be reset.
status_t setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs);
+ // When set, the timestamp fed to the encoder will be modified such that
+ // the gap between two adjacent frames is capped at maxGapUs. The timestamp
+ // will be restored to the original when the encoded frame is returned to
+ // the client.
+ // This is to solve a problem in certain real-time streaming cases, where
+ // the encoder's rate control logic produces huge frames after a long period
+ // of suspension on input.
+ status_t setMaxTimestampGapUs(int64_t maxGapUs);
+
+ // Sets the time-lapse (or slow-motion) parameters.
+ // data[0] is the time (us) between two frames for playback.
+ // data[1] is the time (us) between two frames for capture.
+ // When set, the sample's timestamp will be modified to match the playback
+ // frame rate, and the capture timestamp will be modified to match the
+ // capture rate.
+ status_t setTimeLapseUs(int64_t* data);
+
+ // Sets the start time in us (in system time); samples before this time should
+ // be dropped and not submitted to the encoder.
+ void setSkipFramesBeforeUs(int64_t startTimeUs);
+
protected:
// BufferQueue::ConsumerListener interface, called when a new frame of
// data is available. If we're executing and a codec buffer is
@@ -112,13 +137,18 @@ protected:
// into the codec buffer, and call Empty[This]Buffer. If we're not yet
// executing or there's no codec buffer available, we just increment
// mNumFramesAvailable and return.
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
// BufferQueue::ConsumerListener interface, called when the client has
// released one or more GraphicBuffers. We clear out the appropriate
// set of mBufferSlot entries.
virtual void onBuffersReleased();
+ // BufferQueue::ConsumerListener interface, called when the client has
+ // changed the sideband stream. GraphicBufferSource doesn't handle sideband
+ // streams so this is a no-op (and should never be called).
+ virtual void onSidebandStreamChanged();
+
private:
// Keep track of codec input buffers. They may either be available
// (mGraphicBuffer == NULL) or in use by the codec.
@@ -157,14 +187,15 @@ private:
// Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer
// reference into the codec buffer, and submits the data to the codec.
- status_t submitBuffer_l(const BufferQueue::BufferItem &item, int cbi);
+ status_t submitBuffer_l(const BufferItem &item, int cbi);
// Submits an empty buffer, with the EOS flag set. Returns without
// doing anything if we don't have a codec buffer available.
void submitEndOfInputStream_l();
- void setLatestSubmittedBuffer_l(const BufferQueue::BufferItem &item);
+ void setLatestSubmittedBuffer_l(const BufferItem &item);
bool repeatLatestSubmittedBuffer_l();
+ int64_t getTimestamp(const BufferItem &item);
// Lock, covers all member variables.
mutable Mutex mMutex;
@@ -180,8 +211,11 @@ private:
bool mSuspended;
- // We consume graphic buffers from this.
- sp<BufferQueue> mBufferQueue;
+ // Our BufferQueue interfaces. mProducer is passed to the producer through
+ // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
+ // the buffers queued by the producer.
+ sp<IGraphicBufferProducer> mProducer;
+ sp<IGraphicBufferConsumer> mConsumer;
// Number of frames pending in BufferQueue that haven't yet been
// forwarded to the codec.
@@ -206,13 +240,23 @@ private:
enum {
kWhatRepeatLastFrame,
};
+ enum {
+ kRepeatLastFrameCount = 10,
+ };
- int64_t mRepeatAfterUs;
+ KeyedVector<int64_t, int64_t> mOriginalTimeUs;
+ int64_t mMaxTimestampGapUs;
+ int64_t mPrevOriginalTimeUs;
+ int64_t mPrevModifiedTimeUs;
+ int64_t mSkipFramesBeforeNs;
sp<ALooper> mLooper;
sp<AHandlerReflector<GraphicBufferSource> > mReflector;
+ int64_t mRepeatAfterUs;
int32_t mRepeatLastFrameGeneration;
+ int64_t mRepeatLastFrameTimestamp;
+ int32_t mRepeatLastFrameCount;
int mLatestSubmittedBufferId;
uint64_t mLatestSubmittedBufferFrameNum;
@@ -222,6 +266,14 @@ private:
// no codec buffer was available at the time.
bool mRepeatBufferDeferred;
+ // Time lapse / slow motion configuration
+ int64_t mTimePerCaptureUs;
+ int64_t mTimePerFrameUs;
+ int64_t mPrevCaptureUs;
+ int64_t mPrevFrameUs;
+
+ bool mUseGraphicBufferInMeta;
+
void onMessageReceived(const sp<AMessage> &msg);
DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
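
A hypothetical caller-side sketch of the new knobs declared above (the helper and the numeric values are illustrative, and it only compiles inside the framework tree where GraphicBufferSource.h lives):

    static void configureForTimeLapse(
            const sp<GraphicBufferSource> &source, int64_t recordingStartTimeUs) {
        source->setMaxTimestampGapUs(100000ll);               // cap encoder pts jumps at 100 ms
        source->setSkipFramesBeforeUs(recordingStartTimeUs);  // drop frames queued earlier
        int64_t timeLapse[2] = {
            33333ll,     // data[0]: playback interval (~30 fps)
            1000000ll,   // data[1]: capture interval (1 fps)
        };
        source->setTimeLapseUs(timeLapse);                    // ~30x time lapse
    }
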
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 274f2eb..f8d38ff 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "OMX"
#include <utils/Log.h>
@@ -185,7 +187,7 @@ void OMX::binderDied(const wp<IBinder> &the_late_who) {
instance->onObserverDied(mMaster);
}
-bool OMX::livesLocally(node_id node, pid_t pid) {
+bool OMX::livesLocally(node_id /* node */, pid_t pid) {
return pid == getpid();
}
@@ -223,7 +225,7 @@ status_t OMX::allocateNode(
*node = 0;
- OMXNodeInstance *instance = new OMXNodeInstance(this, observer);
+ OMXNodeInstance *instance = new OMXNodeInstance(this, observer, name);
OMX_COMPONENTTYPE *handle;
OMX_ERRORTYPE err = mMaster->makeComponentInstance(
@@ -231,7 +233,7 @@ status_t OMX::allocateNode(
instance, &handle);
if (err != OMX_ErrorNone) {
- ALOGV("FAILED to allocate omx component '%s'", name);
+ ALOGE("FAILED to allocate omx component '%s'", name);
instance->onGetHandleFailed();
@@ -243,8 +245,8 @@ status_t OMX::allocateNode(
instance->setHandle(*node, handle);
- mLiveNodes.add(observer->asBinder(), instance);
- observer->asBinder()->linkToDeath(this);
+ mLiveNodes.add(IInterface::asBinder(observer), instance);
+ IInterface::asBinder(observer)->linkToDeath(this);
return OK;
}
@@ -254,7 +256,7 @@ status_t OMX::freeNode(node_id node) {
{
Mutex::Autolock autoLock(mLock);
- ssize_t index = mLiveNodes.indexOfKey(instance->observer()->asBinder());
+ ssize_t index = mLiveNodes.indexOfKey(IInterface::asBinder(instance->observer()));
if (index < 0) {
// This could conceivably happen if the observer dies at roughly the
// same time that a client attempts to free the node explicitly.
@@ -263,7 +265,7 @@ status_t OMX::freeNode(node_id node) {
mLiveNodes.removeItemsAt(index);
}
- instance->observer()->asBinder()->unlinkToDeath(this);
+ IInterface::asBinder(instance->observer())->unlinkToDeath(this);
status_t err = instance->freeNode(mMaster);
@@ -285,6 +287,7 @@ status_t OMX::sendCommand(
status_t OMX::getParameter(
node_id node, OMX_INDEXTYPE index,
void *params, size_t size) {
+ ALOGV("getParameter(%u %#x %p %zd)", node, index, params, size);
return findInstance(node)->getParameter(
index, params, size);
}
@@ -292,6 +295,7 @@ status_t OMX::getParameter(
status_t OMX::setParameter(
node_id node, OMX_INDEXTYPE index,
const void *params, size_t size) {
+ ALOGV("setParameter(%u %#x %p %zd)", node, index, params, size);
return findInstance(node)->setParameter(
index, params, size);
}
@@ -338,6 +342,13 @@ status_t OMX::prepareForAdaptivePlayback(
portIndex, enable, maxFrameWidth, maxFrameHeight);
}
+status_t OMX::configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle) {
+ return findInstance(node)->configureVideoTunnelMode(
+ portIndex, tunneled, audioHwSync, sidebandHandle);
+}
+
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -424,8 +435,8 @@ OMX_ERRORTYPE OMX::OnEvent(
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN OMX_PTR pEventData) {
- ALOGV("OnEvent(%d, %ld, %ld)", eEvent, nData1, nData2);
+ OMX_IN OMX_PTR /* pEventData */) {
+ ALOGV("OnEvent(%d, %" PRIu32", %" PRIu32 ")", eEvent, nData1, nData2);
// Forward to OMXNodeInstance.
findInstance(node)->onEvent(eEvent, nData1, nData2);
@@ -443,13 +454,13 @@ OMX_ERRORTYPE OMX::OnEvent(
}
OMX_ERRORTYPE OMX::OnEmptyBufferDone(
- node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
ALOGV("OnEmptyBufferDone buffer=%p", pBuffer);
omx_message msg;
msg.type = omx_message::EMPTY_BUFFER_DONE;
msg.node = node;
- msg.u.buffer_data.buffer = pBuffer;
+ msg.u.buffer_data.buffer = buffer;
findDispatcher(node)->post(msg);
@@ -457,19 +468,17 @@ OMX_ERRORTYPE OMX::OnEmptyBufferDone(
}
OMX_ERRORTYPE OMX::OnFillBufferDone(
- node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
+ node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
ALOGV("OnFillBufferDone buffer=%p", pBuffer);
omx_message msg;
msg.type = omx_message::FILL_BUFFER_DONE;
msg.node = node;
- msg.u.extended_buffer_data.buffer = pBuffer;
+ msg.u.extended_buffer_data.buffer = buffer;
msg.u.extended_buffer_data.range_offset = pBuffer->nOffset;
msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
msg.u.extended_buffer_data.flags = pBuffer->nFlags;
msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
- msg.u.extended_buffer_data.platform_private = pBuffer->pPlatformPrivate;
- msg.u.extended_buffer_data.data_ptr = pBuffer->pBuffer;
findDispatcher(node)->post(msg);
@@ -479,7 +488,7 @@ OMX_ERRORTYPE OMX::OnFillBufferDone(
OMX::node_id OMX::makeNodeID(OMXNodeInstance *instance) {
// mLock is already held.
- node_id node = (node_id)(uintptr_t)++mNodeCounter;
+ node_id node = (node_id)++mNodeCounter;
mNodeIDToInstance.add(node, instance);
return node;
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index 6b6d0ab..ae3cb33 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -91,7 +91,7 @@ void OMXMaster::addPlugin(OMXPluginBase *plugin) {
}
if (err != OMX_ErrorNoMore) {
- ALOGE("OMX plugin failed w/ error 0x%08x after registering %d "
+ ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
"components", err, mPluginByComponentName.size());
}
}
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5f104fc..ab7419f 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -18,11 +18,15 @@
#define LOG_TAG "OMXNodeInstance"
#include <utils/Log.h>
+#include <inttypes.h>
+
#include "../include/OMXNodeInstance.h"
#include "OMXMaster.h"
#include "GraphicBufferSource.h"
#include <OMX_Component.h>
+#include <OMX_IndexExt.h>
+#include <OMX_AsString.h>
#include <binder/IMemory.h>
#include <gui/BufferQueue.h>
@@ -30,7 +34,68 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaErrors.h>
+#include <utils/misc.h>
+
static const OMX_U32 kPortIndexInput = 0;
+static const OMX_U32 kPortIndexOutput = 1;
+
+#define CLOGW(fmt, ...) ALOGW("[%x:%s] " fmt, mNodeID, mName, ##__VA_ARGS__)
+
+#define CLOG_ERROR_IF(cond, fn, err, fmt, ...) \
+ ALOGE_IF(cond, #fn "(%x:%s, " fmt ") ERROR: %s(%#x)", \
+ mNodeID, mName, ##__VA_ARGS__, asString(err), err)
+#define CLOG_ERROR(fn, err, fmt, ...) CLOG_ERROR_IF(true, fn, err, fmt, ##__VA_ARGS__)
+#define CLOG_IF_ERROR(fn, err, fmt, ...) \
+ CLOG_ERROR_IF((err) != OMX_ErrorNone, fn, err, fmt, ##__VA_ARGS__)
+
+#define CLOGI_(level, fn, fmt, ...) \
+ ALOGI_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+#define CLOGD_(level, fn, fmt, ...) \
+ ALOGD_IF(DEBUG >= (level), #fn "(%x:%s, " fmt ")", mNodeID, mName, ##__VA_ARGS__)
+
+#define CLOG_LIFE(fn, fmt, ...) CLOGI_(ADebug::kDebugLifeCycle, fn, fmt, ##__VA_ARGS__)
+#define CLOG_STATE(fn, fmt, ...) CLOGI_(ADebug::kDebugState, fn, fmt, ##__VA_ARGS__)
+#define CLOG_CONFIG(fn, fmt, ...) CLOGI_(ADebug::kDebugConfig, fn, fmt, ##__VA_ARGS__)
+#define CLOG_INTERNAL(fn, fmt, ...) CLOGD_(ADebug::kDebugInternalState, fn, fmt, ##__VA_ARGS__)
+
+#define CLOG_DEBUG_IF(cond, fn, fmt, ...) \
+ ALOGD_IF(cond, #fn "(%x, " fmt ")", mNodeID, ##__VA_ARGS__)
+
+#define CLOG_BUFFER(fn, fmt, ...) \
+ CLOG_DEBUG_IF(DEBUG >= ADebug::kDebugAll, fn, fmt, ##__VA_ARGS__)
+#define CLOG_BUMPED_BUFFER(fn, fmt, ...) \
+ CLOG_DEBUG_IF(DEBUG_BUMP >= ADebug::kDebugAll, fn, fmt, ##__VA_ARGS__)
+
+/* buffer formatting */
+#define BUFFER_FMT(port, fmt, ...) "%s:%u " fmt, portString(port), (port), ##__VA_ARGS__
+#define NEW_BUFFER_FMT(buffer_id, port, fmt, ...) \
+ BUFFER_FMT(port, fmt ") (#%zu => %#x", ##__VA_ARGS__, mActiveBuffers.size(), (buffer_id))
+
+#define SIMPLE_BUFFER(port, size, data) BUFFER_FMT(port, "%zu@%p", (size), (data))
+#define SIMPLE_NEW_BUFFER(buffer_id, port, size, data) \
+ NEW_BUFFER_FMT(buffer_id, port, "%zu@%p", (size), (data))
+
+#define EMPTY_BUFFER(addr, header) "%#x [%u@%p]", \
+ (addr), (header)->nAllocLen, (header)->pBuffer
+#define FULL_BUFFER(addr, header) "%#" PRIxPTR " [%u@%p (%u..+%u) f=%x ts=%lld]", \
+ (intptr_t)(addr), (header)->nAllocLen, (header)->pBuffer, \
+ (header)->nOffset, (header)->nFilledLen, (header)->nFlags, (header)->nTimeStamp
+
+#define WITH_STATS_WRAPPER(fmt, ...) fmt " { IN=%zu/%zu OUT=%zu/%zu }", ##__VA_ARGS__, \
+ mInputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexInput], \
+ mOutputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexOutput]
+// TRICKY: this is needed so formatting macros expand before substitution
+#define WITH_STATS(fmt, ...) WITH_STATS_WRAPPER(fmt, ##__VA_ARGS__)
+
+template<class T>
+static void InitOMXParams(T *params) {
+ memset(params, 0, sizeof(T));
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
namespace android {
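InitOMXParams above centralizes the OMX parameter-struct setup that the rest of this patch removes from individual call sites: zero the whole struct (so reserved and newly added fields are defined), then fill in nSize and version 1.0.0.0. A self-contained sketch of the same pattern with a simplified stand-in struct (the real types live in the OMX IL headers):

    #include <cstdint>
    #include <cstring>

    struct FakeVersion { uint8_t nVersionMajor, nVersionMinor, nRevision, nStep; };
    struct FakeOmxParams {              // simplified stand-in for an OMX param struct
        uint32_t nSize;
        struct { FakeVersion s; } nVersion;
        uint32_t nPortIndex;
    };

    template<class T>
    static void InitOMXParams(T *params) {
        memset(params, 0, sizeof(T));   // clears reserved/extension fields too
        params->nSize = sizeof(T);
        params->nVersion.s.nVersionMajor = 1;
        params->nVersion.s.nVersionMinor = 0;
        params->nVersion.s.nRevision = 0;
        params->nVersion.s.nStep = 0;
    }

    int main() {
        FakeOmxParams def;
        InitOMXParams(&def);
        def.nPortIndex = 0;             // e.g. kPortIndexInput
        return 0;
    }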
@@ -56,8 +121,8 @@ struct BufferMeta {
}
memcpy((OMX_U8 *)mMem->pointer() + header->nOffset,
- header->pBuffer + header->nOffset,
- header->nFilledLen);
+ header->pBuffer + header->nOffset,
+ header->nFilledLen);
}
void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) {
@@ -66,8 +131,8 @@ struct BufferMeta {
}
memcpy(header->pBuffer + header->nOffset,
- (const OMX_U8 *)mMem->pointer() + header->nOffset,
- header->nFilledLen);
+ (const OMX_U8 *)mMem->pointer() + header->nOffset,
+ header->nFilledLen);
}
void setGraphicBuffer(const sp<GraphicBuffer> &graphicBuffer) {
@@ -89,22 +154,45 @@ OMX_CALLBACKTYPE OMXNodeInstance::kCallbacks = {
&OnEvent, &OnEmptyBufferDone, &OnFillBufferDone
};
+static inline const char *portString(OMX_U32 portIndex) {
+ switch (portIndex) {
+ case kPortIndexInput: return "Input";
+ case kPortIndexOutput: return "Output";
+ case ~0U: return "All";
+ default: return "port";
+ }
+}
+
OMXNodeInstance::OMXNodeInstance(
- OMX *owner, const sp<IOMXObserver> &observer)
+ OMX *owner, const sp<IOMXObserver> &observer, const char *name)
: mOwner(owner),
- mNodeID(NULL),
+ mNodeID(0),
mHandle(NULL),
mObserver(observer),
- mDying(false) {
+ mDying(false)
+#ifdef __LP64__
+ , mBufferIDCount(0)
+#endif
+{
+ mName = ADebug::GetDebugName(name);
+ DEBUG = ADebug::GetDebugLevelFromProperty(name, "debug.stagefright.omx-debug");
+ ALOGV("debug level for %s is %d", name, DEBUG);
+ DEBUG_BUMP = DEBUG;
+ mNumPortBuffers[0] = 0;
+ mNumPortBuffers[1] = 0;
+ mDebugLevelBumpPendingBuffers[0] = 0;
+ mDebugLevelBumpPendingBuffers[1] = 0;
}
OMXNodeInstance::~OMXNodeInstance() {
+ free(mName);
CHECK(mHandle == NULL);
}
void OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) {
- CHECK(mHandle == NULL);
mNodeID = node_id;
+ CLOG_LIFE(allocateNode, "handle=%p", handle);
+ CHECK(mHandle == NULL);
mHandle = handle;
}
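The constructor above wires up the per-node logging state (mName, DEBUG, DEBUG_BUMP) consumed by the CLOG_* macros defined earlier. A manual expansion of one call used later in this patch may help; this is a worked-through expansion, not compiler output, and the sample log line uses hypothetical values:

    // CLOG_STATE(sendCommand, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param)
    //   -> CLOGI_(ADebug::kDebugState, sendCommand, "%s(%d), %s(%d)", ...)
    //   -> ALOGI_IF(DEBUG >= ADebug::kDebugState,
    //               "sendCommand" "(%x:%s, " "%s(%d), %s(%d)" ")",  // literals concatenate
    //               mNodeID, mName, asString(cmd), cmd, paramString, param);
    // giving log lines roughly of the form:
    //   sendCommand(f2:OMX.google.h264.decoder, StateSet(0), Idle(2))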
@@ -116,6 +204,7 @@ sp<GraphicBufferSource> OMXNodeInstance::getGraphicBufferSource() {
void OMXNodeInstance::setGraphicBufferSource(
const sp<GraphicBufferSource>& bufferSource) {
Mutex::Autolock autoLock(mGraphicBufferSourceLock);
+ CLOG_INTERNAL(setGraphicBufferSource, "%p", bufferSource.get());
mGraphicBufferSource = bufferSource;
}
@@ -136,6 +225,7 @@ static status_t StatusFromOMXError(OMX_ERRORTYPE err) {
case OMX_ErrorNone:
return OK;
case OMX_ErrorUnsupportedSetting:
+ case OMX_ErrorUnsupportedIndex:
return ERROR_UNSUPPORTED;
default:
return UNKNOWN_ERROR;
@@ -143,8 +233,14 @@ static status_t StatusFromOMXError(OMX_ERRORTYPE err) {
}
status_t OMXNodeInstance::freeNode(OMXMaster *master) {
+ CLOG_LIFE(freeNode, "handle=%p", mHandle);
static int32_t kMaxNumIterations = 10;
+ // exit if we have already freed the node
+ if (mHandle == NULL) {
+ return OK;
+ }
+
// Transition the node from its current state all the way down
// to "Loaded".
// This ensures that all active buffers are properly freed even
@@ -166,10 +262,11 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
OMX_ERRORTYPE err;
int32_t iteration = 0;
while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
- && state != OMX_StateIdle
- && state != OMX_StateInvalid) {
+ && state != OMX_StateIdle
+ && state != OMX_StateInvalid) {
if (++iteration > kMaxNumIterations) {
- ALOGE("component failed to enter Idle state, aborting.");
+ CLOGW("failed to enter Idle state (now %s(%d), aborting.",
+ asString(state), state);
state = OMX_StateInvalid;
break;
}
@@ -195,10 +292,11 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
OMX_ERRORTYPE err;
int32_t iteration = 0;
while ((err = OMX_GetState(mHandle, &state)) == OMX_ErrorNone
- && state != OMX_StateLoaded
- && state != OMX_StateInvalid) {
+ && state != OMX_StateLoaded
+ && state != OMX_StateInvalid) {
if (++iteration > kMaxNumIterations) {
- ALOGE("component failed to enter Loaded state, aborting.");
+ CLOGW("failed to enter Loaded state (now %s(%d), aborting.",
+ asString(state), state);
state = OMX_StateInvalid;
break;
}
@@ -216,23 +314,21 @@ status_t OMXNodeInstance::freeNode(OMXMaster *master) {
break;
default:
- CHECK(!"should not be here, unknown state.");
+ LOG_ALWAYS_FATAL("unknown state %s(%#x).", asString(state), state);
break;
}
- ALOGV("calling destroyComponentInstance");
+ ALOGV("[%x:%s] calling destroyComponentInstance", mNodeID, mName);
OMX_ERRORTYPE err = master->destroyComponentInstance(
static_cast<OMX_COMPONENTTYPE *>(mHandle));
- ALOGV("destroyComponentInstance returned err %d", err);
mHandle = NULL;
-
- if (err != OMX_ErrorNone) {
- ALOGE("FreeHandle FAILED with error 0x%08x.", err);
- }
+ CLOG_IF_ERROR(freeNode, err, "");
+ free(mName);
+ mName = NULL;
mOwner->invalidateNodeID(mNodeID);
- mNodeID = NULL;
+ mNodeID = 0;
ALOGV("OMXNodeInstance going away.");
delete this;
@@ -261,44 +357,67 @@ status_t OMXNodeInstance::sendCommand(
Mutex::Autolock autoLock(mLock);
+ // bump internal-state debug level for 2 input and output frames past a command
+ {
+ Mutex::Autolock _l(mDebugLock);
+ bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
+ }
+
+ const char *paramString =
+ cmd == OMX_CommandStateSet ? asString((OMX_STATETYPE)param) : portString(param);
+ CLOG_STATE(sendCommand, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
OMX_ERRORTYPE err = OMX_SendCommand(mHandle, cmd, param, NULL);
+ CLOG_IF_ERROR(sendCommand, err, "%s(%d), %s(%d)", asString(cmd), cmd, paramString, param);
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::getParameter(
- OMX_INDEXTYPE index, void *params, size_t size) {
+ OMX_INDEXTYPE index, void *params, size_t /* size */) {
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, index, params);
-
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ // some errors are expected for getParameter
+ if (err != OMX_ErrorNoMore) {
+ CLOG_IF_ERROR(getParameter, err, "%s(%#x)", asString(extIndex), index);
+ }
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::setParameter(
OMX_INDEXTYPE index, const void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ CLOG_CONFIG(setParameter, "%s(%#x), %zu@%p", asString(extIndex), index, size, params);
OMX_ERRORTYPE err = OMX_SetParameter(
mHandle, index, const_cast<void *>(params));
-
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x)", asString(extIndex), index);
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::getConfig(
- OMX_INDEXTYPE index, void *params, size_t size) {
+ OMX_INDEXTYPE index, void *params, size_t /* size */) {
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetConfig(mHandle, index, params);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ // some errors are expected for getConfig
+ if (err != OMX_ErrorNoMore) {
+ CLOG_IF_ERROR(getConfig, err, "%s(%#x)", asString(extIndex), index);
+ }
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::setConfig(
OMX_INDEXTYPE index, const void *params, size_t size) {
Mutex::Autolock autoLock(mLock);
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ CLOG_CONFIG(setConfig, "%s(%#x), %zu@%p", asString(extIndex), index, size, params);
OMX_ERRORTYPE err = OMX_SetConfig(
mHandle, index, const_cast<void *>(params));
-
+ CLOG_IF_ERROR(setConfig, err, "%s(%#x)", asString(extIndex), index);
return StatusFromOMXError(err);
}
@@ -306,13 +425,14 @@ status_t OMXNodeInstance::getState(OMX_STATETYPE* state) {
Mutex::Autolock autoLock(mLock);
OMX_ERRORTYPE err = OMX_GetState(mHandle, state);
-
+ CLOG_IF_ERROR(getState, err, "");
return StatusFromOMXError(err);
}
status_t OMXNodeInstance::enableGraphicBuffers(
OMX_U32 portIndex, OMX_BOOL enable) {
Mutex::Autolock autoLock(mLock);
+ CLOG_CONFIG(enableGraphicBuffers, "%s:%u, %d", portString(portIndex), portIndex, enable);
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.enableAndroidNativeBuffers");
@@ -320,32 +440,19 @@ status_t OMXNodeInstance::enableGraphicBuffers(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- if (enable) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
- }
-
+ CLOG_ERROR_IF(enable, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
- OMX_VERSIONTYPE ver;
- ver.s.nVersionMajor = 1;
- ver.s.nVersionMinor = 0;
- ver.s.nRevision = 0;
- ver.s.nStep = 0;
- EnableAndroidNativeBuffersParams params = {
- sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
- };
+ EnableAndroidNativeBuffersParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+ params.enable = enable;
err = OMX_SetParameter(mHandle, index, &params);
-
- if (err != OMX_ErrorNone) {
- ALOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
- err, err);
-
- return UNKNOWN_ERROR;
- }
-
- return OK;
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d", name, index,
+ portString(portIndex), portIndex, enable);
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::getGraphicBufferUsage(
@@ -358,26 +465,19 @@ status_t OMXNodeInstance::getGraphicBufferUsage(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
-
+ CLOG_ERROR(getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
- OMX_VERSIONTYPE ver;
- ver.s.nVersionMajor = 1;
- ver.s.nVersionMinor = 0;
- ver.s.nRevision = 0;
- ver.s.nStep = 0;
- GetAndroidNativeBufferUsageParams params = {
- sizeof(GetAndroidNativeBufferUsageParams), ver, portIndex, 0,
- };
+ GetAndroidNativeBufferUsageParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
err = OMX_GetParameter(mHandle, index, &params);
-
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetAndroidNativeBufferUsage failed with error %d (0x%08x)",
- err, err);
- return UNKNOWN_ERROR;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u", name, index,
+ portString(portIndex), portIndex);
+ return StatusFromOMXError(err);
}
*usage = params.nUsage;
@@ -389,43 +489,69 @@ status_t OMXNodeInstance::storeMetaDataInBuffers(
OMX_U32 portIndex,
OMX_BOOL enable) {
Mutex::Autolock autolock(mLock);
- return storeMetaDataInBuffers_l(portIndex, enable);
+ CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u en:%d", portString(portIndex), portIndex, enable);
+ return storeMetaDataInBuffers_l(
+ portIndex, enable,
+ OMX_FALSE /* useGraphicBuffer */, NULL /* usingGraphicBufferInMetadata */);
}
status_t OMXNodeInstance::storeMetaDataInBuffers_l(
OMX_U32 portIndex,
- OMX_BOOL enable) {
+ OMX_BOOL enable,
+ OMX_BOOL useGraphicBuffer,
+ OMX_BOOL *usingGraphicBufferInMetadata) {
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.storeMetaDataInBuffers");
- OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
- if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
+ OMX_STRING graphicBufferName = const_cast<OMX_STRING>(
+ "OMX.google.android.index.storeGraphicBufferInMetaData");
+ if (usingGraphicBufferInMetadata == NULL) {
+ usingGraphicBufferInMetadata = &useGraphicBuffer;
+ }
- return StatusFromOMXError(err);
+ OMX_ERRORTYPE err =
+ (useGraphicBuffer && portIndex == kPortIndexInput)
+ ? OMX_GetExtensionIndex(mHandle, graphicBufferName, &index)
+ : OMX_ErrorBadParameter;
+ if (err == OMX_ErrorNone) {
+ *usingGraphicBufferInMetadata = OMX_TRUE;
+ name = graphicBufferName;
+ } else {
+ err = OMX_GetExtensionIndex(mHandle, name, &index);
}
- StoreMetaDataInBuffersParams params;
- memset(&params, 0, sizeof(params));
- params.nSize = sizeof(params);
+ OMX_ERRORTYPE xerr = err;
+ if (err == OMX_ErrorNone) {
+ StoreMetaDataInBuffersParams params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
- // Version: 1.0.0.0
- params.nVersion.s.nVersionMajor = 1;
+ err = OMX_SetParameter(mHandle, index, &params);
+ }
- params.nPortIndex = portIndex;
- params.bStoreMetaData = enable;
- if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
- ALOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
- return UNKNOWN_ERROR;
+ // don't log loud error if component does not support metadata mode on the output
+ if (err != OMX_ErrorNone) {
+ *usingGraphicBufferInMetadata = OMX_FALSE;
+ if (err == OMX_ErrorUnsupportedIndex && portIndex == kPortIndexOutput) {
+ CLOGW("component does not support metadata mode; using fallback");
+ } else if (xerr != OMX_ErrorNone) {
+ CLOG_ERROR(getExtensionIndex, xerr, "%s", name);
+ } else {
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d GB=%d", name, index,
+ portString(portIndex), portIndex, enable, useGraphicBuffer);
+ }
}
- return err;
+ return StatusFromOMXError(err);
}
status_t OMXNodeInstance::prepareForAdaptivePlayback(
OMX_U32 portIndex, OMX_BOOL enable, OMX_U32 maxFrameWidth,
OMX_U32 maxFrameHeight) {
Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(prepareForAdaptivePlayback, "%s:%u en=%d max=%ux%u",
+ portString(portIndex), portIndex, enable, maxFrameWidth, maxFrameHeight);
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
@@ -433,27 +559,63 @@ status_t OMXNodeInstance::prepareForAdaptivePlayback(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGW_IF(enable, "OMX_GetExtensionIndex %s failed", name);
+ CLOG_ERROR_IF(enable, getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
PrepareForAdaptivePlaybackParams params;
- params.nSize = sizeof(params);
- params.nVersion.s.nVersionMajor = 1;
- params.nVersion.s.nVersionMinor = 0;
- params.nVersion.s.nRevision = 0;
- params.nVersion.s.nStep = 0;
-
+ InitOMXParams(&params);
params.nPortIndex = portIndex;
params.bEnable = enable;
params.nMaxFrameWidth = maxFrameWidth;
params.nMaxFrameHeight = maxFrameHeight;
- if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
- ALOGW("OMX_SetParameter failed for PrepareForAdaptivePlayback "
- "with error %d (0x%08x)", err, err);
- return UNKNOWN_ERROR;
+
+ err = OMX_SetParameter(mHandle, index, &params);
+ CLOG_IF_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d max=%ux%u", name, index,
+ portString(portIndex), portIndex, enable, maxFrameWidth, maxFrameHeight);
+ return StatusFromOMXError(err);
+}
+
+status_t OMXNodeInstance::configureVideoTunnelMode(
+ OMX_U32 portIndex, OMX_BOOL tunneled, OMX_U32 audioHwSync,
+ native_handle_t **sidebandHandle) {
+ Mutex::Autolock autolock(mLock);
+ CLOG_CONFIG(configureVideoTunnelMode, "%s:%u tun=%d sync=%u",
+ portString(portIndex), portIndex, tunneled, audioHwSync);
+
+ OMX_INDEXTYPE index;
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.configureVideoTunnelMode");
+
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR_IF(tunneled, getExtensionIndex, err, "%s", name);
+ return StatusFromOMXError(err);
+ }
+
+ ConfigureVideoTunnelModeParams tunnelParams;
+ InitOMXParams(&tunnelParams);
+ tunnelParams.nPortIndex = portIndex;
+ tunnelParams.bTunneled = tunneled;
+ tunnelParams.nAudioHwSync = audioHwSync;
+ err = OMX_SetParameter(mHandle, index, &tunnelParams);
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u tun=%d sync=%u", name, index,
+ portString(portIndex), portIndex, tunneled, audioHwSync);
+ return StatusFromOMXError(err);
}
- return err;
+
+ err = OMX_GetParameter(mHandle, index, &tunnelParams);
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u tun=%d sync=%u", name, index,
+ portString(portIndex), portIndex, tunneled, audioHwSync);
+ return StatusFromOMXError(err);
+ }
+ if (sidebandHandle) {
+ *sidebandHandle = (native_handle_t*)tunnelParams.pSidebandWindow;
+ }
+
+ return OK;
}
status_t OMXNodeInstance::useBuffer(
@@ -470,19 +632,19 @@ status_t OMXNodeInstance::useBuffer(
params->size(), static_cast<OMX_U8 *>(params->pointer()));
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err);
+ CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
- *buffer = header;
+ *buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
@@ -491,6 +653,8 @@ status_t OMXNodeInstance::useBuffer(
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(useBuffer, NEW_BUFFER_FMT(
+ *buffer, portIndex, "%zu@%p", params->size(), params->pointer()));
return OK;
}
@@ -500,17 +664,14 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
// port definition
OMX_PARAM_PORTDEFINITIONTYPE def;
- def.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
- def.nVersion.s.nVersionMajor = 1;
- def.nVersion.s.nVersionMinor = 0;
- def.nVersion.s.nRevision = 0;
- def.nVersion.s.nStep = 0;
+ InitOMXParams(&def);
def.nPortIndex = portIndex;
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, OMX_IndexParamPortDefinition, &def);
- if (err != OMX_ErrorNone)
- {
- ALOGE("%s::%d:Error getting OMX_IndexParamPortDefinition", __FUNCTION__, __LINE__);
- return err;
+ if (err != OMX_ErrorNone) {
+ OMX_INDEXTYPE index = OMX_IndexParamPortDefinition;
+ CLOG_ERROR(getParameter, err, "%s(%#x): %s:%u",
+ asString(index), index, portString(portIndex), portIndex);
+ return UNKNOWN_ERROR;
}
BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
@@ -528,20 +689,21 @@ status_t OMXNodeInstance::useGraphicBuffer2_l(
bufferHandle);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", err, err);
+ CLOG_ERROR(useBuffer, err, BUFFER_FMT(portIndex, "%u@%p", def.nBufferSize, bufferHandle));
delete bufferMeta;
bufferMeta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pBuffer, bufferHandle);
CHECK_EQ(header->pAppPrivate, bufferMeta);
- *buffer = header;
+ *buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
-
+ CLOG_BUFFER(useGraphicBuffer2, NEW_BUFFER_FMT(
+ *buffer, portIndex, "%u@%p", def.nBufferSize, bufferHandle));
return OK;
}
@@ -565,10 +727,8 @@ status_t OMXNodeInstance::useGraphicBuffer(
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.useAndroidNativeBuffer");
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
-
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex %s failed", name);
-
+ CLOG_ERROR(getExtensionIndex, err, "%s", name);
return StatusFromOMXError(err);
}
@@ -589,23 +749,24 @@ status_t OMXNodeInstance::useGraphicBuffer(
err = OMX_SetParameter(mHandle, index, &params);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
- err);
+ CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u meta=%p GB=%p", name, index,
+ portString(portIndex), portIndex, bufferMeta, graphicBuffer->handle);
delete bufferMeta;
bufferMeta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, bufferMeta);
- *buffer = header;
+ *buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
-
+ CLOG_BUFFER(useGraphicBuffer, NEW_BUFFER_FMT(
+ *buffer, portIndex, "GB=%p", graphicBuffer->handle));
return OK;
}
@@ -614,14 +775,15 @@ status_t OMXNodeInstance::updateGraphicBufferInMeta(
OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
- OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)(buffer);
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
VideoDecoderOutputMetaData *metadata =
(VideoDecoderOutputMetaData *)(header->pBuffer);
BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);
bufferMeta->setGraphicBuffer(graphicBuffer);
metadata->eType = kMetadataBufferTypeGrallocSource;
metadata->pHandle = graphicBuffer->handle;
-
+ CLOG_BUFFER(updateGraphicBufferInMeta, "%s:%u, %#x := %p",
+ portString(portIndex), portIndex, buffer, graphicBuffer->handle);
return OK;
}
@@ -636,7 +798,10 @@ status_t OMXNodeInstance::createInputSurface(
}
// Input buffers will hold meta-data (gralloc references).
- err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE);
+ OMX_BOOL usingGraphicBuffer = OMX_FALSE;
+ err = storeMetaDataInBuffers_l(
+ portIndex, OMX_TRUE,
+ OMX_TRUE /* useGraphicBuffer */, &usingGraphicBuffer);
if (err != OK) {
return err;
}
@@ -644,25 +809,27 @@ status_t OMXNodeInstance::createInputSurface(
// Retrieve the width and height of the graphic buffer, set when the
// codec was configured.
OMX_PARAM_PORTDEFINITIONTYPE def;
- def.nSize = sizeof(def);
- def.nVersion.s.nVersionMajor = 1;
- def.nVersion.s.nVersionMinor = 0;
- def.nVersion.s.nRevision = 0;
- def.nVersion.s.nStep = 0;
+ InitOMXParams(&def);
def.nPortIndex = portIndex;
OMX_ERRORTYPE oerr = OMX_GetParameter(
mHandle, OMX_IndexParamPortDefinition, &def);
- CHECK(oerr == OMX_ErrorNone);
+ if (oerr != OMX_ErrorNone) {
+ OMX_INDEXTYPE index = OMX_IndexParamPortDefinition;
+ CLOG_ERROR(getParameter, oerr, "%s(%#x): %s:%u",
+ asString(index), index, portString(portIndex), portIndex);
+ return UNKNOWN_ERROR;
+ }
if (def.format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque) {
- ALOGE("createInputSurface requires COLOR_FormatSurface "
- "(AndroidOpaque) color format");
+ CLOGW("createInputSurface requires COLOR_FormatSurface "
+ "(AndroidOpaque) color format instead of %s(%#x)",
+ asString(def.format.video.eColorFormat), def.format.video.eColorFormat);
return INVALID_OPERATION;
}
GraphicBufferSource* bufferSource = new GraphicBufferSource(
this, def.format.video.nFrameWidth, def.format.video.nFrameHeight,
- def.nBufferCountActual);
+ def.nBufferCountActual, usingGraphicBuffer);
if ((err = bufferSource->initCheck()) != OK) {
delete bufferSource;
return err;
@@ -679,9 +846,9 @@ status_t OMXNodeInstance::signalEndOfInputStream() {
// flag set). Seems easier than doing the equivalent from here.
sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
if (bufferSource == NULL) {
- ALOGW("signalEndOfInputStream can only be used with Surface input");
+ CLOGW("signalEndOfInputStream can only be used with Surface input");
return INVALID_OPERATION;
- };
+ }
return bufferSource->signalEndOfInputStream();
}
@@ -698,19 +865,18 @@ status_t OMXNodeInstance::allocateBuffer(
mHandle, &header, portIndex, buffer_meta, size);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err);
-
+ CLOG_ERROR(allocateBuffer, err, BUFFER_FMT(portIndex, "%zu@", size));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
- *buffer = header;
+ *buffer = makeBufferID(header);
*buffer_data = header->pBuffer;
addActiveBuffer(portIndex, *buffer);
@@ -719,6 +885,7 @@ status_t OMXNodeInstance::allocateBuffer(
if (bufferSource != NULL && portIndex == kPortIndexInput) {
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(allocateBuffer, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p", size, *buffer_data));
return OK;
}
@@ -736,19 +903,19 @@ status_t OMXNodeInstance::allocateBufferWithBackup(
mHandle, &header, portIndex, buffer_meta, params->size());
if (err != OMX_ErrorNone) {
- ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", err, err);
-
+ CLOG_ERROR(allocateBufferWithBackup, err,
+ SIMPLE_BUFFER(portIndex, params->size(), params->pointer()));
delete buffer_meta;
buffer_meta = NULL;
*buffer = 0;
- return UNKNOWN_ERROR;
+ return StatusFromOMXError(err);
}
CHECK_EQ(header->pAppPrivate, buffer_meta);
- *buffer = header;
+ *buffer = makeBufferID(header);
addActiveBuffer(portIndex, *buffer);
@@ -757,22 +924,28 @@ status_t OMXNodeInstance::allocateBufferWithBackup(
bufferSource->addCodecBuffer(header);
}
+ CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %p",
+ params->size(), params->pointer(), header->pBuffer));
+
return OK;
}
status_t OMXNodeInstance::freeBuffer(
OMX_U32 portIndex, OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
+ CLOG_BUFFER(freeBuffer, "%s:%u %#x", portString(portIndex), portIndex, buffer);
removeActiveBuffer(portIndex, buffer);
- OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
BufferMeta *buffer_meta = static_cast<BufferMeta *>(header->pAppPrivate);
OMX_ERRORTYPE err = OMX_FreeBuffer(mHandle, portIndex, header);
+ CLOG_IF_ERROR(freeBuffer, err, "%s:%u %#x", portString(portIndex), portIndex, buffer);
delete buffer_meta;
buffer_meta = NULL;
+ invalidateBufferID(buffer);
return StatusFromOMXError(err);
}
@@ -780,13 +953,23 @@ status_t OMXNodeInstance::freeBuffer(
status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) {
Mutex::Autolock autoLock(mLock);
- OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
header->nFilledLen = 0;
header->nOffset = 0;
header->nFlags = 0;
- OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.add(header);
+ CLOG_BUMPED_BUFFER(fillBuffer, WITH_STATS(EMPTY_BUFFER(buffer, header)));
+ }
+ OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);
+ if (err != OMX_ErrorNone) {
+ CLOG_ERROR(fillBuffer, err, EMPTY_BUFFER(buffer, header));
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.remove(header);
+ }
return StatusFromOMXError(err);
}
@@ -796,17 +979,69 @@ status_t OMXNodeInstance::emptyBuffer(
OMX_U32 flags, OMX_TICKS timestamp) {
Mutex::Autolock autoLock(mLock);
- OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
+ OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
header->nFilledLen = rangeLength;
header->nOffset = rangeOffset;
- header->nFlags = flags;
- header->nTimeStamp = timestamp;
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(header->pAppPrivate);
buffer_meta->CopyToOMX(header);
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)buffer);
+}
+
+// log queued buffer activity for the next few input and/or output frames
+// if logging at internal state level
+void OMXNodeInstance::bumpDebugLevel_l(size_t numInputBuffers, size_t numOutputBuffers) {
+ if (DEBUG == ADebug::kDebugInternalState) {
+ DEBUG_BUMP = ADebug::kDebugAll;
+ if (numInputBuffers > 0) {
+ mDebugLevelBumpPendingBuffers[kPortIndexInput] = numInputBuffers;
+ }
+ if (numOutputBuffers > 0) {
+ mDebugLevelBumpPendingBuffers[kPortIndexOutput] = numOutputBuffers;
+ }
+ }
+}
+
+void OMXNodeInstance::unbumpDebugLevel_l(size_t portIndex) {
+ if (mDebugLevelBumpPendingBuffers[portIndex]) {
+ --mDebugLevelBumpPendingBuffers[portIndex];
+ }
+ if (!mDebugLevelBumpPendingBuffers[0]
+ && !mDebugLevelBumpPendingBuffers[1]) {
+ DEBUG_BUMP = DEBUG;
+ }
+}
+
+status_t OMXNodeInstance::emptyBuffer_l(
+ OMX_BUFFERHEADERTYPE *header, OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr) {
+ header->nFlags = flags;
+ header->nTimeStamp = timestamp;
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mInputBuffersWithCodec.add(header);
+
+ // bump internal-state debug level for 2 input frames past a buffer with CSD
+ if ((flags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
+ bumpDebugLevel_l(2 /* numInputBuffers */, 0 /* numOutputBuffers */);
+ }
+
+ CLOG_BUMPED_BUFFER(emptyBuffer, WITH_STATS(FULL_BUFFER(debugAddr, header)));
+ }
+
OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
+ CLOG_IF_ERROR(emptyBuffer, err, FULL_BUFFER(debugAddr, header));
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ if (err != OMX_ErrorNone) {
+ mInputBuffersWithCodec.remove(header);
+ } else if (!(flags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ unbumpDebugLevel_l(kPortIndexInput);
+ }
+ }
return StatusFromOMXError(err);
}
@@ -820,15 +1055,8 @@ status_t OMXNodeInstance::emptyDirectBuffer(
header->nFilledLen = rangeLength;
header->nOffset = rangeOffset;
- header->nFlags = flags;
- header->nTimeStamp = timestamp;
-
- OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
- if (err != OMX_ErrorNone) {
- ALOGW("emptyDirectBuffer failed, OMX err=0x%x", err);
- }
- return StatusFromOMXError(err);
+ return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer);
}
status_t OMXNodeInstance::getExtensionIndex(
@@ -841,19 +1069,37 @@ status_t OMXNodeInstance::getExtensionIndex(
return StatusFromOMXError(err);
}
+inline static const char *asString(IOMX::InternalOptionType i, const char *def = "??") {
+ switch (i) {
+ case IOMX::INTERNAL_OPTION_SUSPEND: return "SUSPEND";
+ case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
+ return "REPEAT_PREVIOUS_FRAME_DELAY";
+ case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: return "MAX_TIMESTAMP_GAP";
+ case IOMX::INTERNAL_OPTION_START_TIME: return "START_TIME";
+ case IOMX::INTERNAL_OPTION_TIME_LAPSE: return "TIME_LAPSE";
+ default: return def;
+ }
+}
+
status_t OMXNodeInstance::setInternalOption(
OMX_U32 portIndex,
IOMX::InternalOptionType type,
const void *data,
size_t size) {
+ CLOG_CONFIG(setInternalOption, "%s(%d): %s:%u %zu@%p",
+ asString(type), type, portString(portIndex), portIndex, size, data);
switch (type) {
case IOMX::INTERNAL_OPTION_SUSPEND:
case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
+ case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP:
+ case IOMX::INTERNAL_OPTION_START_TIME:
+ case IOMX::INTERNAL_OPTION_TIME_LAPSE:
{
const sp<GraphicBufferSource> &bufferSource =
getGraphicBufferSource();
if (bufferSource == NULL || portIndex != kPortIndexInput) {
+ CLOGW("setInternalOption is only for Surface input");
return ERROR_UNSUPPORTED;
}
@@ -863,15 +1109,45 @@ status_t OMXNodeInstance::setInternalOption(
}
bool suspend = *(bool *)data;
+ CLOG_CONFIG(setInternalOption, "suspend=%d", suspend);
bufferSource->suspend(suspend);
- } else {
+ } else if (type ==
+ IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY) {
if (size != sizeof(int64_t)) {
return INVALID_OPERATION;
}
int64_t delayUs = *(int64_t *)data;
-
+ CLOG_CONFIG(setInternalOption, "delayUs=%lld", (long long)delayUs);
return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
+ } else if (type ==
+ IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP) {
+ if (size != sizeof(int64_t)) {
+ return INVALID_OPERATION;
+ }
+
+ int64_t maxGapUs = *(int64_t *)data;
+ CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs);
+ return bufferSource->setMaxTimestampGapUs(maxGapUs);
+ } else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
+ if (size != sizeof(int64_t)) {
+ return INVALID_OPERATION;
+ }
+
+ int64_t skipFramesBeforeUs = *(int64_t *)data;
+ CLOG_CONFIG(setInternalOption, "beforeUs=%lld", (long long)skipFramesBeforeUs);
+ bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
+ } else { // IOMX::INTERNAL_OPTION_TIME_LAPSE
+ if (size != sizeof(int64_t) * 2) {
+ return INVALID_OPERATION;
+ }
+
+ int64_t timePerFrameUs = ((int64_t *)data)[0];
+ int64_t timePerCaptureUs = ((int64_t *)data)[1];
+ CLOG_CONFIG(setInternalOption, "perFrameUs=%lld perCaptureUs=%lld",
+ (long long)timePerFrameUs, (long long)timePerCaptureUs);
+
+ bufferSource->setTimeLapseUs((int64_t *)data);
}
return OK;
@@ -883,17 +1159,47 @@ status_t OMXNodeInstance::setInternalOption(
}
void OMXNodeInstance::onMessage(const omx_message &msg) {
+ const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
+
if (msg.type == omx_message::FILL_BUFFER_DONE) {
OMX_BUFFERHEADERTYPE *buffer =
- static_cast<OMX_BUFFERHEADERTYPE *>(
- msg.u.extended_buffer_data.buffer);
+ findBufferHeader(msg.u.extended_buffer_data.buffer);
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mOutputBuffersWithCodec.remove(buffer);
+
+ CLOG_BUMPED_BUFFER(
+ FBD, WITH_STATS(FULL_BUFFER(msg.u.extended_buffer_data.buffer, buffer)));
+
+ unbumpDebugLevel_l(kPortIndexOutput);
+ }
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(buffer->pAppPrivate);
buffer_meta->CopyFromOMX(buffer);
+
+ if (bufferSource != NULL) {
+ // fix up the buffer info (especially timestamp) if needed
+ bufferSource->codecBufferFilled(buffer);
+
+ omx_message newMsg = msg;
+ newMsg.u.extended_buffer_data.timestamp = buffer->nTimeStamp;
+ mObserver->onMessage(newMsg);
+ return;
+ }
} else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
- const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
+ OMX_BUFFERHEADERTYPE *buffer =
+ findBufferHeader(msg.u.buffer_data.buffer);
+
+ {
+ Mutex::Autolock _l(mDebugLock);
+ mInputBuffersWithCodec.remove(buffer);
+
+ CLOG_BUMPED_BUFFER(
+ EBD, WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer)));
+ }
if (bufferSource != NULL) {
// This is one of the buffers used exclusively by
@@ -901,11 +1207,6 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {
// Don't dispatch a message back to ACodec, since it doesn't
// know that anyone asked to have the buffer emptied and will
// be very confused.
-
- OMX_BUFFERHEADERTYPE *buffer =
- static_cast<OMX_BUFFERHEADERTYPE *>(
- msg.u.buffer_data.buffer);
-
bufferSource->codecBufferEmptied(buffer);
return;
}
@@ -929,6 +1230,43 @@ void OMXNodeInstance::onGetHandleFailed() {
// Don't try to acquire mLock here -- in rare circumstances this will hang.
void OMXNodeInstance::onEvent(
OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) {
+ const char *arg1String = "??";
+ const char *arg2String = "??";
+ ADebug::Level level = ADebug::kDebugInternalState;
+
+ switch (event) {
+ case OMX_EventCmdComplete:
+ arg1String = asString((OMX_COMMANDTYPE)arg1);
+ switch (arg1) {
+ case OMX_CommandStateSet:
+ arg2String = asString((OMX_STATETYPE)arg2);
+ level = ADebug::kDebugState;
+ break;
+ case OMX_CommandFlush:
+ case OMX_CommandPortEnable:
+ {
+ // bump internal-state debug level for 2 input and output frames
+ Mutex::Autolock _l(mDebugLock);
+ bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
+ }
+ // fall through
+ default:
+ arg2String = portString(arg2);
+ }
+ break;
+ case OMX_EventError:
+ arg1String = asString((OMX_ERRORTYPE)arg1);
+ level = ADebug::kDebugLifeCycle;
+ break;
+ case OMX_EventPortSettingsChanged:
+ arg2String = asString((OMX_INDEXEXTTYPE)arg2);
+ // fall through
+ default:
+ arg1String = portString(arg1);
+ }
+
+ CLOGI_(level, onEvent, "%s(%x), %s(%x), %s(%x)",
+ asString(event), event, arg1String, arg1, arg2String, arg2);
const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
if (bufferSource != NULL
@@ -941,7 +1279,7 @@ void OMXNodeInstance::onEvent(
// static
OMX_ERRORTYPE OMXNodeInstance::OnEvent(
- OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_HANDLETYPE /* hComponent */,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
@@ -957,26 +1295,28 @@ OMX_ERRORTYPE OMXNodeInstance::OnEvent(
// static
OMX_ERRORTYPE OMXNodeInstance::OnEmptyBufferDone(
- OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_HANDLETYPE /* hComponent */,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffer) {
OMXNodeInstance *instance = static_cast<OMXNodeInstance *>(pAppData);
if (instance->mDying) {
return OMX_ErrorNone;
}
- return instance->owner()->OnEmptyBufferDone(instance->nodeID(), pBuffer);
+ return instance->owner()->OnEmptyBufferDone(instance->nodeID(),
+ instance->findBufferID(pBuffer), pBuffer);
}
// static
OMX_ERRORTYPE OMXNodeInstance::OnFillBufferDone(
- OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_HANDLETYPE /* hComponent */,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffer) {
OMXNodeInstance *instance = static_cast<OMXNodeInstance *>(pAppData);
if (instance->mDying) {
return OMX_ErrorNone;
}
- return instance->owner()->OnFillBufferDone(instance->nodeID(), pBuffer);
+ return instance->owner()->OnFillBufferDone(instance->nodeID(),
+ instance->findBufferID(pBuffer), pBuffer);
}
void OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) {
@@ -984,23 +1324,27 @@ void OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) {
active.mPortIndex = portIndex;
active.mID = id;
mActiveBuffers.push(active);
+
+ if (portIndex < NELEM(mNumPortBuffers)) {
+ ++mNumPortBuffers[portIndex];
+ }
}
void OMXNodeInstance::removeActiveBuffer(
OMX_U32 portIndex, OMX::buffer_id id) {
- bool found = false;
for (size_t i = 0; i < mActiveBuffers.size(); ++i) {
if (mActiveBuffers[i].mPortIndex == portIndex
- && mActiveBuffers[i].mID == id) {
- found = true;
+ && mActiveBuffers[i].mID == id) {
mActiveBuffers.removeItemsAt(i);
- break;
+
+ if (portIndex < NELEM(mNumPortBuffers)) {
+ --mNumPortBuffers[portIndex];
+ }
+ return;
}
}
- if (!found) {
- ALOGW("Attempt to remove an active buffer we know nothing about...");
- }
+ CLOGW("Attempt to remove an active buffer [%#x] we know nothing about...", id);
}
void OMXNodeInstance::freeActiveBuffers() {
@@ -1011,4 +1355,67 @@ void OMXNodeInstance::freeActiveBuffers() {
}
}
+#ifdef __LP64__
+
+OMX::buffer_id OMXNodeInstance::makeBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) {
+ if (bufferHeader == NULL) {
+ return 0;
+ }
+ Mutex::Autolock autoLock(mBufferIDLock);
+ OMX::buffer_id buffer;
+ do { // handle the very unlikely case of ID overflow
+ if (++mBufferIDCount == 0) {
+ ++mBufferIDCount;
+ }
+ buffer = (OMX::buffer_id)mBufferIDCount;
+ } while (mBufferIDToBufferHeader.indexOfKey(buffer) >= 0);
+ mBufferIDToBufferHeader.add(buffer, bufferHeader);
+ mBufferHeaderToBufferID.add(bufferHeader, buffer);
+ return buffer;
+}
+
+OMX_BUFFERHEADERTYPE *OMXNodeInstance::findBufferHeader(OMX::buffer_id buffer) {
+ if (buffer == 0) {
+ return NULL;
+ }
+ Mutex::Autolock autoLock(mBufferIDLock);
+ return mBufferIDToBufferHeader.valueFor(buffer);
+}
+
+OMX::buffer_id OMXNodeInstance::findBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) {
+ if (bufferHeader == NULL) {
+ return 0;
+ }
+ Mutex::Autolock autoLock(mBufferIDLock);
+ return mBufferHeaderToBufferID.valueFor(bufferHeader);
+}
+
+void OMXNodeInstance::invalidateBufferID(OMX::buffer_id buffer) {
+ if (buffer == 0) {
+ return;
+ }
+ Mutex::Autolock autoLock(mBufferIDLock);
+ mBufferHeaderToBufferID.removeItem(mBufferIDToBufferHeader.valueFor(buffer));
+ mBufferIDToBufferHeader.removeItem(buffer);
+}
+
+#else
+
+OMX::buffer_id OMXNodeInstance::makeBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) {
+ return (OMX::buffer_id)bufferHeader;
+}
+
+OMX_BUFFERHEADERTYPE *OMXNodeInstance::findBufferHeader(OMX::buffer_id buffer) {
+ return (OMX_BUFFERHEADERTYPE *)buffer;
+}
+
+OMX::buffer_id OMXNodeInstance::findBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) {
+ return (OMX::buffer_id)bufferHeader;
+}
+
+void OMXNodeInstance::invalidateBufferID(OMX::buffer_id buffer __unused) {
+}
+
+#endif
+
} // namespace android
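The #ifdef __LP64__ block above is the core of the buffer-id change in this patch: OMX::buffer_id is now a small 32-bit handle rather than a raw OMX_BUFFERHEADERTYPE pointer, since a 64-bit pointer no longer fits in the existing 32-bit id. The node therefore keeps a bidirectional map between ids and header pointers. A simplified standalone sketch of the same allocation pattern (std::map instead of KeyedVector, no locking, illustrative only):

    #include <cstdint>
    #include <map>

    struct BufferHeader { /* stand-in for OMX_BUFFERHEADERTYPE */ };
    using buffer_id = uint32_t;

    class BufferIdMap {
        buffer_id mCount = 0;
        std::map<buffer_id, BufferHeader *> mIdToHeader;
        std::map<BufferHeader *, buffer_id> mHeaderToId;
    public:
        buffer_id makeBufferID(BufferHeader *header) {
            if (header == nullptr) return 0;
            buffer_id id;
            do {                            // 0 is reserved; skip ids still in use
                if (++mCount == 0) ++mCount;
                id = mCount;
            } while (mIdToHeader.count(id));
            mIdToHeader[id] = header;
            mHeaderToId[header] = id;
            return id;
        }
        BufferHeader *findBufferHeader(buffer_id id) {
            auto it = mIdToHeader.find(id);
            return it == mIdToHeader.end() ? nullptr : it->second;
        }
        void invalidateBufferID(buffer_id id) {
            auto it = mIdToHeader.find(id);
            if (it == mIdToHeader.end()) return;
            mHeaderToId.erase(it->second);
            mIdToHeader.erase(it);
        }
    };

On 32-bit builds the #else branch keeps the old behaviour: the id is simply the header pointer cast back and forth, so no table is needed.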
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index 4999663..7f99dcd 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -152,28 +152,28 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::internalSetParameter(
OMX_PARAM_PORTDEFINITIONTYPE *defParams =
(OMX_PARAM_PORTDEFINITIONTYPE *)params;
- if (defParams->nPortIndex >= mPorts.size()
- || defParams->nSize
- != sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
- return OMX_ErrorUndefined;
+ if (defParams->nPortIndex >= mPorts.size()) {
+ return OMX_ErrorBadPortIndex;
+ }
+ if (defParams->nSize != sizeof(OMX_PARAM_PORTDEFINITIONTYPE)) {
+ return OMX_ErrorUnsupportedSetting;
}
PortInfo *port =
&mPorts.editItemAt(defParams->nPortIndex);
- if (defParams->nBufferSize != port->mDef.nBufferSize) {
- CHECK_GE(defParams->nBufferSize, port->mDef.nBufferSize);
+ // default behavior is that we only allow buffer size to increase
+ if (defParams->nBufferSize > port->mDef.nBufferSize) {
port->mDef.nBufferSize = defParams->nBufferSize;
}
- if (defParams->nBufferCountActual
- != port->mDef.nBufferCountActual) {
- CHECK_GE(defParams->nBufferCountActual,
- port->mDef.nBufferCountMin);
-
- port->mDef.nBufferCountActual = defParams->nBufferCountActual;
+ if (defParams->nBufferCountActual < port->mDef.nBufferCountMin) {
+ ALOGW("component requires at least %u buffers (%u requested)",
+ port->mDef.nBufferCountMin, defParams->nBufferCountActual);
+ return OMX_ErrorUnsupportedSetting;
}
+ port->mDef.nBufferCountActual = defParams->nBufferCountActual;
return OMX_ErrorNone;
}
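The rewritten internalSetParameter checks above replace CHECK_GE aborts with proper OMX error codes and only let a client grow nBufferSize. A compact sketch of that validation policy in isolation (stand-in types, illustrative only):

    #include <cstdint>

    enum class SetPortResult { Ok, BadPortIndex, UnsupportedSetting };

    struct PortDef {
        uint32_t nBufferSize;
        uint32_t nBufferCountMin;
        uint32_t nBufferCountActual;
    };

    SetPortResult setPortDefinition(
            PortDef &port, const PortDef &req, bool portKnown, bool structSizeMatches) {
        if (!portKnown) return SetPortResult::BadPortIndex;
        if (!structSizeMatches) return SetPortResult::UnsupportedSetting;
        if (req.nBufferSize > port.nBufferSize) {
            port.nBufferSize = req.nBufferSize;          // only allow increases
        }
        if (req.nBufferCountActual < port.nBufferCountMin) {
            return SetPortResult::UnsupportedSetting;    // fewer buffers than the minimum
        }
        port.nBufferCountActual = req.nBufferCountActual;
        return SetPortResult::Ok;
    }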
diff --git a/media/libstagefright/omx/SoftOMXComponent.cpp b/media/libstagefright/omx/SoftOMXComponent.cpp
index b1c34dc..df978f8 100644
--- a/media/libstagefright/omx/SoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftOMXComponent.cpp
@@ -257,69 +257,69 @@ OMX_ERRORTYPE SoftOMXComponent::GetStateWrapper(
////////////////////////////////////////////////////////////////////////////////
OMX_ERRORTYPE SoftOMXComponent::sendCommand(
- OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data) {
+ OMX_COMMANDTYPE /* cmd */, OMX_U32 /* param */, OMX_PTR /* data */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::getParameter(
- OMX_INDEXTYPE index, OMX_PTR params) {
+ OMX_INDEXTYPE /* index */, OMX_PTR /* params */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::setParameter(
- OMX_INDEXTYPE index, const OMX_PTR params) {
+ OMX_INDEXTYPE /* index */, const OMX_PTR /* params */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::getConfig(
- OMX_INDEXTYPE index, OMX_PTR params) {
+ OMX_INDEXTYPE /* index */, OMX_PTR /* params */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::setConfig(
- OMX_INDEXTYPE index, const OMX_PTR params) {
+ OMX_INDEXTYPE /* index */, const OMX_PTR /* params */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index) {
- return OMX_ErrorUndefined;
+ const char * /* name */, OMX_INDEXTYPE * /* index */) {
+ return OMX_ErrorUnsupportedIndex;
}
OMX_ERRORTYPE SoftOMXComponent::useBuffer(
- OMX_BUFFERHEADERTYPE **buffer,
- OMX_U32 portIndex,
- OMX_PTR appPrivate,
- OMX_U32 size,
- OMX_U8 *ptr) {
+ OMX_BUFFERHEADERTYPE ** /* buffer */,
+ OMX_U32 /* portIndex */,
+ OMX_PTR /* appPrivate */,
+ OMX_U32 /* size */,
+ OMX_U8 * /* ptr */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::allocateBuffer(
- OMX_BUFFERHEADERTYPE **buffer,
- OMX_U32 portIndex,
- OMX_PTR appPrivate,
- OMX_U32 size) {
+ OMX_BUFFERHEADERTYPE ** /* buffer */,
+ OMX_U32 /* portIndex */,
+ OMX_PTR /* appPrivate */,
+ OMX_U32 /* size */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::freeBuffer(
- OMX_U32 portIndex,
- OMX_BUFFERHEADERTYPE *buffer) {
+ OMX_U32 /* portIndex */,
+ OMX_BUFFERHEADERTYPE * /* buffer */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::emptyThisBuffer(
- OMX_BUFFERHEADERTYPE *buffer) {
+ OMX_BUFFERHEADERTYPE * /* buffer */) {
return OMX_ErrorUndefined;
}
OMX_ERRORTYPE SoftOMXComponent::fillThisBuffer(
- OMX_BUFFERHEADERTYPE *buffer) {
+ OMX_BUFFERHEADERTYPE * /* buffer */) {
return OMX_ErrorUndefined;
}
-OMX_ERRORTYPE SoftOMXComponent::getState(OMX_STATETYPE *state) {
+OMX_ERRORTYPE SoftOMXComponent::getState(OMX_STATETYPE * /* state */) {
return OMX_ErrorUndefined;
}
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index d6cde73..0f9c00c 100644..100755
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -40,16 +40,19 @@ static const struct {
{ "OMX.google.amrnb.encoder", "amrnbenc", "audio_encoder.amrnb" },
{ "OMX.google.amrwb.decoder", "amrdec", "audio_decoder.amrwb" },
{ "OMX.google.amrwb.encoder", "amrwbenc", "audio_encoder.amrwb" },
- { "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" },
- { "OMX.google.h264.encoder", "h264enc", "video_encoder.avc" },
+ { "OMX.google.h264.decoder", "avcdec", "video_decoder.avc" },
+ { "OMX.google.h264.encoder", "avcenc", "video_encoder.avc" },
+ { "OMX.google.hevc.decoder", "hevcdec", "video_decoder.hevc" },
{ "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
{ "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
+ { "OMX.google.mpeg2.decoder", "mpeg2dec", "video_decoder.mpeg2" },
{ "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },
{ "OMX.google.h263.encoder", "mpeg4enc", "video_encoder.h263" },
{ "OMX.google.mpeg4.decoder", "mpeg4dec", "video_decoder.mpeg4" },
{ "OMX.google.mpeg4.encoder", "mpeg4enc", "video_encoder.mpeg4" },
{ "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" },
{ "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" },
+ { "OMX.google.opus.decoder", "opusdec", "audio_decoder.opus" },
{ "OMX.google.vp8.decoder", "vpxdec", "video_decoder.vp8" },
{ "OMX.google.vp9.decoder", "vpxdec", "video_decoder.vp9" },
{ "OMX.google.vp8.encoder", "vpxenc", "video_encoder.vp8" },
@@ -83,7 +86,7 @@ OMX_ERRORTYPE SoftOMXPlugin::makeComponentInstance(
void *libHandle = dlopen(libName.c_str(), RTLD_NOW);
if (libHandle == NULL) {
- ALOGE("unable to dlopen %s", libName.c_str());
+ ALOGE("unable to dlopen %s: %s", libName.c_str(), dlerror());
return OMX_ErrorComponentNotFound;
}
@@ -154,7 +157,7 @@ OMX_ERRORTYPE SoftOMXPlugin::destroyComponentInstance(
OMX_ERRORTYPE SoftOMXPlugin::enumerateComponents(
OMX_STRING name,
- size_t size,
+ size_t /* size */,
OMX_U32 index) {
if (index >= kNumComponents) {
return OMX_ErrorNoMore;
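The kComponents table above maps OMX component names to a library-name suffix; makeComponentInstance builds the shared-library name from that suffix and dlopen()s it, and the added dlerror() call makes load failures diagnosable in the log. A hedged sketch of that load step in isolation (the libstagefright_soft_ prefix follows the naming used by these soft codecs; error handling simplified):

    #include <dlfcn.h>
    #include <cstdio>
    #include <string>

    // e.g. suffix "avcdec" -> "libstagefright_soft_avcdec.so"
    void *loadSoftCodecLibrary(const std::string &libNameSuffix) {
        const std::string libName = "libstagefright_soft_" + libNameSuffix + ".so";
        void *handle = dlopen(libName.c_str(), RTLD_NOW);
        if (handle == nullptr) {
            fprintf(stderr, "unable to dlopen %s: %s\n", libName.c_str(), dlerror());
        }
        return handle;
    }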
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 08a3d42..4ce165b 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -14,15 +14,19 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
//#define LOG_NDEBUG 0
#define LOG_TAG "SoftVideoDecoderOMXComponent"
#include <utils/Log.h>
#include "include/SoftVideoDecoderOMXComponent.h"
+#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/MediaDefs.h>
namespace android {
@@ -48,6 +52,9 @@ SoftVideoDecoderOMXComponent::SoftVideoDecoderOMXComponent(
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
: SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mIsAdaptive(false),
+ mAdaptiveMaxWidth(0),
+ mAdaptiveMaxHeight(0),
mWidth(width),
mHeight(height),
mCropLeft(0),
@@ -55,6 +62,8 @@ SoftVideoDecoderOMXComponent::SoftVideoDecoderOMXComponent(
mCropWidth(width),
mCropHeight(height),
mOutputPortSettingsChange(NONE),
+ mMinInputBufferSize(384), // arbitrary, using one uncompressed macroblock
+ mMinCompressionRatio(1), // max input size is normally the output size
mComponentRole(componentRole),
mCodingType(codingType),
mProfileLevels(profileLevels),
@@ -65,7 +74,11 @@ void SoftVideoDecoderOMXComponent::initPorts(
OMX_U32 numInputBuffers,
OMX_U32 inputBufferSize,
OMX_U32 numOutputBuffers,
- const char *mimeType) {
+ const char *mimeType,
+ OMX_U32 minCompressionRatio) {
+ mMinInputBufferSize = inputBufferSize;
+ mMinCompressionRatio = minCompressionRatio;
+
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
@@ -114,30 +127,133 @@ void SoftVideoDecoderOMXComponent::initPorts(
addPort(def);
- updatePortDefinitions();
+ updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
+}
+
+void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop, bool updateInputSize) {
+ OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
+ outDef->format.video.nFrameWidth = outputBufferWidth();
+ outDef->format.video.nFrameHeight = outputBufferHeight();
+ outDef->format.video.nStride = outDef->format.video.nFrameWidth;
+ outDef->format.video.nSliceHeight = outDef->format.video.nFrameHeight;
+
+ outDef->nBufferSize =
+ (outDef->format.video.nStride * outDef->format.video.nSliceHeight * 3) / 2;
+
+ OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+ inDef->format.video.nFrameWidth = mWidth;
+ inDef->format.video.nFrameHeight = mHeight;
+ // input port is compressed, hence it has no stride
+ inDef->format.video.nStride = 0;
+ inDef->format.video.nSliceHeight = 0;
+
+ // when output format changes, input buffer size does not actually change
+ if (updateInputSize) {
+ inDef->nBufferSize = max(
+ outDef->nBufferSize / mMinCompressionRatio,
+ max(mMinInputBufferSize, inDef->nBufferSize));
+ }
+
+ if (updateCrop) {
+ mCropLeft = 0;
+ mCropTop = 0;
+ mCropWidth = mWidth;
+ mCropHeight = mHeight;
+ }
}
-void SoftVideoDecoderOMXComponent::updatePortDefinitions() {
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def = &editPortInfo(kOutputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def->nBufferSize =
- (def->format.video.nFrameWidth *
- def->format.video.nFrameHeight * 3) / 2;
-
- mCropLeft = 0;
- mCropTop = 0;
- mCropWidth = mWidth;
- mCropHeight = mHeight;
+
+uint32_t SoftVideoDecoderOMXComponent::outputBufferWidth() {
+ return mIsAdaptive ? mAdaptiveMaxWidth : mWidth;
+}
+
+uint32_t SoftVideoDecoderOMXComponent::outputBufferHeight() {
+ return mIsAdaptive ? mAdaptiveMaxHeight : mHeight;
+}
+
+void SoftVideoDecoderOMXComponent::handlePortSettingsChange(
+ bool *portWillReset, uint32_t width, uint32_t height,
+ CropSettingsMode cropSettingsMode, bool fakeStride) {
+ *portWillReset = false;
+ bool sizeChanged = (width != mWidth || height != mHeight);
+ bool updateCrop = (cropSettingsMode == kCropUnSet);
+ bool cropChanged = (cropSettingsMode == kCropChanged);
+ bool strideChanged = false;
+ if (fakeStride) {
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
+ if (def->format.video.nStride != (OMX_S32)width
+ || def->format.video.nSliceHeight != (OMX_U32)height) {
+ strideChanged = true;
+ }
+ }
+
+ if (sizeChanged || cropChanged || strideChanged) {
+ mWidth = width;
+ mHeight = height;
+
+ if ((sizeChanged && !mIsAdaptive)
+ || width > mAdaptiveMaxWidth
+ || height > mAdaptiveMaxHeight) {
+ if (mIsAdaptive) {
+ if (width > mAdaptiveMaxWidth) {
+ mAdaptiveMaxWidth = width;
+ }
+ if (height > mAdaptiveMaxHeight) {
+ mAdaptiveMaxHeight = height;
+ }
+ }
+ updatePortDefinitions(updateCrop);
+ notify(OMX_EventPortSettingsChanged, kOutputPortIndex, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ *portWillReset = true;
+ } else {
+ updatePortDefinitions(updateCrop);
+
+ if (fakeStride) {
+ // MAJOR HACK that is not pretty: it only exists to fool the renderer into reading
+ // the correct data.
+ // Some software decoders (e.g. SoftMPEG4) fill the decoded frame directly into the
+ // output buffer without considering the output buffer stride and slice height, so
+ // this is used to signal how the buffer is actually arranged. The alternative is to
+ // re-arrange the output buffer in SoftMPEG4, but that would cost extra memcopies.
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
+ def->format.video.nStride = mWidth;
+ def->format.video.nSliceHeight = mHeight;
+ }
+
+ notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
+ OMX_IndexConfigCommonOutputCrop, NULL);
+ }
+ }
+}
+
+void SoftVideoDecoderOMXComponent::copyYV12FrameToOutputBuffer(
+ uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride) {
+ size_t dstYStride = outputBufferWidth();
+ size_t dstUVStride = dstYStride / 2;
+ size_t dstHeight = outputBufferHeight();
+ uint8_t *dstStart = dst;
+
+ for (size_t i = 0; i < mHeight; ++i) {
+ memcpy(dst, srcY, mWidth);
+ srcY += srcYStride;
+ dst += dstYStride;
+ }
+
+ dst = dstStart + dstYStride * dstHeight;
+ for (size_t i = 0; i < mHeight / 2; ++i) {
+ memcpy(dst, srcU, mWidth / 2);
+ srcU += srcUStride;
+ dst += dstUVStride;
+ }
+
+ dst = dstStart + (5 * dstYStride * dstHeight) / 4;
+ for (size_t i = 0; i < mHeight / 2; ++i) {
+ memcpy(dst, srcV, mWidth / 2);
+ srcV += srcVStride;
+ dst += dstUVStride;
+ }
}
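copyYV12FrameToOutputBuffer above writes the Y plane at the full output stride and the two chroma planes at half that stride, with plane offsets computed from the (possibly adaptive) output buffer dimensions rather than the current frame size. A small worked example of those offsets, using a hypothetical 1280x720 output buffer:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t dstYStride  = 1280;                          // outputBufferWidth()
        uint32_t dstHeight   = 720;                           // outputBufferHeight()
        uint32_t dstUVStride = dstYStride / 2;                // 640

        uint32_t offsetY = 0;
        uint32_t offsetU = dstYStride * dstHeight;            // 921600
        uint32_t offsetV = (5 * dstYStride * dstHeight) / 4;  // 1152000
        uint32_t total   = (dstYStride * dstHeight * 3) / 2;  // 1382400

        // Matches the nBufferSize computed in updatePortDefinitions() above.
        printf("Y@%u U@%u V@%u total=%u (uv stride %u)\n",
               offsetY, offsetU, offsetV, total, dstUVStride);
        return 0;
    }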
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
@@ -149,7 +265,7 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
(OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
if (formatParams->nPortIndex > kMaxPortIndex) {
- return OMX_ErrorUndefined;
+ return OMX_ErrorBadPortIndex;
}
if (formatParams->nIndex != 0) {
@@ -177,16 +293,16 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
(OMX_VIDEO_PARAM_PROFILELEVELTYPE *) params;
if (profileLevel->nPortIndex != kInputPortIndex) {
- ALOGE("Invalid port index: %ld", profileLevel->nPortIndex);
+ ALOGE("Invalid port index: %" PRIu32, profileLevel->nPortIndex);
return OMX_ErrorUnsupportedIndex;
}
- if (index >= mNumProfileLevels) {
+ if (profileLevel->nProfileIndex >= mNumProfileLevels) {
return OMX_ErrorNoMore;
}
- profileLevel->eProfile = mProfileLevels[index].mProfile;
- profileLevel->eLevel = mProfileLevels[index].mLevel;
+ profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
+ profileLevel->eLevel = mProfileLevels[profileLevel->nProfileIndex].mLevel;
return OMX_ErrorNone;
}
@@ -197,7 +313,10 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR params) {
- switch (index) {
+ // Include extension index OMX_INDEXEXTTYPE.
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
case OMX_IndexParamStandardComponentRole:
{
const OMX_PARAM_COMPONENTROLETYPE *roleParams =
@@ -218,16 +337,78 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
(OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
if (formatParams->nPortIndex > kMaxPortIndex) {
- return OMX_ErrorUndefined;
+ return OMX_ErrorBadPortIndex;
}
if (formatParams->nIndex != 0) {
return OMX_ErrorNoMore;
}
+ if (formatParams->nPortIndex == kInputPortIndex) {
+ if (formatParams->eCompressionFormat != mCodingType
+ || formatParams->eColorFormat != OMX_COLOR_FormatUnused) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else {
+ if (formatParams->eCompressionFormat != OMX_VIDEO_CodingUnused
+ || formatParams->eColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ }
+
return OMX_ErrorNone;
}
+ case kPrepareForAdaptivePlaybackIndex:
+ {
+ const PrepareForAdaptivePlaybackParams* adaptivePlaybackParams =
+ (const PrepareForAdaptivePlaybackParams *)params;
+ mIsAdaptive = adaptivePlaybackParams->bEnable;
+ if (mIsAdaptive) {
+ mAdaptiveMaxWidth = adaptivePlaybackParams->nMaxFrameWidth;
+ mAdaptiveMaxHeight = adaptivePlaybackParams->nMaxFrameHeight;
+ mWidth = mAdaptiveMaxWidth;
+ mHeight = mAdaptiveMaxHeight;
+ } else {
+ mAdaptiveMaxWidth = 0;
+ mAdaptiveMaxHeight = 0;
+ }
+ updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamPortDefinition:
+ {
+ OMX_PARAM_PORTDEFINITIONTYPE *newParams =
+ (OMX_PARAM_PORTDEFINITIONTYPE *)params;
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &newParams->format.video;
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(newParams->nPortIndex)->mDef;
+
+ uint32_t oldWidth = def->format.video.nFrameWidth;
+ uint32_t oldHeight = def->format.video.nFrameHeight;
+ uint32_t newWidth = video_def->nFrameWidth;
+ uint32_t newHeight = video_def->nFrameHeight;
+ if (newWidth != oldWidth || newHeight != oldHeight) {
+ bool outputPort = (newParams->nPortIndex == kOutputPortIndex);
+ if (outputPort) {
+ // only update (essentially crop) if size changes
+ mWidth = newWidth;
+ mHeight = newHeight;
+
+ updatePortDefinitions(true /* updateCrop */, true /* updateInputSize */);
+ // reset buffer size based on frame size
+ newParams->nBufferSize = def->nBufferSize;
+ } else {
+ // For input port, we only set nFrameWidth and nFrameHeight. Buffer size
+ // is updated when configuring the output port using the max-frame-size,
+ // though client can still request a larger size.
+ def->format.video.nFrameWidth = newWidth;
+ def->format.video.nFrameHeight = newHeight;
+ }
+ }
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+
default:
return SimpleSoftOMXComponent::internalSetParameter(index, params);
}
@@ -257,6 +438,16 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getConfig(
}
}
+OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getExtensionIndex(
+ const char *name, OMX_INDEXTYPE *index) {
+ if (!strcmp(name, "OMX.google.android.index.prepareForAdaptivePlayback")) {
+ *(int32_t*)index = kPrepareForAdaptivePlaybackIndex;
+ return OMX_ErrorNone;
+ }
+
+ return SimpleSoftOMXComponent::getExtensionIndex(name, index);
+}
+
void SoftVideoDecoderOMXComponent::onReset() {
mOutputPortSettingsChange = NONE;
}
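The adaptive-playback path added above is only reachable once a client has translated the vendor extension string into the private index. A minimal client-side sketch (not part of this change), assuming an OMX_HANDLETYPE named omx and the PrepareForAdaptivePlaybackParams struct from media/hardware/HardwareAPI.h; the 1080p maximums are illustrative and error handling is omitted:

OMX_INDEXTYPE index;
OMX_ERRORTYPE err = OMX_GetExtensionIndex(
        omx,
        const_cast<OMX_STRING>("OMX.google.android.index.prepareForAdaptivePlayback"),
        &index);
if (err == OMX_ErrorNone) {
    PrepareForAdaptivePlaybackParams params;
    memset(&params, 0, sizeof(params));
    params.nSize = sizeof(params);
    params.nVersion.s.nVersionMajor = 1;
    params.nPortIndex = 1;                 // output port
    params.bEnable = OMX_TRUE;
    params.nMaxFrameWidth = 1920;          // illustrative maximums
    params.nMaxFrameHeight = 1080;
    err = OMX_SetParameter(omx, index, &params);
}

With bEnable set, the kPrepareForAdaptivePlaybackIndex case above records the maximums in mAdaptiveMaxWidth/mAdaptiveMaxHeight and sizes the output definition to them instead of the current frame size.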
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
new file mode 100644
index 0000000..d4d6217
--- /dev/null
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -0,0 +1,616 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftVideoEncoderOMXComponent"
+#include <utils/Log.h>
+#include <utils/misc.h>
+
+#include "include/SoftVideoEncoderOMXComponent.h"
+
+#include <hardware/gralloc.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaDefs.h>
+
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+namespace android {
+
+const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = {
+ OMX_COLOR_FormatYUV420Planar,
+ OMX_COLOR_FormatYUV420SemiPlanar,
+ OMX_COLOR_FormatAndroidOpaque
+};
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftVideoEncoderOMXComponent::SoftVideoEncoderOMXComponent(
+ const char *name,
+ const char *componentRole,
+ OMX_VIDEO_CODINGTYPE codingType,
+ const CodecProfileLevel *profileLevels,
+ size_t numProfileLevels,
+ int32_t width,
+ int32_t height,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mInputDataIsMeta(false),
+ mWidth(width),
+ mHeight(height),
+ mBitrate(192000),
+ mFramerate(30 << 16), // Q16 format
+ mColorFormat(OMX_COLOR_FormatYUV420Planar),
+ mGrallocModule(NULL),
+ mMinOutputBufferSize(384), // arbitrary, using one uncompressed macroblock
+ mMinCompressionRatio(1), // max output size is normally the input size
+ mComponentRole(componentRole),
+ mCodingType(codingType),
+ mProfileLevels(profileLevels),
+ mNumProfileLevels(numProfileLevels) {
+}
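All framerate fields in this component are Q16 fixed point, so the default of 30 << 16 encodes 30 fps. A two-line conversion sketch (helper names assumed, not part of this file):

static inline float q16ToFps(OMX_U32 xFramerate) { return xFramerate / 65536.0f; }
static inline OMX_U32 fpsToQ16(float fps) { return (OMX_U32)(fps * 65536.0f + 0.5f); }
// q16ToFps(30 << 16) == 30.0f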
+
+void SoftVideoEncoderOMXComponent::initPorts(
+ OMX_U32 numInputBuffers, OMX_U32 numOutputBuffers, OMX_U32 outputBufferSize,
+ const char *mime, OMX_U32 minCompressionRatio) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+
+ mMinOutputBufferSize = outputBufferSize;
+ mMinCompressionRatio = minCompressionRatio;
+
+ InitOMXParams(&def);
+
+ def.nPortIndex = kInputPortIndex;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = numInputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ // frameRate is in Q16 format.
+ def.format.video.xFramerate = mFramerate;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.nBufferAlignment = kInputBufferAlignment;
+ def.format.video.cMIMEType = const_cast<char *>("video/raw");
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ def.format.video.eColorFormat = mColorFormat;
+ def.format.video.pNativeWindow = NULL;
+ // buffersize set in updatePortParams
+
+ addPort(def);
+
+ InitOMXParams(&def);
+
+ def.nPortIndex = kOutputPortIndex;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = numOutputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = 0;
+ def.format.video.nSliceHeight = 0;
+ def.format.video.nBitrate = mBitrate;
+ def.format.video.xFramerate = 0 << 16;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.nBufferAlignment = kOutputBufferAlignment;
+ def.format.video.cMIMEType = const_cast<char *>(mime);
+ def.format.video.eCompressionFormat = mCodingType;
+ def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+ def.format.video.pNativeWindow = NULL;
+ // buffersize set in updatePortParams
+
+ addPort(def);
+
+ updatePortParams();
+}
+
+void SoftVideoEncoderOMXComponent::updatePortParams() {
+ OMX_PARAM_PORTDEFINITIONTYPE *inDef = &editPortInfo(kInputPortIndex)->mDef;
+ inDef->format.video.nFrameWidth = mWidth;
+ inDef->format.video.nFrameHeight = mHeight;
+ inDef->format.video.nStride = inDef->format.video.nFrameWidth;
+ inDef->format.video.nSliceHeight = inDef->format.video.nFrameHeight;
+ inDef->format.video.xFramerate = mFramerate;
+ inDef->format.video.eColorFormat = mColorFormat;
+ uint32_t rawBufferSize =
+ inDef->format.video.nStride * inDef->format.video.nSliceHeight * 3 / 2;
+ if (inDef->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ inDef->nBufferSize = 4 + max(sizeof(buffer_handle_t), sizeof(GraphicBuffer *));
+ } else {
+ inDef->nBufferSize = rawBufferSize;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE *outDef = &editPortInfo(kOutputPortIndex)->mDef;
+ outDef->format.video.nFrameWidth = mWidth;
+ outDef->format.video.nFrameHeight = mHeight;
+ outDef->format.video.nBitrate = mBitrate;
+
+ outDef->nBufferSize = max(mMinOutputBufferSize, rawBufferSize / mMinCompressionRatio);
+}
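As a concrete example of the sizing above: with mWidth = 1280, mHeight = 720 and the default stride equal to the width, rawBufferSize = 1280 * 720 * 3 / 2 = 1,382,400 bytes; with the default mMinCompressionRatio of 1 the output port advertises buffers of the same size (never smaller than mMinOutputBufferSize), while an OMX_COLOR_FormatAndroidOpaque input port only needs the 4-byte metadata tag plus room for a handle or GraphicBuffer pointer.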
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetPortParams(
+ const OMX_PARAM_PORTDEFINITIONTYPE *port) {
+ if (port->nPortIndex == kInputPortIndex) {
+ mWidth = port->format.video.nFrameWidth;
+ mHeight = port->format.video.nFrameHeight;
+
+ // xFramerate comes in Q16 format, in frames per second unit
+ mFramerate = port->format.video.xFramerate;
+
+ if (port->format.video.eCompressionFormat != OMX_VIDEO_CodingUnused
+ || (port->format.video.eColorFormat != OMX_COLOR_FormatYUV420Planar
+ && port->format.video.eColorFormat != OMX_COLOR_FormatYUV420SemiPlanar
+ && port->format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque)) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ mColorFormat = port->format.video.eColorFormat;
+ } else if (port->nPortIndex == kOutputPortIndex) {
+ if (port->format.video.eCompressionFormat != mCodingType
+ || port->format.video.eColorFormat != OMX_COLOR_FormatUnused) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ mBitrate = port->format.video.nBitrate;
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+
+ updatePortParams();
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoErrorCorrection:
+ {
+ return OMX_ErrorNotImplemented;
+ }
+
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)param;
+
+ if (strncmp((const char *)roleParams->cRole,
+ mComponentRole,
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamPortDefinition:
+ {
+ OMX_ERRORTYPE err = internalSetPortParams((const OMX_PARAM_PORTDEFINITIONTYPE *)param);
+
+ if (err != OMX_ErrorNone) {
+ return err;
+ }
+
+ return SimpleSoftOMXComponent::internalSetParameter(index, param);
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ const OMX_VIDEO_PARAM_PORTFORMATTYPE* format =
+ (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+
+ if (format->nPortIndex == kInputPortIndex) {
+ if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
+ format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+ format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ mColorFormat = format->eColorFormat;
+
+ updatePortParams();
+ return OMX_ErrorNone;
+ } else {
+ ALOGE("Unsupported color format %i", format->eColorFormat);
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else if (format->nPortIndex == kOutputPortIndex) {
+ if (format->eCompressionFormat == mCodingType) {
+ return OMX_ErrorNone;
+ } else {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+ }
+
+ case kStoreMetaDataExtensionIndex:
+ {
+ // storeMetaDataInBuffers
+ const StoreMetaDataInBuffersParams *storeParam =
+ (const StoreMetaDataInBuffersParams *)param;
+
+ if (storeParam->nPortIndex == kOutputPortIndex) {
+ return storeParam->bStoreMetaData ? OMX_ErrorUnsupportedSetting : OMX_ErrorNone;
+ } else if (storeParam->nPortIndex != kInputPortIndex) {
+ return OMX_ErrorBadPortIndex;
+ }
+
+ mInputDataIsMeta = (storeParam->bStoreMetaData == OMX_TRUE);
+ if (mInputDataIsMeta) {
+ mColorFormat = OMX_COLOR_FormatAndroidOpaque;
+ } else if (mColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ mColorFormat = OMX_COLOR_FormatYUV420Planar;
+ }
+ updatePortParams();
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, param);
+ }
+}
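A client reaches the OMX_IndexParamVideoPortFormat case above through the standard OMX parameter call. A minimal sketch for switching the input to semi-planar YUV (encoder is an assumed OMX_HANDLETYPE, error handling omitted):

OMX_VIDEO_PARAM_PORTFORMATTYPE fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.nSize = sizeof(fmt);
fmt.nVersion.s.nVersionMajor = 1;
fmt.nPortIndex = 0;                                   // input port
fmt.eCompressionFormat = OMX_VIDEO_CodingUnused;
fmt.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;  // in kSupportedColorFormats
OMX_ERRORTYPE err = OMX_SetParameter(encoder, OMX_IndexParamVideoPortFormat, &fmt);
// any other color format is rejected with OMX_ErrorUnsupportedSetting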
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR param) {
+ switch (index) {
+ case OMX_IndexParamVideoErrorCorrection:
+ {
+ return OMX_ErrorNotImplemented;
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+
+ if (formatParams->nPortIndex == kInputPortIndex) {
+ if (formatParams->nIndex >= NELEM(kSupportedColorFormats)) {
+ return OMX_ErrorNoMore;
+ }
+
+ // Color formats, in order of preference
+ formatParams->eColorFormat = kSupportedColorFormats[formatParams->nIndex];
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ formatParams->xFramerate = mFramerate;
+ return OMX_ErrorNone;
+ } else if (formatParams->nPortIndex == kOutputPortIndex) {
+ formatParams->eCompressionFormat = mCodingType;
+ formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+ formatParams->xFramerate = 0;
+ return OMX_ErrorNone;
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+ }
+
+ case OMX_IndexParamVideoProfileLevelQuerySupported:
+ {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
+ (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) param;
+
+ if (profileLevel->nPortIndex != kOutputPortIndex) {
+ ALOGE("Invalid port index: %u", profileLevel->nPortIndex);
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ if (profileLevel->nProfileIndex >= mNumProfileLevels) {
+ return OMX_ErrorNoMore;
+ }
+
+ profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
+ profileLevel->eLevel = mProfileLevels[profileLevel->nProfileIndex].mLevel;
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, param);
+ }
+}
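The matching discovery pattern walks nIndex until the component answers OMX_ErrorNoMore, which is how the preference order of kSupportedColorFormats is exposed by the case above. A sketch (encoder again an assumed handle):

OMX_VIDEO_PARAM_PORTFORMATTYPE fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.nSize = sizeof(fmt);
fmt.nVersion.s.nVersionMajor = 1;
fmt.nPortIndex = 0;  // input port
for (OMX_U32 i = 0; ; ++i) {
    fmt.nIndex = i;
    if (OMX_GetParameter(encoder, OMX_IndexParamVideoPortFormat, &fmt) != OMX_ErrorNone) {
        break;  // OMX_ErrorNoMore after the last supported entry
    }
    // fmt.eColorFormat now holds kSupportedColorFormats[i]
}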
+
+// static
+void SoftVideoEncoderOMXComponent::ConvertFlexYUVToPlanar(
+ uint8_t *dst, size_t dstStride, size_t dstVStride,
+ struct android_ycbcr *ycbcr, int32_t width, int32_t height) {
+ const uint8_t *src = (const uint8_t *)ycbcr->y;
+ const uint8_t *srcU = (const uint8_t *)ycbcr->cb;
+ const uint8_t *srcV = (const uint8_t *)ycbcr->cr;
+ uint8_t *dstU = dst + dstVStride * dstStride;
+ uint8_t *dstV = dstU + (dstVStride >> 1) * (dstStride >> 1);
+
+ for (size_t y = height; y > 0; --y) {
+ memcpy(dst, src, width);
+ dst += dstStride;
+ src += ycbcr->ystride;
+ }
+ if (ycbcr->cstride == ycbcr->ystride >> 1 && ycbcr->chroma_step == 1) {
+ // planar
+ for (size_t y = height >> 1; y > 0; --y) {
+ memcpy(dstU, srcU, width >> 1);
+ dstU += dstStride >> 1;
+ srcU += ycbcr->cstride;
+ memcpy(dstV, srcV, width >> 1);
+ dstV += dstStride >> 1;
+ srcV += ycbcr->cstride;
+ }
+ } else {
+ // arbitrary
+ for (size_t y = height >> 1; y > 0; --y) {
+ for (size_t x = width >> 1; x > 0; --x) {
+ *dstU++ = *srcU;
+ *dstV++ = *srcV;
+ srcU += ycbcr->chroma_step;
+ srcV += ycbcr->chroma_step;
+ }
+ dstU += (dstStride >> 1) - (width >> 1);
+ dstV += (dstStride >> 1) - (width >> 1);
+ srcU += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
+ srcV += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
+ }
+ }
+}
+
+// static
+void SoftVideoEncoderOMXComponent::ConvertYUV420SemiPlanarToYUV420Planar(
+ const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height) {
+ // TODO: add support for stride
+ int32_t outYsize = width * height;
+ uint32_t *outY = (uint32_t *) outYUV;
+ uint16_t *outCb = (uint16_t *) (outYUV + outYsize);
+ uint16_t *outCr = (uint16_t *) (outYUV + outYsize + (outYsize >> 2));
+
+ /* Y copying */
+ memcpy(outY, inYVU, outYsize);
+
+ /* U & V copying */
+ // FIXME this only works if width is multiple of 4
+ uint32_t *inYVU_4 = (uint32_t *) (inYVU + outYsize);
+ for (int32_t i = height >> 1; i > 0; --i) {
+ for (int32_t j = width >> 2; j > 0; --j) {
+ uint32_t temp = *inYVU_4++;
+ uint32_t tempU = temp & 0xFF;
+ tempU = tempU | ((temp >> 8) & 0xFF00);
+
+ uint32_t tempV = (temp >> 8) & 0xFF;
+ tempV = tempV | ((temp >> 16) & 0xFF00);
+
+ *outCb++ = tempU;
+ *outCr++ = tempV;
+ }
+ }
+}
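The word-packed chroma loop above is a de-interleave in disguise. A byte-wise equivalent (same little-endian assumption, same lack of stride support; pointer names are assumed, not from this file) makes the data movement easier to verify:

const uint8_t *inC = inYVU + width * height;       // interleaved chroma plane
uint8_t *outP1 = outYUV + width * height;          // plane written via outCb above
uint8_t *outP2 = outP1 + (width * height) / 4;     // plane written via outCr above
for (int32_t i = 0; i < (width * height) / 4; ++i) {
    outP1[i] = inC[2 * i];        // even interleaved bytes
    outP2[i] = inC[2 * i + 1];    // odd interleaved bytes
}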
+
+// static
+void SoftVideoEncoderOMXComponent::ConvertRGB32ToPlanar(
+ uint8_t *dstY, size_t dstStride, size_t dstVStride,
+ const uint8_t *src, size_t width, size_t height, size_t srcStride,
+ bool bgr) {
+ CHECK((width & 1) == 0);
+ CHECK((height & 1) == 0);
+
+ uint8_t *dstU = dstY + dstStride * dstVStride;
+ uint8_t *dstV = dstU + (dstStride >> 1) * (dstVStride >> 1);
+
+#ifdef SURFACE_IS_BGR32
+ bgr = !bgr;
+#endif
+
+ const size_t redOffset = bgr ? 2 : 0;
+ const size_t greenOffset = 1;
+ const size_t blueOffset = bgr ? 0 : 2;
+
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ unsigned red = src[redOffset];
+ unsigned green = src[greenOffset];
+ unsigned blue = src[blueOffset];
+
+ // using ITU-R BT.601 conversion matrix
+ unsigned luma =
+ ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
+
+ dstY[x] = luma;
+
+ if ((x & 1) == 0 && (y & 1) == 0) {
+ unsigned U =
+ ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
+
+ unsigned V =
+ ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
+
+ dstU[x >> 1] = U;
+ dstV[x >> 1] = V;
+ }
+ src += 4;
+ }
+
+ if ((y & 1) == 0) {
+ dstU += dstStride >> 1;
+ dstV += dstStride >> 1;
+ }
+
+ src += srcStride - 4 * width;
+ dstY += dstStride;
+ }
+}
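A quick check of the fixed-point matrix: a white input pixel (R = G = B = 255) gives luma = ((255*66 + 255*129 + 255*25) >> 8) + 16 = (56100 >> 8) + 16 = 235 and U = V = ((0) >> 8) + 128 = 128, i.e. video-range white with neutral chroma, as BT.601 prescribes.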
+
+const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(
+ uint8_t *dst, size_t dstSize,
+ const uint8_t *src, size_t srcSize,
+ size_t width, size_t height) const {
+ size_t dstStride = width;
+ size_t dstVStride = height;
+
+ MetadataBufferType bufferType = *(MetadataBufferType *)src;
+ bool usingGraphicBuffer = bufferType == kMetadataBufferTypeGraphicBuffer;
+ if (!usingGraphicBuffer && bufferType != kMetadataBufferTypeGrallocSource) {
+ ALOGE("Unsupported metadata type (%d)", bufferType);
+ return NULL;
+ }
+
+ if (mGrallocModule == NULL) {
+ CHECK_EQ(0, hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &mGrallocModule));
+ }
+
+ const gralloc_module_t *grmodule =
+ (const gralloc_module_t *)mGrallocModule;
+
+ buffer_handle_t handle;
+ int format;
+ size_t srcStride;
+ size_t srcVStride;
+ if (usingGraphicBuffer) {
+ if (srcSize < sizeof(OMX_U32) + sizeof(GraphicBuffer *)) {
+ ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(OMX_U32) + sizeof(GraphicBuffer *));
+ return NULL;
+ }
+
+ GraphicBuffer *buffer = *(GraphicBuffer **)(src + sizeof(OMX_U32));
+ handle = buffer->handle;
+ format = buffer->format;
+ srcStride = buffer->stride;
+ srcVStride = buffer->height;
+ // convert stride from pixels to bytes
+ if (format != HAL_PIXEL_FORMAT_YV12 &&
+ format != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // TODO do we need to support other formats?
+ srcStride *= 4;
+ }
+ } else {
+ // TODO: remove this part. Check if anyone uses this.
+
+ if (srcSize < sizeof(OMX_U32) + sizeof(buffer_handle_t)) {
+ ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(OMX_U32) + sizeof(buffer_handle_t));
+ return NULL;
+ }
+
+ handle = *(buffer_handle_t *)(src + sizeof(OMX_U32));
+ // assume HAL_PIXEL_FORMAT_RGBA_8888
+ // there is no way to get the src stride without the graphic buffer
+ format = HAL_PIXEL_FORMAT_RGBA_8888;
+ srcStride = width * 4;
+ srcVStride = height;
+ }
+
+ size_t neededSize =
+ dstStride * dstVStride + (width >> 1)
+ + (dstStride >> 1) * ((dstVStride >> 1) + (height >> 1) - 1);
+ if (dstSize < neededSize) {
+ ALOGE("destination buffer is too small (%zu vs %zu)", dstSize, neededSize);
+ return NULL;
+ }
+
+ void *bits = NULL;
+ struct android_ycbcr ycbcr;
+ status_t res;
+ if (format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ res = grmodule->lock_ycbcr(
+ grmodule, handle,
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
+ 0, 0, width, height, &ycbcr);
+ } else {
+ res = grmodule->lock(
+ grmodule, handle,
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
+ 0, 0, width, height, &bits);
+ }
+ if (res != OK) {
+ ALOGE("Unable to lock image buffer %p for access", handle);
+ return NULL;
+ }
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_YV12: // YCrCb / YVU planar
+ // convert to flex YUV
+ ycbcr.y = bits;
+ ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
+ ycbcr.cb = (uint8_t *)ycbcr.cr + (srcStride >> 1) * (srcVStride >> 1);
+ ycbcr.chroma_step = 1;
+ ycbcr.cstride = srcVStride >> 1;
+ ycbcr.ystride = srcVStride;
+ ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+ break;
+ case HAL_PIXEL_FORMAT_YCrCb_420_SP: // YCrCb / YVU semiplanar, NV21
+ // convert to flex YUV
+ ycbcr.y = bits;
+ ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
+ ycbcr.cb = (uint8_t *)ycbcr.cr + 1;
+ ycbcr.chroma_step = 2;
+ ycbcr.cstride = srcVStride;
+ ycbcr.ystride = srcVStride;
+ ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+ break;
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+ break;
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ ConvertRGB32ToPlanar(
+ dst, dstStride, dstVStride,
+ (const uint8_t *)bits, width, height, srcStride,
+ format == HAL_PIXEL_FORMAT_BGRA_8888);
+ break;
+ default:
+ ALOGE("Unsupported pixel format %#x", format);
+ dst = NULL;
+ break;
+ }
+
+ if (grmodule->unlock(grmodule, handle) != OK) {
+ ALOGE("Unable to unlock image buffer %p for access", handle);
+ }
+
+ return dst;
+}
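The reads at the top of extractGraphicBuffer() imply the following metadata layout; this is a description of the parsing above, not an official header:

// bytes [0..3] : MetadataBufferType tag
// bytes [4.. ] : buffer_handle_t    when tag == kMetadataBufferTypeGrallocSource
//                GraphicBuffer *    when tag == kMetadataBufferTypeGraphicBuffer
// The payload sits at a fixed 4-byte offset (not naturally aligned for a
// 64-bit pointer), which is why the code uses pointer arithmetic on src
// rather than casting to a struct.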
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::getExtensionIndex(
+ const char *name, OMX_INDEXTYPE *index) {
+ if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") ||
+ !strcmp(name, "OMX.google.android.index.storeGraphicBufferInMetaData")) {
+ *(int32_t*)index = kStoreMetaDataExtensionIndex;
+ return OMX_ErrorNone;
+ }
+ return SimpleSoftOMXComponent::getExtensionIndex(name, index);
+}
+
+} // namespace android
diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk
index 1061c39..447b29e 100644
--- a/media/libstagefright/omx/tests/Android.mk
+++ b/media/libstagefright/omx/tests/Android.mk
@@ -11,8 +11,12 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := omx_tests
LOCAL_MODULE_TAGS := tests
+LOCAL_32_BIT_ONLY := true
+
include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 44e4f9d..67ff145 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -26,6 +26,7 @@
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -242,7 +243,8 @@ private:
};
static sp<MediaExtractor> CreateExtractorFromURI(const char *uri) {
- sp<DataSource> source = DataSource::CreateFromURI(uri);
+ sp<DataSource> source =
+ DataSource::CreateFromURI(NULL /* httpService */, uri);
if (source == NULL) {
return NULL;
@@ -251,29 +253,6 @@ static sp<MediaExtractor> CreateExtractorFromURI(const char *uri) {
return MediaExtractor::Create(source);
}
-static sp<MediaSource> MakeSource(
- const char *uri,
- const char *mimeType) {
- sp<MediaExtractor> extractor = CreateExtractorFromURI(uri);
-
- if (extractor == NULL) {
- return NULL;
- }
-
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
- sp<MetaData> meta = extractor->getTrackMetaData(i);
-
- const char *trackMIME;
- CHECK(meta->findCString(kKeyMIMEType, &trackMIME));
-
- if (!strcasecmp(trackMIME, mimeType)) {
- return extractor->getTrack(i);
- }
- }
-
- return NULL;
-}
-
status_t Harness::testStateTransitions(
const char *componentName, const char *componentRole) {
if (strncmp(componentName, "OMX.", 4)) {
@@ -461,6 +440,7 @@ static const char *GetMimeFromComponentRole(const char *componentRole) {
{ "audio_decoder.aac", "audio/mp4a-latm" },
{ "audio_decoder.mp3", "audio/mpeg" },
{ "audio_decoder.vorbis", "audio/vorbis" },
+ { "audio_decoder.opus", "audio/opus" },
{ "audio_decoder.g711alaw", MEDIA_MIMETYPE_AUDIO_G711_ALAW },
{ "audio_decoder.g711mlaw", MEDIA_MIMETYPE_AUDIO_G711_MLAW },
};
@@ -493,6 +473,7 @@ static const char *GetURLForMime(const char *mime) {
{ "audio/mpeg",
"file:///sdcard/media_api/music/MP3_48KHz_128kbps_s_1_17_CBR.mp3" },
{ "audio/vorbis", NULL },
+ { "audio/opus", NULL },
{ "video/x-vnd.on2.vp8",
"file:///sdcard/media_api/video/big-buck-bunny_trailer.webm" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "file:///sdcard/M1F1-Alaw-AFsp.wav" },
diff --git a/media/libstagefright/rtsp/AAMRAssembler.cpp b/media/libstagefright/rtsp/AAMRAssembler.cpp
index 9e8725a..bb2a238 100644
--- a/media/libstagefright/rtsp/AAMRAssembler.cpp
+++ b/media/libstagefright/rtsp/AAMRAssembler.cpp
@@ -143,8 +143,8 @@ ARTPAssembler::AssemblyStatus AAMRAssembler::addPacket(
return MALFORMED_PACKET;
}
- unsigned payloadHeader = buffer->data()[0];
- unsigned CMR = payloadHeader >> 4;
+ unsigned payloadHeader __unused = buffer->data()[0];
+ unsigned CMR __unused = payloadHeader >> 4;
Vector<uint8_t> tableOfContents;
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index a6825eb..4bc67e8 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -124,7 +124,7 @@ ARTPAssembler::AssemblyStatus AAVCAssembler::addNALUnit(
}
void AAVCAssembler::addSingleNALUnit(const sp<ABuffer> &buffer) {
- ALOGV("addSingleNALUnit of size %d", buffer->size());
+ ALOGV("addSingleNALUnit of size %zu", buffer->size());
#if !LOG_NDEBUG
hexdump(buffer->data(), buffer->size());
#endif
@@ -191,7 +191,7 @@ ARTPAssembler::AssemblyStatus AAVCAssembler::addFragmentedNALUnit(
CHECK((indicator & 0x1f) == 28);
if (size < 2) {
- ALOGV("Ignoring malformed FU buffer (size = %d)", size);
+ ALOGV("Ignoring malformed FU buffer (size = %zu)", size);
queue->erase(queue->begin());
++mNextExpectedSeqNo;
@@ -225,7 +225,7 @@ ARTPAssembler::AssemblyStatus AAVCAssembler::addFragmentedNALUnit(
} else {
List<sp<ABuffer> >::iterator it = ++queue->begin();
while (it != queue->end()) {
- ALOGV("sequence length %d", totalCount);
+ ALOGV("sequence length %zu", totalCount);
const sp<ABuffer> &buffer = *it;
@@ -294,7 +294,7 @@ ARTPAssembler::AssemblyStatus AAVCAssembler::addFragmentedNALUnit(
for (size_t i = 0; i < totalCount; ++i) {
const sp<ABuffer> &buffer = *it;
- ALOGV("piece #%d/%d", i + 1, totalCount);
+ ALOGV("piece #%zu/%zu", i + 1, totalCount);
#if !LOG_NDEBUG
hexdump(buffer->data(), buffer->size());
#endif
@@ -317,7 +317,7 @@ ARTPAssembler::AssemblyStatus AAVCAssembler::addFragmentedNALUnit(
void AAVCAssembler::submitAccessUnit() {
CHECK(!mNALUnits.empty());
- ALOGV("Access unit complete (%d nal units)", mNALUnits.size());
+ ALOGV("Access unit complete (%zu nal units)", mNALUnits.size());
size_t totalSize = 0;
for (List<sp<ABuffer> >::iterator it = mNALUnits.begin();
diff --git a/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp b/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
index 4c9bf5b..dca5c89 100644
--- a/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG2TSAssembler.cpp
@@ -34,7 +34,9 @@
namespace android {
AMPEG2TSAssembler::AMPEG2TSAssembler(
- const sp<AMessage> &notify, const char *desc, const AString &params)
+ const sp<AMessage> &notify,
+ const char * /* desc */,
+ const AString & /* params */)
: mNotifyMsg(notify),
mNextExpectedSeqNoValid(false),
mNextExpectedSeqNo(0) {
diff --git a/media/libstagefright/rtsp/AMPEG2TSAssembler.h b/media/libstagefright/rtsp/AMPEG2TSAssembler.h
index 712e18e..f39c2b5 100644
--- a/media/libstagefright/rtsp/AMPEG2TSAssembler.h
+++ b/media/libstagefright/rtsp/AMPEG2TSAssembler.h
@@ -24,7 +24,7 @@ namespace android {
struct AMessage;
struct AString;
-struct MetaData;
+class MetaData;
struct AMPEG2TSAssembler : public ARTPAssembler {
AMPEG2TSAssembler(
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index aa8ffc6..1f76068 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -108,7 +108,7 @@ static status_t parseAudioObjectType(
static status_t parseGASpecificConfig(
ABitReader *bits,
unsigned audioObjectType, unsigned channelConfiguration) {
- unsigned frameLengthFlag = bits->getBits(1);
+ unsigned frameLengthFlag __unused = bits->getBits(1);
unsigned dependsOnCoreCoder = bits->getBits(1);
if (dependsOnCoreCoder) {
/* unsigned coreCoderDelay = */bits->getBits(1);
@@ -217,7 +217,7 @@ static status_t parseAudioSpecificConfig(ABitReader *bits, sp<ABuffer> *asc) {
// Apparently an extension is always considered an even
// multiple of 8 bits long.
- ALOGI("Skipping %d bits after sync extension",
+ ALOGI("Skipping %zu bits after sync extension",
8 - (numBitsInExtension & 7));
bits->skipBits(8 - (numBitsInExtension & 7));
@@ -422,7 +422,7 @@ sp<ABuffer> AMPEG4AudioAssembler::removeLATMFraming(const sp<ABuffer> &buffer) {
}
if (offset < buffer->size()) {
- ALOGI("ignoring %d bytes of trailing data", buffer->size() - offset);
+ ALOGI("ignoring %zu bytes of trailing data", buffer->size() - offset);
}
CHECK_LE(offset, buffer->size());
diff --git a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
index eefceba..156004c 100644
--- a/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4ElementaryAssembler.cpp
@@ -249,11 +249,15 @@ ARTPAssembler::AssemblyStatus AMPEG4ElementaryAssembler::addPacket(
mPackets.push_back(buffer);
} else {
// hexdump(buffer->data(), buffer->size());
+ if (buffer->size() < 2) {
+ return MALFORMED_PACKET;
+ }
- CHECK_GE(buffer->size(), 2u);
unsigned AU_headers_length = U16_AT(buffer->data()); // in bits
- CHECK_GE(buffer->size(), 2 + (AU_headers_length + 7) / 8);
+ if (buffer->size() < 2 + (AU_headers_length + 7) / 8) {
+ return MALFORMED_PACKET;
+ }
List<AUHeader> headers;
@@ -342,7 +346,9 @@ ARTPAssembler::AssemblyStatus AMPEG4ElementaryAssembler::addPacket(
it != headers.end(); ++it) {
const AUHeader &header = *it;
- CHECK_LE(offset + header.mSize, buffer->size());
+ if (buffer->size() < offset + header.mSize) {
+ return MALFORMED_PACKET;
+ }
sp<ABuffer> accessUnit = new ABuffer(header.mSize);
memcpy(accessUnit->data(), buffer->data() + offset, header.mSize);
@@ -353,7 +359,10 @@ ARTPAssembler::AssemblyStatus AMPEG4ElementaryAssembler::addPacket(
mPackets.push_back(accessUnit);
}
- CHECK_EQ(offset, buffer->size());
+ if (offset != buffer->size()) {
+ ALOGW("potentially malformed packet (offset %zu, size %zu)",
+ offset, buffer->size());
+ }
}
queue->erase(queue->begin());
@@ -365,7 +374,7 @@ ARTPAssembler::AssemblyStatus AMPEG4ElementaryAssembler::addPacket(
void AMPEG4ElementaryAssembler::submitAccessUnit() {
CHECK(!mPackets.empty());
- ALOGV("Access unit complete (%d nal units)", mPackets.size());
+ ALOGV("Access unit complete (%zu nal units)", mPackets.size());
sp<ABuffer> accessUnit;
@@ -400,6 +409,7 @@ ARTPAssembler::AssemblyStatus AMPEG4ElementaryAssembler::assembleMore(
const sp<ARTPSource> &source) {
AssemblyStatus status = addPacket(source);
if (status == MALFORMED_PACKET) {
+ ALOGI("access unit is damaged");
mAccessUnitDamaged = true;
}
return status;
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 462c384..cfafaa7 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -23,7 +23,7 @@
#include "ARawAudioAssembler.h"
#include "ASessionDescription.h"
-#include "avc_utils.h"
+#include "include/avc_utils.h"
#include <ctype.h>
@@ -279,8 +279,6 @@ sp<ABuffer> MakeAACCodecSpecificData2(const char *params) {
// be encoded.
CHECK_LT(20 + config->size(), 128u);
- const uint8_t *data = config->data();
-
static const uint8_t kStaticESDS[] = {
0x03, 22,
0x00, 0x00, // ES_ID
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index af369b5..a6bd824 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -563,7 +563,7 @@ status_t ARTPConnection::parseRTCP(StreamInfo *s, const sp<ABuffer> &buffer) {
default:
{
- ALOGW("Unknown RTCP packet type %u of size %d",
+ ALOGW("Unknown RTCP packet type %u of size %zu",
(unsigned)data[1], headerLength);
break;
}
@@ -664,11 +664,10 @@ void ARTPConnection::onInjectPacket(const sp<AMessage> &msg) {
StreamInfo *s = &*it;
- status_t err;
if (it->mRTPSocket == index) {
- err = parseRTP(s, buffer);
+ parseRTP(s, buffer);
} else {
- err = parseRTCP(s, buffer);
+ parseRTCP(s, buffer);
}
}
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 0d07043..e1607bf 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -114,7 +114,7 @@ bool ARTPWriter::reachedEOS() {
return (mFlags & kFlagEOS) != 0;
}
-status_t ARTPWriter::start(MetaData *params) {
+status_t ARTPWriter::start(MetaData * /* params */) {
Mutex::Autolock autoLock(mLock);
if (mFlags & kFlagStarted) {
return INVALID_OPERATION;
@@ -277,7 +277,7 @@ void ARTPWriter::onRead(const sp<AMessage> &msg) {
}
if (mediaBuf->range_length() > 0) {
- ALOGV("read buffer of size %d", mediaBuf->range_length());
+ ALOGV("read buffer of size %zu", mediaBuf->range_length());
if (mMode == H264) {
StripStartcode(mediaBuf);
@@ -461,7 +461,7 @@ void ARTPWriter::dumpSessionDesc() {
sdp.append("m=audio ");
}
- sdp.append(StringPrintf("%d", ntohs(mRTPAddr.sin_port)));
+ sdp.append(AStringPrintf("%d", ntohs(mRTPAddr.sin_port)));
sdp.append(
" RTP/AVP " PT_STR "\r\n"
"b=AS 320000\r\n"
@@ -480,7 +480,7 @@ void ARTPWriter::dumpSessionDesc() {
CHECK_EQ(sampleRate, (mMode == AMR_NB) ? 8000 : 16000);
sdp.append(mMode == AMR_NB ? "AMR" : "AMR-WB");
- sdp.append(StringPrintf("/%d/%d", sampleRate, numChannels));
+ sdp.append(AStringPrintf("/%d/%d", sampleRate, numChannels));
} else {
TRESPASS();
}
@@ -543,7 +543,7 @@ void ARTPWriter::makeH264SPropParamSets(MediaBuffer *buffer) {
CHECK_EQ((unsigned)data[0], 0x67u);
mProfileLevel =
- StringPrintf("%02X%02X%02X", data[1], data[2], data[3]);
+ AStringPrintf("%02X%02X%02X", data[1], data[2], data[3]);
encodeBase64(data, startCodePos, &mSeqParamSet);
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index efde7a9..60b3aaf 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -33,7 +33,7 @@
#include <openssl/md5.h>
#include <sys/socket.h>
-#include "HTTPBase.h"
+#include "include/HTTPBase.h"
namespace android {
@@ -42,7 +42,7 @@ const int64_t ARTSPConnection::kSelectTimeoutUs = 1000ll;
// static
const AString ARTSPConnection::sUserAgent =
- StringPrintf("User-Agent: %s\r\n", MakeUserAgent().c_str());
+ AStringPrintf("User-Agent: %s\r\n", MakeUserAgent().c_str());
ARTSPConnection::ARTSPConnection(bool uidValid, uid_t uid)
: mUIDValid(uidValid),
@@ -239,7 +239,7 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
// right here, since we currently have no way of asking the user
// for this information.
- ALOGE("Malformed rtsp url %s", url.c_str());
+ ALOGE("Malformed rtsp url %s", uriDebugString(url).c_str());
reply->setInt32("result", ERROR_MALFORMED);
reply->post();
diff --git a/media/libstagefright/rtsp/ARawAudioAssembler.cpp b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
index 0da5dd2..167f7a4 100644
--- a/media/libstagefright/rtsp/ARawAudioAssembler.cpp
+++ b/media/libstagefright/rtsp/ARawAudioAssembler.cpp
@@ -34,7 +34,9 @@
namespace android {
ARawAudioAssembler::ARawAudioAssembler(
- const sp<AMessage> &notify, const char *desc, const AString &params)
+ const sp<AMessage> &notify,
+ const char * /* desc */,
+ const AString & /* params */)
: mNotifyMsg(notify),
mNextExpectedSeqNoValid(false),
mNextExpectedSeqNo(0) {
diff --git a/media/libstagefright/rtsp/ARawAudioAssembler.h b/media/libstagefright/rtsp/ARawAudioAssembler.h
index ed7af08..bc1dea6 100644
--- a/media/libstagefright/rtsp/ARawAudioAssembler.h
+++ b/media/libstagefright/rtsp/ARawAudioAssembler.h
@@ -24,7 +24,7 @@ namespace android {
struct AMessage;
struct AString;
-struct MetaData;
+class MetaData;
struct ARawAudioAssembler : public ARTPAssembler {
ARawAudioAssembler(
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index a9b3330..98498e9 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -319,6 +319,11 @@ bool ASessionDescription::parseNTPRange(
s = end + 1; // skip the dash.
+ if (*s == '\0') {
+ *npt2 = FLT_MAX; // open ended.
+ return true;
+ }
+
if (!strncmp("now", s, 3)) {
return false; // no absolute end time available
}
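With this change an open-ended range such as npt=0- now parses successfully, with *npt2 set to FLT_MAX to mean "until the end of the stream", while a range whose end is "now" is still rejected because no absolute end time is available.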
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index e77c69c..9fedb71 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -19,10 +19,11 @@ LOCAL_SRC_FILES:= \
ASessionDescription.cpp \
SDPLoader.cpp \
+LOCAL_SHARED_LIBRARIES += libcrypto
+
LOCAL_C_INCLUDES:= \
- $(TOP)/frameworks/av/media/libstagefright/include \
- $(TOP)/frameworks/native/include/media/openmax \
- $(TOP)/external/openssl/include
+ $(TOP)/frameworks/av/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax
LOCAL_MODULE:= libstagefright_rtsp
@@ -30,6 +31,10 @@ ifeq ($(TARGET_ARCH),arm)
LOCAL_CFLAGS += -Wno-psabi
endif
+LOCAL_CFLAGS += -Werror
+
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -55,4 +60,6 @@ LOCAL_MODULE_TAGS := optional
LOCAL_MODULE:= rtp_test
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
+
# include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index cd77aa0..3bf489b 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -19,7 +19,11 @@
#define MY_HANDLER_H_
//#define LOG_NDEBUG 0
+
+#ifndef LOG_TAG
#define LOG_TAG "MyHandler"
+#endif
+
#include <utils/Log.h>
#include "APacketSource.h"
@@ -42,6 +46,12 @@
#include "HTTPBase.h"
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
// If no access units are received within 10 secs, assume that the rtp
// stream has ended and signal end of stream.
static int64_t kAccessUnitTimeoutUs = 10000000ll;
@@ -146,10 +156,10 @@ struct MyHandler : public AHandler {
mSessionURL.append("rtsp://");
mSessionURL.append(host);
mSessionURL.append(":");
- mSessionURL.append(StringPrintf("%u", port));
+ mSessionURL.append(AStringPrintf("%u", port));
mSessionURL.append(path);
- ALOGI("rewritten session url: '%s'", mSessionURL.c_str());
+ ALOGV("rewritten session url: '%s'", mSessionURL.c_str());
}
mSessionHost = host;
@@ -178,7 +188,7 @@ struct MyHandler : public AHandler {
mConn->connect(mOriginalSessionURL.c_str(), reply);
}
- AString getControlURL(sp<ASessionDescription> desc) {
+ AString getControlURL() {
AString sessionLevelControlURL;
if (mSessionDesc->findAttribute(
0,
@@ -244,7 +254,9 @@ struct MyHandler : public AHandler {
static void addSDES(int s, const sp<ABuffer> &buffer) {
struct sockaddr_in addr;
socklen_t addrSize = sizeof(addr);
- CHECK_EQ(0, getsockname(s, (sockaddr *)&addr, &addrSize));
+ if (getsockname(s, (sockaddr *)&addr, &addrSize) != 0) {
+ inet_aton("0.0.0.0", &(addr.sin_addr));
+ }
uint8_t *data = buffer->data() + buffer->size();
data[0] = 0x80 | 1;
@@ -478,21 +490,32 @@ struct MyHandler : public AHandler {
sp<ARTSPResponse> response =
static_cast<ARTSPResponse *>(obj.get());
- if (response->mStatusCode == 302) {
+ if (response->mStatusCode == 301 || response->mStatusCode == 302) {
ssize_t i = response->mHeaders.indexOfKey("location");
CHECK_GE(i, 0);
- mSessionURL = response->mHeaders.valueAt(i);
-
- AString request;
- request = "DESCRIBE ";
- request.append(mSessionURL);
- request.append(" RTSP/1.0\r\n");
- request.append("Accept: application/sdp\r\n");
- request.append("\r\n");
+ mOriginalSessionURL = response->mHeaders.valueAt(i);
+ mSessionURL = mOriginalSessionURL;
+
+ // Strip any authentication info from the session url, we don't
+ // want to transmit user/pass in cleartext.
+ AString host, path, user, pass;
+ unsigned port;
+ if (ARTSPConnection::ParseURL(
+ mSessionURL.c_str(), &host, &port, &path, &user, &pass)
+ && user.size() > 0) {
+ mSessionURL.clear();
+ mSessionURL.append("rtsp://");
+ mSessionURL.append(host);
+ mSessionURL.append(":");
+ mSessionURL.append(AStringPrintf("%u", port));
+ mSessionURL.append(path);
+
+ ALOGI("rewritten session url: '%s'", mSessionURL.c_str());
+ }
- sp<AMessage> reply = new AMessage('desc', id());
- mConn->sendRequest(request.c_str(), reply);
+ sp<AMessage> reply = new AMessage('conn', id());
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
break;
}
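In practice this means a redirect to, say, rtsp://user:pass@host.example/live (a hypothetical Location value) is followed by reconnecting to that full URL, so the credentials remain available for authentication, while the rewritten mSessionURL used in subsequent request lines drops the user:pass part and never sends it in cleartext.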
@@ -545,7 +568,7 @@ struct MyHandler : public AHandler {
mBaseURL = tmp;
}
- mControlURL = getControlURL(mSessionDesc);
+ mControlURL = getControlURL();
if (mSessionDesc->countTracks() < 2) {
// There's no actual tracks in this session.
@@ -591,7 +614,7 @@ struct MyHandler : public AHandler {
mSeekable = !isLiveStream(mSessionDesc);
- mControlURL = getControlURL(mSessionDesc);
+ mControlURL = getControlURL();
if (mSessionDesc->countTracks() < 2) {
// There's no actual tracks in this session.
@@ -1215,7 +1238,7 @@ struct MyHandler : public AHandler {
request.append("\r\n");
request.append(
- StringPrintf(
+ AStringPrintf(
"Range: npt=%lld-\r\n", timeUs / 1000000ll));
request.append("\r\n");
@@ -1805,6 +1828,8 @@ private:
bool addMediaTimestamp(
int32_t trackIndex, const TrackInfo *track,
const sp<ABuffer> &accessUnit) {
+ UNUSED_UNLESS_VERBOSE(trackIndex);
+
uint32_t rtpTime;
CHECK(accessUnit->meta()->findInt32(
"rtp-time", (int32_t *)&rtpTime));
diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp
index ed3fa7e..a24eb69 100644
--- a/media/libstagefright/rtsp/SDPLoader.cpp
+++ b/media/libstagefright/rtsp/SDPLoader.cpp
@@ -18,34 +18,30 @@
#define LOG_TAG "SDPLoader"
#include <utils/Log.h>
-#include "SDPLoader.h"
+#include "include/SDPLoader.h"
#include "ASessionDescription.h"
-#include "HTTPBase.h"
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/MediaHTTP.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/Utils.h>
#define DEFAULT_SDP_SIZE 100000
namespace android {
-SDPLoader::SDPLoader(const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+SDPLoader::SDPLoader(
+ const sp<AMessage> &notify,
+ uint32_t flags,
+ const sp<IMediaHTTPService> &httpService)
: mNotify(notify),
mFlags(flags),
- mUIDValid(uidValid),
- mUID(uid),
mNetLooper(new ALooper),
mCancelled(false),
- mHTTPDataSource(
- HTTPBase::Create(
- (mFlags & kFlagIncognito)
- ? HTTPBase::kFlagIncognito
- : 0)) {
- if (mUIDValid) {
- mHTTPDataSource->setUID(mUID);
- }
-
+ mHTTPDataSource(new MediaHTTP(httpService->makeHTTPConnection())) {
mNetLooper->setName("sdp net");
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
@@ -94,11 +90,7 @@ void SDPLoader::onLoad(const sp<AMessage> &msg) {
KeyedVector<String8, String8> *headers = NULL;
msg->findPointer("headers", (void **)&headers);
- if (!(mFlags & kFlagIncognito)) {
- ALOGI("onLoad '%s'", url.c_str());
- } else {
- ALOGI("onLoad <URL suppressed>");
- }
+ ALOGV("onLoad %s", uriDebugString(url, mFlags & kFlagIncognito).c_str());
if (!mCancelled) {
err = mHTTPDataSource->connect(url.c_str(), headers);
@@ -113,7 +105,7 @@ void SDPLoader::onLoad(const sp<AMessage> &msg) {
headers = NULL;
}
- off64_t sdpSize;
+ off64_t sdpSize = 0;
if (err == OK && !mCancelled) {
err = mHTTPDataSource->getSize(&sdpSize);
@@ -130,7 +122,7 @@ void SDPLoader::onLoad(const sp<AMessage> &msg) {
ssize_t readSize = mHTTPDataSource->readAt(0, buffer->data(), sdpSize);
if (readSize < 0) {
- ALOGE("Failed to read SDP, error code = %ld", readSize);
+ ALOGE("Failed to read SDP, error code = %zu", readSize);
err = UNKNOWN_ERROR;
} else {
desc = new ASessionDescription;
diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk
index 06ce16b..8d6ff5b 100644
--- a/media/libstagefright/tests/Android.mk
+++ b/media/libstagefright/tests/Android.mk
@@ -1,15 +1,14 @@
# Build the unit tests.
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
-
-ifneq ($(TARGET_SIMULATOR),true)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
LOCAL_MODULE := SurfaceMediaSource_test
LOCAL_MODULE_TAGS := tests
LOCAL_SRC_FILES := \
- SurfaceMediaSource_test.cpp \
+ SurfaceMediaSource_test.cpp \
DummyRecorder.cpp \
LOCAL_SHARED_LIBRARIES := \
@@ -22,28 +21,46 @@ LOCAL_SHARED_LIBRARIES := \
libstagefright \
libstagefright_foundation \
libstagefright_omx \
- libstlport \
libsync \
libui \
libutils \
liblog
-LOCAL_STATIC_LIBRARIES := \
- libgtest \
- libgtest_main \
-
LOCAL_C_INCLUDES := \
- bionic \
- bionic/libstdc++/include \
- external/gtest/include \
- external/stlport/stlport \
frameworks/av/media/libstagefright \
frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/native/include/media/openmax \
-include $(BUILD_EXECUTABLE)
+LOCAL_32_BIT_ONLY := true
-endif
+include $(BUILD_NATIVE_TEST)
+
+
+include $(CLEAR_VARS)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
+
+LOCAL_MODULE := Utils_test
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+ Utils_test.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ liblog \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ libstagefright_omx \
+
+LOCAL_C_INCLUDES := \
+ frameworks/av/include \
+ frameworks/av/media/libstagefright \
+ frameworks/av/media/libstagefright/include \
+ $(TOP)/frameworks/native/include/media/openmax \
+
+include $(BUILD_NATIVE_TEST)
# Include subdirectory makefiles
# ============================================================
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index 49ffcd6..fd889f9 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -35,7 +35,6 @@
#include <gui/SurfaceComposerClient.h>
#include <binder/ProcessState.h>
-#include <ui/FramebufferNativeWindow.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaBufferGroup.h>
@@ -110,7 +109,7 @@ protected:
} else {
ALOGV("No actual display. Choosing EGLSurface based on SurfaceMediaSource");
sp<IGraphicBufferProducer> sms = (new SurfaceMediaSource(
- getSurfaceWidth(), getSurfaceHeight()))->getBufferQueue();
+ getSurfaceWidth(), getSurfaceHeight()))->getProducer();
sp<Surface> stc = new Surface(sms);
sp<ANativeWindow> window = stc;
@@ -361,9 +360,7 @@ protected:
virtual void SetUp() {
android::ProcessState::self()->startThreadPool();
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
-
- // Manual cast is required to avoid constructor ambiguity
- mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(mSMS->getProducer());
mANW = mSTC;
}
@@ -398,7 +395,7 @@ protected:
ALOGV("SMS-GLTest::SetUp()");
android::ProcessState::self()->startThreadPool();
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
- mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(mSMS->getProducer());
mANW = mSTC;
// Doing the setup related to the GL Side
@@ -527,7 +524,8 @@ void SurfaceMediaSourceTest::oneBufferPass(int width, int height ) {
}
// Dequeuing and queuing the buffer without really filling it in.
-void SurfaceMediaSourceTest::oneBufferPassNoFill(int width, int height ) {
+void SurfaceMediaSourceTest::oneBufferPassNoFill(
+ int /* width */, int /* height */) {
ANativeWindowBuffer* anb;
ASSERT_EQ(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
ASSERT_TRUE(anb != NULL);
@@ -746,9 +744,8 @@ TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaS
CHECK(fd >= 0);
sp<MediaRecorder> mr = SurfaceMediaSourceGLTest::setUpMediaRecorder(fd,
- VIDEO_SOURCE_GRALLOC_BUFFER,
- OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth,
- mYuvTexHeight, 30);
+ VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264,
+ mYuvTexWidth, mYuvTexHeight, 30);
// get the reference to the surfacemediasource living in
// mediaserver that is created by stagefrightrecorder
sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
@@ -783,7 +780,7 @@ TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) {
ALOGV("Verify creating a surface w/ right config + dummy writer*********");
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
- mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(mSMS->getProducer());
mANW = mSTC;
DummyRecorder writer(mSMS);
@@ -880,7 +877,7 @@ TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaSameImageEachBufNpotWrite) {
}
CHECK(fd >= 0);
- sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER,
+ sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
// get the reference to the surfacemediasource living in
@@ -923,7 +920,7 @@ TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaDiffImageEachBufNpotWrite) {
}
CHECK(fd >= 0);
- sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER,
+ sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
// get the reference to the surfacemediasource living in
diff --git a/media/libstagefright/tests/Utils_test.cpp b/media/libstagefright/tests/Utils_test.cpp
new file mode 100644
index 0000000..5c323c1
--- /dev/null
+++ b/media/libstagefright/tests/Utils_test.cpp
@@ -0,0 +1,203 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Utils_test"
+
+#include <gtest/gtest.h>
+#include <utils/String8.h>
+#include <utils/Errors.h>
+#include <fcntl.h>
+#include <unistd.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AStringUtils.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+class UtilsTest : public ::testing::Test {
+};
+
+TEST_F(UtilsTest, TestStringUtils) {
+ ASSERT_EQ(AStringUtils::Compare("Audio", "AudioExt", 5, false), 0);
+ ASSERT_EQ(AStringUtils::Compare("Audio", "audiOExt", 5, true), 0);
+ ASSERT_NE(AStringUtils::Compare("Audio", "audioExt", 5, false), 0);
+ ASSERT_NE(AStringUtils::Compare("Audio", "AudiOExt", 5, false), 0);
+
+ ASSERT_LT(AStringUtils::Compare("Audio", "AudioExt", 7, false), 0);
+ ASSERT_LT(AStringUtils::Compare("Audio", "audiOExt", 7, true), 0);
+
+ ASSERT_GT(AStringUtils::Compare("AudioExt", "Audio", 7, false), 0);
+ ASSERT_GT(AStringUtils::Compare("audiOext", "Audio", 7, true), 0);
+
+ ASSERT_LT(AStringUtils::Compare("Audio", "Video", 5, false), 0);
+ ASSERT_LT(AStringUtils::Compare("Audio1", "Audio2", 6, false), 0);
+ ASSERT_LT(AStringUtils::Compare("audio", "VIDEO", 5, true), 0);
+ ASSERT_LT(AStringUtils::Compare("audio1", "AUDIO2", 6, true), 0);
+
+ ASSERT_GT(AStringUtils::Compare("Video", "Audio", 5, false), 0);
+ ASSERT_GT(AStringUtils::Compare("Audio2", "Audio1", 6, false), 0);
+ ASSERT_GT(AStringUtils::Compare("VIDEO", "audio", 5, true), 0);
+ ASSERT_GT(AStringUtils::Compare("AUDIO2", "audio1", 6, true), 0);
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("AudioA", 5, "AudioB", 5, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 6, "AudioA", 5, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "AudioA", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "audiOB", 5, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("AudioA", 5, "audiOB", 5, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 6, "AudioA", 5, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("AudioA", 5, "AudioA", 6, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 6, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 0, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*1", 1, "String8", 0, false));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*ring1", 5, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*ring2", 5, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring4", 5, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring5", 5, "StrinG8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring8", 5, "String8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*ring8", 5, "String8", 7, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*1", 4, "String8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*2", 4, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*3", 4, "string8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*4", 4, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*5", 4, "AString8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*6", 4, "AString8", 7, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ng1", 6, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng2", 6, "string8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng3", 6, "StRing8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng4", 6, "StriNg8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng5", 6, "StrinG8", 6, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ng6", 6, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng8", 6, "AString8", 7, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng1", 6, "String16", 7, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ing9", 7, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ringA", 8, "String8", 6, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng8", 6, "AString8", 7, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ng1", 6, "String16", 7, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("Str*ing9", 7, "STRING8", 6, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("Str*ringA", 8, "String8", 6, true));
+
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "bestrestroom", 9, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "bestrestrestroom", 13, false));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*str*stro", 8, "bestrestrestroom", 14, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str*1", 9, "bestrestrestroom", 14, false));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "beSTReSTRoom", 9, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str1", 8, "beSTRestreSTRoom", 13, true));
+ ASSERT_FALSE(AStringUtils::MatchesGlob("*str*stro", 8, "bestreSTReSTRoom", 14, true));
+ ASSERT_TRUE(AStringUtils::MatchesGlob("*str*str*1", 9, "bestreSTReSTRoom", 14, true));
+}
+
+TEST_F(UtilsTest, TestDebug) {
+#define LVL(x) (ADebug::Level)(x)
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "", LVL(5)), LVL(5));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", " \t \n ", LVL(2)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "video", "\t\n 3 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo,2:vid*", LVL(5)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "avideo", "\t\n 3 \t\n:\t\n avideo \t\n,\t\n 2 \t\n:\t\n video \t\n", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "audio.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(2));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString(
+ "video.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3));
+ ASSERT_EQ(ADebug::GetDebugLevelFromString("omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(4));
+#undef LVL
+}
+
+TEST_F(UtilsTest, TestFourCC) {
+ ASSERT_EQ(FOURCC('s', 't', 'm' , 'u'), 'stmu');
+}
+
+TEST_F(UtilsTest, TestMathTemplates) {
+ ASSERT_EQ(divRound(-10, -4), 3);
+ ASSERT_EQ(divRound(-11, -4), 3);
+ ASSERT_EQ(divRound(-12, -4), 3);
+ ASSERT_EQ(divRound(-13, -4), 3);
+ ASSERT_EQ(divRound(-14, -4), 4);
+
+ ASSERT_EQ(divRound(10, -4), -3);
+ ASSERT_EQ(divRound(11, -4), -3);
+ ASSERT_EQ(divRound(12, -4), -3);
+ ASSERT_EQ(divRound(13, -4), -3);
+ ASSERT_EQ(divRound(14, -4), -4);
+
+ ASSERT_EQ(divRound(-10, 4), -3);
+ ASSERT_EQ(divRound(-11, 4), -3);
+ ASSERT_EQ(divRound(-12, 4), -3);
+ ASSERT_EQ(divRound(-13, 4), -3);
+ ASSERT_EQ(divRound(-14, 4), -4);
+
+ ASSERT_EQ(divRound(10, 4), 3);
+ ASSERT_EQ(divRound(11, 4), 3);
+ ASSERT_EQ(divRound(12, 4), 3);
+ ASSERT_EQ(divRound(13, 4), 3);
+ ASSERT_EQ(divRound(14, 4), 4);
+
+ ASSERT_EQ(divUp(-11, -4), 3);
+ ASSERT_EQ(divUp(-12, -4), 3);
+ ASSERT_EQ(divUp(-13, -4), 4);
+
+ ASSERT_EQ(divUp(11, -4), -2);
+ ASSERT_EQ(divUp(12, -4), -3);
+ ASSERT_EQ(divUp(13, -4), -3);
+
+ ASSERT_EQ(divUp(-11, 4), -2);
+ ASSERT_EQ(divUp(-12, 4), -3);
+ ASSERT_EQ(divUp(-13, 4), -3);
+
+ ASSERT_EQ(divUp(11, 4), 3);
+ ASSERT_EQ(divUp(12, 4), 3);
+ ASSERT_EQ(divUp(13, 4), 4);
+
+ ASSERT_EQ(align(11, 4), 12);
+ ASSERT_EQ(align(12, 4), 12);
+ ASSERT_EQ(align(13, 4), 16);
+ ASSERT_EQ(align(11, 8), 16);
+ ASSERT_EQ(align(11, 2), 12);
+ ASSERT_EQ(align(11, 1), 11);
+
+ ASSERT_EQ(abs(5L), 5L);
+ ASSERT_EQ(abs(-25), 25);
+
+ ASSERT_EQ(min(5.6f, 6.0f), 5.6f);
+ ASSERT_EQ(min(6.0f, 5.6f), 5.6f);
+ ASSERT_EQ(min(-4.3, 8.6), -4.3);
+ ASSERT_EQ(min(8.6, -4.3), -4.3);
+
+ ASSERT_EQ(max(5.6f, 6.0f), 6.0f);
+ ASSERT_EQ(max(6.0f, 5.6f), 6.0f);
+ ASSERT_EQ(max(-4.3, 8.6), 8.6);
+ ASSERT_EQ(max(8.6, -4.3), 8.6);
+
+ ASSERT_EQ(periodicError(124, 100), 24);
+ ASSERT_EQ(periodicError(288, 100), 12);
+ ASSERT_EQ(periodicError(-345, 100), 45);
+ ASSERT_EQ(periodicError(-493, 100), 7);
+ ASSERT_EQ(periodicError(-550, 100), 50);
+ ASSERT_EQ(periodicError(-600, 100), 0);
+}
+
+} // namespace android
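Note: the rounding behavior exercised by TestMathTemplates above can be reproduced with a small standalone helper. The snippet below is only a hedged sketch consistent with the assertions; the real templates live in media/stagefright/foundation/AUtils.h and may be implemented differently.

    #include <cassert>

    // Round-half-away-from-zero integer division, matching the divRound
    // expectations in TestMathTemplates above (sketch only, not the AUtils.h code).
    template<class T>
    T divRoundSketch(T num, T den) {
        if ((num >= 0) != (den >= 0)) {
            return (num - den / 2) / den;   // operands of opposite sign
        }
        return (num + den / 2) / den;       // operands of the same sign
    }

    int main() {
        assert(divRoundSketch(14, 4) == 4);
        assert(divRoundSketch(-14, 4) == -4);
        assert(divRoundSketch(-13, -4) == 3);
        return 0;
    }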
diff --git a/media/libstagefright/timedtext/Android.mk b/media/libstagefright/timedtext/Android.mk
index f099bbd..6a8b9fc 100644
--- a/media/libstagefright/timedtext/Android.mk
+++ b/media/libstagefright/timedtext/Android.mk
@@ -9,7 +9,8 @@ LOCAL_SRC_FILES:= \
TimedTextSRTSource.cpp \
TimedTextPlayer.cpp
-LOCAL_CFLAGS += -Wno-multichar
+LOCAL_CFLAGS += -Wno-multichar -Werror
+
LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/include/media/stagefright/timedtext \
$(TOP)/frameworks/av/media/libstagefright
diff --git a/media/libstagefright/timedtext/TimedTextDriver.cpp b/media/libstagefright/timedtext/TimedTextDriver.cpp
index 12fd7f4..55a9803 100644
--- a/media/libstagefright/timedtext/TimedTextDriver.cpp
+++ b/media/libstagefright/timedtext/TimedTextDriver.cpp
@@ -20,6 +20,7 @@
#include <binder/IPCThreadState.h>
+#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
#include <media/MediaPlayerInterface.h>
#include <media/stagefright/DataSource.h>
@@ -40,9 +41,11 @@
namespace android {
TimedTextDriver::TimedTextDriver(
- const wp<MediaPlayerBase> &listener)
+ const wp<MediaPlayerBase> &listener,
+ const sp<IMediaHTTPService> &httpService)
: mLooper(new ALooper),
mListener(listener),
+ mHTTPService(httpService),
mState(UNINITIALIZED),
mCurrentTrackIndex(UINT_MAX) {
mLooper->setName("TimedTextDriver");
@@ -130,7 +133,7 @@ status_t TimedTextDriver::selectTrack(size_t index) {
}
mPlayer->start();
break;
- defaut:
+ default:
TRESPASS();
}
return ret;
@@ -178,7 +181,7 @@ status_t TimedTextDriver::seekToAsync(int64_t timeUs) {
case PLAYING:
mPlayer->seekToAsync(timeUs);
return OK;
- defaut:
+ default:
TRESPASS();
}
return UNKNOWN_ERROR;
@@ -207,7 +210,7 @@ status_t TimedTextDriver::addOutOfBandTextSource(
}
sp<DataSource> dataSource =
- DataSource::CreateFromURI(uri);
+ DataSource::CreateFromURI(mHTTPService, uri);
return createOutOfBandTextSource(trackIndex, mimeType, dataSource);
}
diff --git a/media/libstagefright/timedtext/TimedTextPlayer.cpp b/media/libstagefright/timedtext/TimedTextPlayer.cpp
index 9fb0afe..a070487 100644
--- a/media/libstagefright/timedtext/TimedTextPlayer.cpp
+++ b/media/libstagefright/timedtext/TimedTextPlayer.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "TimedTextPlayer"
#include <utils/Log.h>
+#include <inttypes.h>
#include <limits.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -271,7 +272,7 @@ int64_t TimedTextPlayer::delayUsFromCurrentTime(int64_t fireTimeUs) {
sp<MediaPlayerBase> listener = mListener.promote();
if (listener == NULL) {
// TODO: it may be better to return kInvalidTimeUs
- ALOGE("%s: Listener is NULL. (fireTimeUs = %lld)",
+ ALOGE("%s: Listener is NULL. (fireTimeUs = %" PRId64" )",
__FUNCTION__, fireTimeUs);
return 0;
}
diff --git a/media/libstagefright/timedtext/TimedTextPlayer.h b/media/libstagefright/timedtext/TimedTextPlayer.h
index ec8ed25..9cb49ec 100644
--- a/media/libstagefright/timedtext/TimedTextPlayer.h
+++ b/media/libstagefright/timedtext/TimedTextPlayer.h
@@ -27,7 +27,7 @@
namespace android {
-class AMessage;
+struct AMessage;
class MediaPlayerBase;
class TimedTextDriver;
class TimedTextSource;
diff --git a/media/libstagefright/timedtext/TimedTextSRTSource.h b/media/libstagefright/timedtext/TimedTextSRTSource.h
index 598c200..232675e 100644
--- a/media/libstagefright/timedtext/TimedTextSRTSource.h
+++ b/media/libstagefright/timedtext/TimedTextSRTSource.h
@@ -25,7 +25,7 @@
namespace android {
-class AString;
+struct AString;
class DataSource;
class MediaBuffer;
class Parcel;
diff --git a/media/libstagefright/timedtext/TimedTextSource.h b/media/libstagefright/timedtext/TimedTextSource.h
index 756cc31..8c1c1cd 100644
--- a/media/libstagefright/timedtext/TimedTextSource.h
+++ b/media/libstagefright/timedtext/TimedTextSource.h
@@ -47,7 +47,7 @@ class TimedTextSource : public RefBase {
int64_t *endTimeUs,
Parcel *parcel,
const MediaSource::ReadOptions *options = NULL) = 0;
- virtual status_t extractGlobalDescriptions(Parcel *parcel) {
+ virtual status_t extractGlobalDescriptions(Parcel * /* parcel */) {
return INVALID_OPERATION;
}
virtual sp<MetaData> getFormat();
diff --git a/media/libstagefright/timedtext/test/Android.mk b/media/libstagefright/timedtext/test/Android.mk
index a5e7ba2..9a9fde2 100644
--- a/media/libstagefright/timedtext/test/Android.mk
+++ b/media/libstagefright/timedtext/test/Android.mk
@@ -2,7 +2,6 @@ LOCAL_PATH:= $(call my-dir)
# ================================================================
# Unit tests for libstagefright_timedtext
-# See also /development/testrunner/test_defs.xml
# ================================================================
# ================================================================
@@ -18,10 +17,13 @@ LOCAL_SRC_FILES := TimedTextSRTSource_test.cpp
LOCAL_C_INCLUDES := \
$(TOP)/external/expat/lib \
- $(TOP)/frameworks/base/media/libstagefright/timedtext
+ $(TOP)/frameworks/av/media/libstagefright/timedtext
LOCAL_SHARED_LIBRARIES := \
+ libbinder \
libexpat \
- libstagefright
+ libstagefright \
+ libstagefright_foundation \
+ libutils
include $(BUILD_NATIVE_TEST)
diff --git a/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp b/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp
index 40e93c7..3a06d61 100644
--- a/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp
+++ b/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp
@@ -120,26 +120,26 @@ TEST_F(TimedTextSRTSourceTest, readAll) {
err = mSource->read(&startTimeUs, &endTimeUs, &parcel);
EXPECT_EQ(OK, err);
CheckStartTimeMs(parcel, i * kSecToMsec);
- subtitle = StringPrintf("%d\n\n", i);
+ subtitle = AStringPrintf("%d\n\n", i);
CheckDataEquals(parcel, subtitle.c_str());
}
// read edge cases
err = mSource->read(&startTimeUs, &endTimeUs, &parcel);
EXPECT_EQ(OK, err);
CheckStartTimeMs(parcel, 5500);
- subtitle = StringPrintf("6\n\n");
+ subtitle = AStringPrintf("6\n\n");
CheckDataEquals(parcel, subtitle.c_str());
err = mSource->read(&startTimeUs, &endTimeUs, &parcel);
EXPECT_EQ(OK, err);
CheckStartTimeMs(parcel, 5800);
- subtitle = StringPrintf("7\n\n");
+ subtitle = AStringPrintf("7\n\n");
CheckDataEquals(parcel, subtitle.c_str());
err = mSource->read(&startTimeUs, &endTimeUs, &parcel);
EXPECT_EQ(OK, err);
CheckStartTimeMs(parcel, 6000);
- subtitle = StringPrintf("8\n\n");
+ subtitle = AStringPrintf("8\n\n");
CheckDataEquals(parcel, subtitle.c_str());
err = mSource->read(&startTimeUs, &endTimeUs, &parcel);
@@ -202,21 +202,21 @@ TEST_F(TimedTextSRTSourceTest, checkEdgeCase) {
err = mSource->read(&startTimeUs, &endTimeUs, &parcel, &options);
EXPECT_EQ(OK, err);
EXPECT_EQ(5500 * kMsecToUsec, startTimeUs);
- subtitle = StringPrintf("6\n\n");
+ subtitle = AStringPrintf("6\n\n");
CheckDataEquals(parcel, subtitle.c_str());
options.setSeekTo(5800 * kMsecToUsec, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
err = mSource->read(&startTimeUs, &endTimeUs, &parcel, &options);
EXPECT_EQ(OK, err);
EXPECT_EQ(5800 * kMsecToUsec, startTimeUs);
- subtitle = StringPrintf("7\n\n");
+ subtitle = AStringPrintf("7\n\n");
CheckDataEquals(parcel, subtitle.c_str());
options.setSeekTo(6000 * kMsecToUsec, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
err = mSource->read(&startTimeUs, &endTimeUs, &parcel, &options);
EXPECT_EQ(OK, err);
EXPECT_EQ(6000 * kMsecToUsec, startTimeUs);
- subtitle = StringPrintf("8\n\n");
+ subtitle = AStringPrintf("8\n\n");
CheckDataEquals(parcel, subtitle.c_str());
}
diff --git a/media/libstagefright/webm/Android.mk b/media/libstagefright/webm/Android.mk
new file mode 100644
index 0000000..7081463
--- /dev/null
+++ b/media/libstagefright/webm/Android.mk
@@ -0,0 +1,23 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_CPPFLAGS += -D__STDINT_LIMITS \
+ -Werror
+
+LOCAL_SRC_FILES:= EbmlUtil.cpp \
+ WebmElement.cpp \
+ WebmFrame.cpp \
+ WebmFrameThread.cpp \
+ WebmWriter.cpp
+
+
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/include
+
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation \
+ libstagefright \
+ libutils \
+ liblog
+
+LOCAL_MODULE:= libstagefright_webm
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/webm/EbmlUtil.cpp b/media/libstagefright/webm/EbmlUtil.cpp
new file mode 100644
index 0000000..449fec6
--- /dev/null
+++ b/media/libstagefright/webm/EbmlUtil.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+namespace {
+
+// Table for Seal's algorithm for Number of Trailing Zeros. Hacker's Delight
+// online, Figure 5-18 (http://www.hackersdelight.org/revisions.pdf)
+// The entries whose value is -1 are never referenced.
+int NTZ_TABLE[] = {
+ 32, 0, 1, 12, 2, 6, -1, 13, 3, -1, 7, -1, -1, -1, -1, 14,
+ 10, 4, -1, -1, 8, -1, -1, 25, -1, -1, -1, -1, -1, 21, 27, 15,
+ 31, 11, 5, -1, -1, -1, -1, -1, 9, -1, -1, 24, -1, -1, 20, 26,
+ 30, -1, -1, -1, -1, 23, -1, 19, 29, -1, 22, 18, 28, 17, 16, -1
+};
+
+int numberOfTrailingZeros32(int32_t i) {
+ uint32_t u = (i & -i) * 0x0450FBAF;
+ return NTZ_TABLE[(u) >> 26];
+}
+
+uint64_t highestOneBit(uint64_t n) {
+ n |= (n >> 1);
+ n |= (n >> 2);
+ n |= (n >> 4);
+ n |= (n >> 8);
+ n |= (n >> 16);
+ n |= (n >> 32);
+ return n - (n >> 1);
+}
+
+uint64_t _powerOf2(uint64_t u) {
+ uint64_t powerOf2 = highestOneBit(u);
+ return powerOf2 ? powerOf2 : 1;
+}
+
+// Based on Long.numberOfTrailingZeros in Long.java
+int numberOfTrailingZeros(uint64_t u) {
+ int32_t low = u;
+ return low != 0 ? numberOfTrailingZeros32(low)
+ : 32 + numberOfTrailingZeros32((int32_t) (u >> 32));
+}
+}
+
+namespace webm {
+
+// Encode the id and/or size of an EBML element as bytes with a leading length-descriptor bit:
+//
+// 1xxxxxxx - 1-byte values
+// 01xxxxxx xxxxxxxx -
+// 001xxxxx xxxxxxxx xxxxxxxx -
+// 0001xxxx xxxxxxxx xxxxxxxx xxxxxxxx - ...
+// 00001xxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 000001xx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 0000001x xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 00000001 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - 8-byte values
+//
+// This function uses the fewest bytes possible.
+uint64_t encodeUnsigned(uint64_t u) {
+ uint64_t powerOf2 = _powerOf2(u);
+ if (u + 1 == powerOf2 << 1)
+ powerOf2 <<= 1;
+ int shiftWidth = (7 + numberOfTrailingZeros(powerOf2)) / 7 * 7;
+ uint64_t lengthDescriptor = 1ULL << shiftWidth; // 64-bit shift: shiftWidth can be up to 56
+ return lengthDescriptor | u;
+}
+
+// Like above but pads the input value with leading zeros up to the specified width. The length
+// descriptor is calculated based on width.
+uint64_t encodeUnsigned(uint64_t u, int width) {
+ int shiftWidth = 7 * width;
+ uint64_t lengthDescriptor = 1;
+ lengthDescriptor <<= shiftWidth;
+ return lengthDescriptor | u;
+}
+
+// Calculate the length of an EBML coded id or size from its length descriptor.
+int sizeOf(uint64_t u) {
+ uint64_t powerOf2 = _powerOf2(u);
+ int unsignedLength = numberOfTrailingZeros(powerOf2) / 8 + 1;
+ return unsignedLength;
+}
+
+// Serialize an EBML coded id or size in big-endian order.
+int serializeCodedUnsigned(uint64_t u, uint8_t* bary) {
+ int unsignedLength = sizeOf(u);
+ for (int i = unsignedLength - 1; i >= 0; i--) {
+ bary[i] = u & 0xff;
+ u >>= 8;
+ }
+ return unsignedLength;
+}
+
+}
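The length-descriptor scheme documented above can be sanity-checked with a tiny standalone program. This is a hypothetical usage sketch (it assumes EbmlUtil.h from this change is on the include path), not part of the commit.

    #include <stdio.h>
    #include <stdint.h>
    #include "EbmlUtil.h"

    int main() {
        uint8_t buf[8];

        // 5 fits the 1-byte form 1xxxxxxx -> coded value 0x85.
        uint64_t coded = webm::encodeUnsigned(5);
        int n = webm::serializeCodedUnsigned(coded, buf);
        printf("5   -> %d byte(s): 0x%02x\n", n, buf[0]);

        // 0x7f would collide with the reserved all-ones byte 0xff, so it widens
        // to the 2-byte form 01xxxxxx xxxxxxxx -> bytes 0x40 0x7f.
        coded = webm::encodeUnsigned(0x7f);
        n = webm::serializeCodedUnsigned(coded, buf);
        printf("127 -> %d byte(s): 0x%02x 0x%02x\n", n, buf[0], buf[1]);
        return 0;
    }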
diff --git a/media/libstagefright/webm/EbmlUtil.h b/media/libstagefright/webm/EbmlUtil.h
new file mode 100644
index 0000000..eb9c37c
--- /dev/null
+++ b/media/libstagefright/webm/EbmlUtil.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef EBMLUTIL_H_
+#define EBMLUTIL_H_
+
+#include <stdint.h>
+
+namespace webm {
+
+// Encode the id and/or size of an EBML element as bytes with a leading length-descriptor bit:
+//
+// 1xxxxxxx - 1-byte values
+// 01xxxxxx xxxxxxxx -
+// 001xxxxx xxxxxxxx xxxxxxxx -
+// 0001xxxx xxxxxxxx xxxxxxxx xxxxxxxx - ...
+// 00001xxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 000001xx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 0000001x xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 00000001 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - 8-byte values
+//
+// This function uses the fewest bytes possible.
+uint64_t encodeUnsigned(uint64_t u);
+
+// Like above but pads the input value with leading zeros up to the specified width. The length
+// descriptor is calculated based on width.
+uint64_t encodeUnsigned(uint64_t u, int width);
+
+// Serialize an EBML coded id or size in big-endian order.
+int serializeCodedUnsigned(uint64_t u, uint8_t* bary);
+
+// Calculate the length of an EBML coded id or size from its length descriptor.
+int sizeOf(uint64_t u);
+
+}
+
+#endif /* EBMLUTIL_H_ */
diff --git a/media/libstagefright/webm/LinkedBlockingQueue.h b/media/libstagefright/webm/LinkedBlockingQueue.h
new file mode 100644
index 0000000..0b6a9a1
--- /dev/null
+++ b/media/libstagefright/webm/LinkedBlockingQueue.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LINKEDBLOCKINGQUEUE_H_
+#define LINKEDBLOCKINGQUEUE_H_
+
+#include <media/stagefright/foundation/ABase.h> // for DISALLOW_EVIL_CONSTRUCTORS
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+
+namespace android {
+
+template<typename T>
+class LinkedBlockingQueue {
+ List<T> mList;
+ Mutex mLock;
+ Condition mContentAvailableCondition;
+
+ T front(bool remove) {
+ Mutex::Autolock autolock(mLock);
+ while (mList.empty()) {
+ mContentAvailableCondition.wait(mLock);
+ }
+ T e = *(mList.begin());
+ if (remove) {
+ mList.erase(mList.begin());
+ }
+ return e;
+ }
+
+ DISALLOW_EVIL_CONSTRUCTORS(LinkedBlockingQueue);
+
+public:
+ LinkedBlockingQueue() {
+ }
+
+ ~LinkedBlockingQueue() {
+ }
+
+ bool empty() {
+ Mutex::Autolock autolock(mLock);
+ return mList.empty();
+ }
+
+ void clear() {
+ Mutex::Autolock autolock(mLock);
+ mList.clear();
+ }
+
+ T peek() {
+ return front(false);
+ }
+
+ T take() {
+ return front(true);
+ }
+
+ void push(T e) {
+ Mutex::Autolock autolock(mLock);
+ mList.push_back(e);
+ mContentAvailableCondition.signal();
+ }
+};
+
+} /* namespace android */
+#endif /* LINKEDBLOCKINGQUEUE_H_ */
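A minimal usage sketch of the queue follows, assuming an Android build where libutils is available; the producer and consumer below are hypothetical, and the queue does all locking internally. This mirrors how the frame threads later in this change use WebmFrame::EOS as an end-of-stream sentinel.

    #include <pthread.h>
    #include <stdio.h>
    #include "LinkedBlockingQueue.h"

    static android::LinkedBlockingQueue<int> sQueue;

    static void *producer(void *) {
        for (int i = 0; i < 3; i++) {
            sQueue.push(i);          // wakes any thread blocked in take()/peek()
        }
        sQueue.push(-1);             // sentinel marking end of stream
        return NULL;
    }

    int main() {
        pthread_t t;
        pthread_create(&t, NULL, producer, NULL);
        for (;;) {
            int v = sQueue.take();   // blocks until an element is available
            if (v < 0) break;
            printf("got %d\n", v);
        }
        pthread_join(t, NULL);
        return 0;
    }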
diff --git a/media/libstagefright/webm/WebmConstants.h b/media/libstagefright/webm/WebmConstants.h
new file mode 100644
index 0000000..c53f458
--- /dev/null
+++ b/media/libstagefright/webm/WebmConstants.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMCONSTANTS_H_
+#define WEBMCONSTANTS_H_
+
+#include <stdint.h>
+
+namespace webm {
+
+const int kMinEbmlVoidSize = 2;
+const int64_t kMaxMetaSeekSize = 64;
+const int64_t kMkvUnknownLength = 0x01ffffffffffffffl;
+
+// EBML element id's from http://matroska.org/technical/specs/index.html
+enum Mkv {
+ kMkvEbml = 0x1A45DFA3,
+ kMkvEbmlVersion = 0x4286,
+ kMkvEbmlReadVersion = 0x42F7,
+ kMkvEbmlMaxIdlength = 0x42F2,
+ kMkvEbmlMaxSizeLength = 0x42F3,
+ kMkvDocType = 0x4282,
+ kMkvDocTypeVersion = 0x4287,
+ kMkvDocTypeReadVersion = 0x4285,
+ kMkvVoid = 0xEC,
+ kMkvSignatureSlot = 0x1B538667,
+ kMkvSignatureAlgo = 0x7E8A,
+ kMkvSignatureHash = 0x7E9A,
+ kMkvSignaturePublicKey = 0x7EA5,
+ kMkvSignature = 0x7EB5,
+ kMkvSignatureElements = 0x7E5B,
+ kMkvSignatureElementList = 0x7E7B,
+ kMkvSignedElement = 0x6532,
+ kMkvSegment = 0x18538067,
+ kMkvSeekHead = 0x114D9B74,
+ kMkvSeek = 0x4DBB,
+ kMkvSeekId = 0x53AB,
+ kMkvSeekPosition = 0x53AC,
+ kMkvInfo = 0x1549A966,
+ kMkvTimecodeScale = 0x2AD7B1,
+ kMkvSegmentDuration = 0x4489,
+ kMkvDateUtc = 0x4461,
+ kMkvMuxingApp = 0x4D80,
+ kMkvWritingApp = 0x5741,
+ kMkvCluster = 0x1F43B675,
+ kMkvTimecode = 0xE7,
+ kMkvPrevSize = 0xAB,
+ kMkvBlockGroup = 0xA0,
+ kMkvBlock = 0xA1,
+ kMkvBlockAdditions = 0x75A1,
+ kMkvBlockMore = 0xA6,
+ kMkvBlockAddId = 0xEE,
+ kMkvBlockAdditional = 0xA5,
+ kMkvBlockDuration = 0x9B,
+ kMkvReferenceBlock = 0xFB,
+ kMkvLaceNumber = 0xCC,
+ kMkvSimpleBlock = 0xA3,
+ kMkvTracks = 0x1654AE6B,
+ kMkvTrackEntry = 0xAE,
+ kMkvTrackNumber = 0xD7,
+ kMkvTrackUid = 0x73C5,
+ kMkvTrackType = 0x83,
+ kMkvFlagEnabled = 0xB9,
+ kMkvFlagDefault = 0x88,
+ kMkvFlagForced = 0x55AA,
+ kMkvFlagLacing = 0x9C,
+ kMkvDefaultDuration = 0x23E383,
+ kMkvMaxBlockAdditionId = 0x55EE,
+ kMkvName = 0x536E,
+ kMkvLanguage = 0x22B59C,
+ kMkvCodecId = 0x86,
+ kMkvCodecPrivate = 0x63A2,
+ kMkvCodecName = 0x258688,
+ kMkvVideo = 0xE0,
+ kMkvFlagInterlaced = 0x9A,
+ kMkvStereoMode = 0x53B8,
+ kMkvAlphaMode = 0x53C0,
+ kMkvPixelWidth = 0xB0,
+ kMkvPixelHeight = 0xBA,
+ kMkvPixelCropBottom = 0x54AA,
+ kMkvPixelCropTop = 0x54BB,
+ kMkvPixelCropLeft = 0x54CC,
+ kMkvPixelCropRight = 0x54DD,
+ kMkvDisplayWidth = 0x54B0,
+ kMkvDisplayHeight = 0x54BA,
+ kMkvDisplayUnit = 0x54B2,
+ kMkvAspectRatioType = 0x54B3,
+ kMkvFrameRate = 0x2383E3,
+ kMkvAudio = 0xE1,
+ kMkvSamplingFrequency = 0xB5,
+ kMkvOutputSamplingFrequency = 0x78B5,
+ kMkvChannels = 0x9F,
+ kMkvBitDepth = 0x6264,
+ kMkvCues = 0x1C53BB6B,
+ kMkvCuePoint = 0xBB,
+ kMkvCueTime = 0xB3,
+ kMkvCueTrackPositions = 0xB7,
+ kMkvCueTrack = 0xF7,
+ kMkvCueClusterPosition = 0xF1,
+ kMkvCueBlockNumber = 0x5378
+};
+
+enum TrackTypes {
+ kInvalidType = -1,
+ kVideoType = 0x1,
+ kAudioType = 0x2,
+ kComplexType = 0x3,
+ kLogoType = 0x10,
+ kSubtitleType = 0x11,
+ kButtonsType = 0x12,
+ kControlType = 0x20
+};
+
+enum TrackNum {
+ kVideoTrackNum = 0x1,
+ kAudioTrackNum = 0x2
+};
+}
+
+#endif /* WEBMCONSTANTS_H_ */
diff --git a/media/libstagefright/webm/WebmElement.cpp b/media/libstagefright/webm/WebmElement.cpp
new file mode 100644
index 0000000..a008cab
--- /dev/null
+++ b/media/libstagefright/webm/WebmElement.cpp
@@ -0,0 +1,367 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "WebmElement"
+
+#include "EbmlUtil.h"
+#include "WebmElement.h"
+#include "WebmConstants.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Log.h>
+
+#include <string.h>
+#include <unistd.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+using namespace android;
+using namespace webm;
+
+namespace {
+
+int64_t voidSize(int64_t totalSize) {
+ if (totalSize < 2) {
+ return -1;
+ }
+ if (totalSize < 9) {
+ return totalSize - 2;
+ }
+ return totalSize - 9;
+}
+
+uint64_t childrenSum(const List<sp<WebmElement> >& children) {
+ uint64_t total = 0;
+ for (List<sp<WebmElement> >::const_iterator it = children.begin();
+ it != children.end(); ++it) {
+ total += (*it)->totalSize();
+ }
+ return total;
+}
+
+void populateCommonTrackEntries(
+ int num,
+ uint64_t uid,
+ bool lacing,
+ const char *lang,
+ const char *codec,
+ TrackTypes type,
+ List<sp<WebmElement> > &ls) {
+ ls.push_back(new WebmUnsigned(kMkvTrackNumber, num));
+ ls.push_back(new WebmUnsigned(kMkvTrackUid, uid));
+ ls.push_back(new WebmUnsigned(kMkvFlagLacing, lacing));
+ ls.push_back(new WebmString(kMkvLanguage, lang));
+ ls.push_back(new WebmString(kMkvCodecId, codec));
+ ls.push_back(new WebmUnsigned(kMkvTrackType, type));
+}
+}
+
+namespace android {
+
+WebmElement::WebmElement(uint64_t id, uint64_t size)
+ : mId(id), mSize(size) {
+}
+
+WebmElement::~WebmElement() {
+}
+
+int WebmElement::serializePayloadSize(uint8_t *buf) {
+ return serializeCodedUnsigned(encodeUnsigned(mSize), buf);
+}
+
+uint64_t WebmElement::serializeInto(uint8_t *buf) {
+ uint8_t *cur = buf;
+ int head = serializeCodedUnsigned(mId, cur);
+ cur += head;
+ int neck = serializePayloadSize(cur);
+ cur += neck;
+ serializePayload(cur);
+ cur += mSize;
+ return cur - buf;
+}
+
+uint64_t WebmElement::totalSize() {
+ uint8_t buf[8];
+ // serializePayloadSize(buf) returns sizeOf(encodeUnsigned(mSize))
+ return sizeOf(mId) + serializePayloadSize(buf) + mSize;
+}
+
+uint8_t *WebmElement::serialize(uint64_t& size) {
+ size = totalSize();
+ uint8_t *buf = new uint8_t[size];
+ serializeInto(buf);
+ return buf;
+}
+
+int WebmElement::write(int fd, uint64_t& size) {
+ uint8_t buf[8];
+ size = totalSize();
+ off64_t off = ::lseek64(fd, (size - 1), SEEK_CUR) - (size - 1);
+ ::write(fd, buf, 1); // extend file
+
+ off64_t curOff = off + size;
+ off64_t alignedOff = off & ~(::sysconf(_SC_PAGE_SIZE) - 1);
+ off64_t mapSize = curOff - alignedOff;
+ off64_t pageOff = off - alignedOff;
+ void *dst = ::mmap64(NULL, mapSize, PROT_WRITE, MAP_SHARED, fd, alignedOff);
+ if (dst == MAP_FAILED) {
+ ALOGE("mmap64 failed; errno = %d", errno);
+ ALOGE("fd %d; flags: %o", fd, ::fcntl(fd, F_GETFL, 0));
+ return errno;
+ } else {
+ serializeInto((uint8_t*) dst + pageOff);
+ ::msync(dst, mapSize, MS_SYNC);
+ return ::munmap(dst, mapSize);
+ }
+}
+
+//=================================================================================================
+
+WebmUnsigned::WebmUnsigned(uint64_t id, uint64_t value)
+ : WebmElement(id, sizeOf(value)), mValue(value) {
+}
+
+void WebmUnsigned::serializePayload(uint8_t *buf) {
+ serializeCodedUnsigned(mValue, buf);
+}
+
+//=================================================================================================
+
+WebmFloat::WebmFloat(uint64_t id, double value)
+ : WebmElement(id, sizeof(double)), mValue(value) {
+}
+
+WebmFloat::WebmFloat(uint64_t id, float value)
+ : WebmElement(id, sizeof(float)), mValue(value) {
+}
+
+void WebmFloat::serializePayload(uint8_t *buf) {
+ uint64_t data;
+ if (mSize == sizeof(float)) {
+ float f = mValue;
+ data = *reinterpret_cast<const uint32_t*>(&f);
+ } else {
+ data = *reinterpret_cast<const uint64_t*>(&mValue);
+ }
+ for (int i = mSize - 1; i >= 0; --i) {
+ buf[i] = data & 0xff;
+ data >>= 8;
+ }
+}
+
+//=================================================================================================
+
+WebmBinary::WebmBinary(uint64_t id, const sp<ABuffer> &ref)
+ : WebmElement(id, ref->size()), mRef(ref) {
+}
+
+void WebmBinary::serializePayload(uint8_t *buf) {
+ memcpy(buf, mRef->data(), mRef->size());
+}
+
+//=================================================================================================
+
+WebmString::WebmString(uint64_t id, const char *str)
+ : WebmElement(id, strlen(str)), mStr(str) {
+}
+
+void WebmString::serializePayload(uint8_t *buf) {
+ memcpy(buf, mStr, strlen(mStr));
+}
+
+//=================================================================================================
+
+WebmSimpleBlock::WebmSimpleBlock(
+ int trackNum,
+ int16_t relTimecode,
+ bool key,
+ const sp<ABuffer>& orig)
+ // The 4 extra payload bytes are trackNum (1) + relative timecode (2) + flags (1);
+ // only the least significant byte of trackNum is encoded.
+ : WebmElement(kMkvSimpleBlock, orig->size() + 4),
+ mTrackNum(trackNum),
+ mRelTimecode(relTimecode),
+ mKey(key),
+ mRef(orig) {
+}
+
+void WebmSimpleBlock::serializePayload(uint8_t *buf) {
+ serializeCodedUnsigned(encodeUnsigned(mTrackNum), buf);
+ buf[1] = (mRelTimecode & 0xff00) >> 8;
+ buf[2] = mRelTimecode & 0xff;
+ buf[3] = mKey ? 0x80 : 0;
+ memcpy(buf + 4, mRef->data(), mSize - 4);
+}
+
+//=================================================================================================
+
+EbmlVoid::EbmlVoid(uint64_t totalSize)
+ : WebmElement(kMkvVoid, voidSize(totalSize)),
+ mSizeWidth(totalSize - sizeOf(kMkvVoid) - voidSize(totalSize)) {
+ CHECK_GE(voidSize(totalSize), 0);
+}
+
+int EbmlVoid::serializePayloadSize(uint8_t *buf) {
+ return serializeCodedUnsigned(encodeUnsigned(mSize, mSizeWidth), buf);
+}
+
+void EbmlVoid::serializePayload(uint8_t *buf) {
+ ::memset(buf, 0, mSize);
+ return;
+}
+
+//=================================================================================================
+
+WebmMaster::WebmMaster(uint64_t id, const List<sp<WebmElement> >& children)
+ : WebmElement(id, childrenSum(children)), mChildren(children) {
+}
+
+WebmMaster::WebmMaster(uint64_t id)
+ : WebmElement(id, 0) {
+}
+
+int WebmMaster::serializePayloadSize(uint8_t *buf) {
+ if (mSize == 0) {
+ return serializeCodedUnsigned(kMkvUnknownLength, buf);
+ }
+ return WebmElement::serializePayloadSize(buf);
+}
+
+void WebmMaster::serializePayload(uint8_t *buf) {
+ uint64_t off = 0;
+ for (List<sp<WebmElement> >::const_iterator it = mChildren.begin(); it != mChildren.end();
+ ++it) {
+ sp<WebmElement> child = (*it);
+ child->serializeInto(buf + off);
+ off += child->totalSize();
+ }
+}
+
+//=================================================================================================
+
+sp<WebmElement> WebmElement::CuePointEntry(uint64_t time, int track, uint64_t off) {
+ List<sp<WebmElement> > cuePointEntryFields;
+ cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueTrack, track));
+ cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueClusterPosition, off));
+ WebmElement *cueTrackPositions = new WebmMaster(kMkvCueTrackPositions, cuePointEntryFields);
+
+ cuePointEntryFields.clear();
+ cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueTime, time));
+ cuePointEntryFields.push_back(cueTrackPositions);
+ return new WebmMaster(kMkvCuePoint, cuePointEntryFields);
+}
+
+sp<WebmElement> WebmElement::SeekEntry(uint64_t id, uint64_t off) {
+ List<sp<WebmElement> > seekEntryFields;
+ seekEntryFields.push_back(new WebmUnsigned(kMkvSeekId, id));
+ seekEntryFields.push_back(new WebmUnsigned(kMkvSeekPosition, off));
+ return new WebmMaster(kMkvSeek, seekEntryFields);
+}
+
+sp<WebmElement> WebmElement::EbmlHeader(
+ int ver,
+ int readVer,
+ int maxIdLen,
+ int maxSizeLen,
+ int docVer,
+ int docReadVer) {
+ List<sp<WebmElement> > headerFields;
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlVersion, ver));
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlReadVersion, readVer));
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlMaxIdlength, maxIdLen));
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlMaxSizeLength, maxSizeLen));
+ headerFields.push_back(new WebmString(kMkvDocType, "webm"));
+ headerFields.push_back(new WebmUnsigned(kMkvDocTypeVersion, docVer));
+ headerFields.push_back(new WebmUnsigned(kMkvDocTypeReadVersion, docReadVer));
+ return new WebmMaster(kMkvEbml, headerFields);
+}
+
+sp<WebmElement> WebmElement::SegmentInfo(uint64_t scale, double dur) {
+ List<sp<WebmElement> > segmentInfo;
+ // place duration first; easier to patch
+ segmentInfo.push_back(new WebmFloat(kMkvSegmentDuration, dur));
+ segmentInfo.push_back(new WebmUnsigned(kMkvTimecodeScale, scale));
+ segmentInfo.push_back(new WebmString(kMkvMuxingApp, "android"));
+ segmentInfo.push_back(new WebmString(kMkvWritingApp, "android"));
+ return new WebmMaster(kMkvInfo, segmentInfo);
+}
+
+sp<WebmElement> WebmElement::AudioTrackEntry(
+ int chans,
+ double rate,
+ const sp<ABuffer> &buf,
+ int bps,
+ uint64_t uid,
+ bool lacing,
+ const char *lang) {
+ if (uid == 0) {
+ uid = kAudioTrackNum;
+ }
+
+ List<sp<WebmElement> > trackEntryFields;
+ populateCommonTrackEntries(
+ kAudioTrackNum,
+ uid,
+ lacing,
+ lang,
+ "A_VORBIS",
+ kAudioType,
+ trackEntryFields);
+
+ List<sp<WebmElement> > audioInfo;
+ audioInfo.push_back(new WebmUnsigned(kMkvChannels, chans));
+ audioInfo.push_back(new WebmFloat(kMkvSamplingFrequency, rate));
+ if (bps) {
+ WebmElement *bitDepth = new WebmUnsigned(kMkvBitDepth, bps);
+ audioInfo.push_back(bitDepth);
+ }
+
+ trackEntryFields.push_back(new WebmMaster(kMkvAudio, audioInfo));
+ trackEntryFields.push_back(new WebmBinary(kMkvCodecPrivate, buf));
+ return new WebmMaster(kMkvTrackEntry, trackEntryFields);
+}
+
+sp<WebmElement> WebmElement::VideoTrackEntry(
+ uint64_t width,
+ uint64_t height,
+ uint64_t uid,
+ bool lacing,
+ const char *lang) {
+ if (uid == 0) {
+ uid = kVideoTrackNum;
+ }
+
+ List<sp<WebmElement> > trackEntryFields;
+ populateCommonTrackEntries(
+ kVideoTrackNum,
+ uid,
+ lacing,
+ lang,
+ "V_VP8",
+ kVideoType,
+ trackEntryFields);
+
+ List<sp<WebmElement> > videoInfo;
+ videoInfo.push_back(new WebmUnsigned(kMkvPixelWidth, width));
+ videoInfo.push_back(new WebmUnsigned(kMkvPixelHeight, height));
+
+ trackEntryFields.push_back(new WebmMaster(kMkvVideo, videoInfo));
+ return new WebmMaster(kMkvTrackEntry, trackEntryFields);
+}
+} /* namespace android */
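As a quick illustration of the element classes above, the following hypothetical snippet builds the default EBML header and serializes it to a heap buffer. It assumes a libstagefright build environment; the first four serialized bytes are the kMkvEbml id 0x1A45DFA3.

    #include <stdio.h>
    #include "WebmElement.h"

    int main() {
        // Default header: DocType "webm", doc type version 2.
        android::sp<android::WebmElement> header = android::WebmElement::EbmlHeader();

        uint64_t size;
        uint8_t *bytes = header->serialize(size);   // caller owns the returned buffer
        printf("EBML header: %llu bytes, id bytes %02x %02x %02x %02x\n",
               (unsigned long long) size, bytes[0], bytes[1], bytes[2], bytes[3]);
        delete[] bytes;
        return 0;
    }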
diff --git a/media/libstagefright/webm/WebmElement.h b/media/libstagefright/webm/WebmElement.h
new file mode 100644
index 0000000..f19933e
--- /dev/null
+++ b/media/libstagefright/webm/WebmElement.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMELEMENT_H_
+#define WEBMELEMENT_H_
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <utils/List.h>
+
+namespace android {
+
+struct WebmElement : public LightRefBase<WebmElement> {
+ const uint64_t mId, mSize;
+
+ WebmElement(uint64_t id, uint64_t size);
+ virtual ~WebmElement();
+
+ virtual int serializePayloadSize(uint8_t *buf);
+ virtual void serializePayload(uint8_t *buf)=0;
+ uint64_t totalSize();
+ uint64_t serializeInto(uint8_t *buf);
+ uint8_t *serialize(uint64_t& size);
+ int write(int fd, uint64_t& size);
+
+ static sp<WebmElement> EbmlHeader(
+ int ver = 1,
+ int readVer = 1,
+ int maxIdLen = 4,
+ int maxSizeLen = 8,
+ int docVer = 2,
+ int docReadVer = 2);
+
+ static sp<WebmElement> SegmentInfo(uint64_t scale = 1000000, double dur = 0);
+
+ static sp<WebmElement> AudioTrackEntry(
+ int chans,
+ double rate,
+ const sp<ABuffer> &buf,
+ int bps = 0,
+ uint64_t uid = 0,
+ bool lacing = false,
+ const char *lang = "und");
+
+ static sp<WebmElement> VideoTrackEntry(
+ uint64_t width,
+ uint64_t height,
+ uint64_t uid = 0,
+ bool lacing = false,
+ const char *lang = "und");
+
+ static sp<WebmElement> SeekEntry(uint64_t id, uint64_t off);
+ static sp<WebmElement> CuePointEntry(uint64_t time, int track, uint64_t off);
+ static sp<WebmElement> SimpleBlock(
+ int trackNum,
+ int16_t timecode,
+ bool key,
+ const uint8_t *data,
+ uint64_t dataSize);
+};
+
+struct WebmUnsigned : public WebmElement {
+ WebmUnsigned(uint64_t id, uint64_t value);
+ const uint64_t mValue;
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmFloat : public WebmElement {
+ const double mValue;
+ WebmFloat(uint64_t id, float value);
+ WebmFloat(uint64_t id, double value);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmBinary : public WebmElement {
+ const sp<ABuffer> mRef;
+ WebmBinary(uint64_t id, const sp<ABuffer> &ref);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmString : public WebmElement {
+ const char *const mStr;
+ WebmString(uint64_t id, const char *str);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmSimpleBlock : public WebmElement {
+ const int mTrackNum;
+ const int16_t mRelTimecode;
+ const bool mKey;
+ const sp<ABuffer> mRef;
+
+ WebmSimpleBlock(int trackNum, int16_t timecode, bool key, const sp<ABuffer>& orig);
+ void serializePayload(uint8_t *buf);
+};
+
+struct EbmlVoid : public WebmElement {
+ const uint64_t mSizeWidth;
+ EbmlVoid(uint64_t totalSize);
+ int serializePayloadSize(uint8_t *buf);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmMaster : public WebmElement {
+ const List<sp<WebmElement> > mChildren;
+ WebmMaster(uint64_t id);
+ WebmMaster(uint64_t id, const List<sp<WebmElement> > &children);
+ int serializePayloadSize(uint8_t *buf);
+ void serializePayload(uint8_t *buf);
+};
+
+} /* namespace android */
+#endif /* WEBMELEMENT_H_ */
diff --git a/media/libstagefright/webm/WebmFrame.cpp b/media/libstagefright/webm/WebmFrame.cpp
new file mode 100644
index 0000000..e5134ed
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrame.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WebmFrame"
+
+#include "WebmFrame.h"
+#include "WebmConstants.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <unistd.h>
+
+using namespace android;
+using namespace webm;
+
+namespace {
+sp<ABuffer> toABuffer(MediaBuffer *mbuf) {
+ sp<ABuffer> abuf = new ABuffer(mbuf->range_length());
+ memcpy(abuf->data(), (uint8_t*) mbuf->data() + mbuf->range_offset(), mbuf->range_length());
+ return abuf;
+}
+}
+
+namespace android {
+
+const sp<WebmFrame> WebmFrame::EOS = new WebmFrame();
+
+WebmFrame::WebmFrame()
+ : mType(kInvalidType),
+ mKey(false),
+ mAbsTimecode(UINT64_MAX),
+ mData(new ABuffer(0)),
+ mEos(true) {
+}
+
+WebmFrame::WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *mbuf)
+ : mType(type),
+ mKey(key),
+ mAbsTimecode(absTimecode),
+ mData(toABuffer(mbuf)),
+ mEos(false) {
+}
+
+sp<WebmElement> WebmFrame::SimpleBlock(uint64_t baseTimecode) const {
+ return new WebmSimpleBlock(
+ mType == kVideoType ? kVideoTrackNum : kAudioTrackNum,
+ mAbsTimecode - baseTimecode,
+ mKey,
+ mData);
+}
+
+bool WebmFrame::operator<(const WebmFrame &other) const {
+ if (this->mEos) {
+ return false;
+ }
+ if (other.mEos) {
+ return true;
+ }
+ if (this->mAbsTimecode == other.mAbsTimecode) {
+ if (this->mType == kAudioType && other.mType == kVideoType) {
+ return true;
+ }
+ if (this->mType == kVideoType && other.mType == kAudioType) {
+ return false;
+ }
+ return false;
+ }
+ return this->mAbsTimecode < other.mAbsTimecode;
+}
+} /* namespace android */
diff --git a/media/libstagefright/webm/WebmFrame.h b/media/libstagefright/webm/WebmFrame.h
new file mode 100644
index 0000000..4f0b055
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrame.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMFRAME_H_
+#define WEBMFRAME_H_
+
+#include "WebmElement.h"
+
+namespace android {
+
+struct WebmFrame : LightRefBase<WebmFrame> {
+public:
+ const int mType;
+ const bool mKey;
+ const uint64_t mAbsTimecode;
+ const sp<ABuffer> mData;
+ const bool mEos;
+
+ WebmFrame();
+ WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *buf);
+ ~WebmFrame() {}
+
+ sp<WebmElement> SimpleBlock(uint64_t baseTimecode) const;
+
+ bool operator<(const WebmFrame &other) const;
+
+ static const sp<WebmFrame> EOS;
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(WebmFrame);
+};
+
+} /* namespace android */
+#endif /* WEBMFRAME_H_ */
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
new file mode 100644
index 0000000..a4b8a42
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -0,0 +1,399 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WebmFrameThread"
+
+#include "WebmConstants.h"
+#include "WebmFrameThread.h"
+
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <utils/Log.h>
+#include <inttypes.h>
+
+using namespace webm;
+
+namespace android {
+
+void *WebmFrameThread::wrap(void *arg) {
+ WebmFrameThread *worker = reinterpret_cast<WebmFrameThread*>(arg);
+ worker->run();
+ return NULL;
+}
+
+status_t WebmFrameThread::start() {
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+ pthread_create(&mThread, &attr, WebmFrameThread::wrap, this);
+ pthread_attr_destroy(&attr);
+ return OK;
+}
+
+status_t WebmFrameThread::stop() {
+ void *status;
+ pthread_join(mThread, &status);
+ return (status_t)(intptr_t)status;
+}
+
+//=================================================================================================
+
+WebmFrameSourceThread::WebmFrameSourceThread(
+ int type,
+ LinkedBlockingQueue<const sp<WebmFrame> >& sink)
+ : mType(type), mSink(sink) {
+}
+
+//=================================================================================================
+
+WebmFrameSinkThread::WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ sp<WebmFrameSourceThread> videoThread,
+ sp<WebmFrameSourceThread> audioThread,
+ List<sp<WebmElement> >& cues)
+ : mFd(fd),
+ mSegmentDataStart(off),
+ mVideoFrames(videoThread->mSink),
+ mAudioFrames(audioThread->mSink),
+ mCues(cues),
+ mDone(true) {
+}
+
+WebmFrameSinkThread::WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
+ LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
+ List<sp<WebmElement> >& cues)
+ : mFd(fd),
+ mSegmentDataStart(off),
+ mVideoFrames(videoSource),
+ mAudioFrames(audioSource),
+ mCues(cues),
+ mDone(true) {
+}
+
+// Initializes a webm cluster with its starting timecode.
+//
+// frames:
+// sequence of input audio/video frames received from the source.
+//
+// clusterTimecodeL:
+// the starting timecode of the cluster; this is the timecode of the first
+// frame since frames are ordered by timestamp.
+//
+// children:
+// list to hold child elements in a webm cluster (start timecode and
+// simple blocks).
+//
+// static
+void WebmFrameSinkThread::initCluster(
+ List<const sp<WebmFrame> >& frames,
+ uint64_t& clusterTimecodeL,
+ List<sp<WebmElement> >& children) {
+ CHECK(!frames.empty() && children.empty());
+
+ const sp<WebmFrame> f = *(frames.begin());
+ clusterTimecodeL = f->mAbsTimecode;
+ WebmUnsigned *clusterTimecode = new WebmUnsigned(kMkvTimecode, clusterTimecodeL);
+ children.clear();
+ children.push_back(clusterTimecode);
+}
+
+void WebmFrameSinkThread::writeCluster(List<sp<WebmElement> >& children) {
+ // children must contain at least one simpleblock and its timecode
+ CHECK_GE(children.size(), 2);
+
+ uint64_t size;
+ sp<WebmElement> cluster = new WebmMaster(kMkvCluster, children);
+ cluster->write(mFd, size);
+ children.clear();
+}
+
+// Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
+//
+// last:
+// current flush is triggered by EOS instead of a second outstanding video key frame.
+void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
+ if (frames.empty()) {
+ return;
+ }
+
+ uint64_t clusterTimecodeL;
+ List<sp<WebmElement> > children;
+ initCluster(frames, clusterTimecodeL, children);
+
+ uint64_t cueTime = clusterTimecodeL;
+ off_t fpos = ::lseek(mFd, 0, SEEK_CUR);
+ size_t n = frames.size();
+ if (!last) {
+ // If we are not flushing the last sequence of outstanding frames, flushFrames
+ // must have been called right after we have pushed a second outstanding video key
+ // frame (the last frame), which belongs to the next cluster; also hold back on
+ // flushing the second-to-last frame until we check its type. An audio frame
+ // should precede the aforementioned video key frame in the next sequence, and a
+ // video frame should be the last frame in the current (to-be-flushed) sequence.
+ CHECK_GE(n, 2);
+ n -= 2;
+ }
+
+ for (size_t i = 0; i < n; i++) {
+ const sp<WebmFrame> f = *(frames.begin());
+ if (f->mType == kVideoType && f->mKey) {
+ cueTime = f->mAbsTimecode;
+ }
+
+ if (f->mAbsTimecode - clusterTimecodeL > INT16_MAX) {
+ writeCluster(children);
+ initCluster(frames, clusterTimecodeL, children);
+ }
+
+ frames.erase(frames.begin());
+ children.push_back(f->SimpleBlock(clusterTimecodeL));
+ }
+
+ // equivalent to last==false
+ if (!frames.empty()) {
+ // decide whether to write out the second to last frame.
+ const sp<WebmFrame> secondLastFrame = *(frames.begin());
+ if (secondLastFrame->mType == kVideoType) {
+ frames.erase(frames.begin());
+ children.push_back(secondLastFrame->SimpleBlock(clusterTimecodeL));
+ }
+ }
+
+ writeCluster(children);
+ sp<WebmElement> cuePoint = WebmElement::CuePointEntry(cueTime, 1, fpos - mSegmentDataStart);
+ mCues.push_back(cuePoint);
+}
+
+status_t WebmFrameSinkThread::start() {
+ mDone = false;
+ return WebmFrameThread::start();
+}
+
+status_t WebmFrameSinkThread::stop() {
+ mDone = true;
+ mVideoFrames.push(WebmFrame::EOS);
+ mAudioFrames.push(WebmFrame::EOS);
+ return WebmFrameThread::stop();
+}
+
+void WebmFrameSinkThread::run() {
+ int numVideoKeyFrames = 0;
+ List<const sp<WebmFrame> > outstandingFrames;
+ while (!mDone) {
+ ALOGV("wait v frame");
+ const sp<WebmFrame> videoFrame = mVideoFrames.peek();
+ ALOGV("v frame: %p", videoFrame.get());
+
+ ALOGV("wait a frame");
+ const sp<WebmFrame> audioFrame = mAudioFrames.peek();
+ ALOGV("a frame: %p", audioFrame.get());
+
+ if (videoFrame->mEos && audioFrame->mEos) {
+ break;
+ }
+
+ if (*audioFrame < *videoFrame) {
+ ALOGV("take a frame");
+ mAudioFrames.take();
+ outstandingFrames.push_back(audioFrame);
+ } else {
+ ALOGV("take v frame");
+ mVideoFrames.take();
+ outstandingFrames.push_back(videoFrame);
+ if (videoFrame->mKey)
+ numVideoKeyFrames++;
+ }
+
+ if (numVideoKeyFrames == 2) {
+ flushFrames(outstandingFrames, /* last = */ false);
+ numVideoKeyFrames--;
+ }
+ }
+ ALOGV("flushing last cluster (size %zu)", outstandingFrames.size());
+ flushFrames(outstandingFrames, /* last = */ true);
+ mDone = true;
+}
+
+//=================================================================================================
+
+static const int64_t kInitialDelayTimeUs = 700000LL;
+
+void WebmFrameMediaSourceThread::clearFlags() {
+ mDone = false;
+ mPaused = false;
+ mResumed = false;
+ mStarted = false;
+ mReachedEOS = false;
+}
+
+WebmFrameMediaSourceThread::WebmFrameMediaSourceThread(
+ const sp<MediaSource>& source,
+ int type,
+ LinkedBlockingQueue<const sp<WebmFrame> >& sink,
+ uint64_t timeCodeScale,
+ int64_t startTimeRealUs,
+ int32_t startTimeOffsetMs,
+ int numTracks,
+ bool realTimeRecording)
+ : WebmFrameSourceThread(type, sink),
+ mSource(source),
+ mTimeCodeScale(timeCodeScale),
+ mTrackDurationUs(0) {
+ clearFlags();
+ mStartTimeUs = startTimeRealUs;
+ if (realTimeRecording && numTracks > 1) {
+ /*
+ * Copied from MPEG4Writer
+ *
+ * This extra delay of accepting incoming audio/video signals
+ * helps to align a/v start time at the beginning of a recording
+ * session, and it also helps eliminate the "recording" sound for
+ * camcorder applications.
+ *
+ * If client does not set the start time offset, we fall back to
+ * use the default initial delay value.
+ */
+ int64_t startTimeOffsetUs = startTimeOffsetMs * 1000LL;
+ if (startTimeOffsetUs < 0) { // Start time offset was not set
+ startTimeOffsetUs = kInitialDelayTimeUs;
+ }
+ mStartTimeUs += startTimeOffsetUs;
+ ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
+ }
+}
+
+status_t WebmFrameMediaSourceThread::start() {
+ sp<MetaData> meta = new MetaData;
+ meta->setInt64(kKeyTime, mStartTimeUs);
+ status_t err = mSource->start(meta.get());
+ if (err != OK) {
+ mDone = true;
+ mReachedEOS = true;
+ return err;
+ } else {
+ mStarted = true;
+ return WebmFrameThread::start();
+ }
+}
+
+status_t WebmFrameMediaSourceThread::resume() {
+ if (!mDone && mPaused) {
+ mPaused = false;
+ mResumed = true;
+ }
+ return OK;
+}
+
+status_t WebmFrameMediaSourceThread::pause() {
+ if (mStarted) {
+ mPaused = true;
+ }
+ return OK;
+}
+
+status_t WebmFrameMediaSourceThread::stop() {
+ if (mStarted) {
+ mStarted = false;
+ mDone = true;
+ mSource->stop();
+ return WebmFrameThread::stop();
+ }
+ return OK;
+}
+
+void WebmFrameMediaSourceThread::run() {
+ int32_t count = 0;
+ int64_t timestampUs = 0xdeadbeef;
+ int64_t lastTimestampUs = 0; // Previous sample time stamp
+ int64_t lastDurationUs = 0; // Previous sample duration
+ int64_t previousPausedDurationUs = 0;
+
+ const uint64_t kUninitialized = 0xffffffffffffffffL;
+ mStartTimeUs = kUninitialized;
+
+ status_t err = OK;
+ MediaBuffer *buffer;
+ while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) {
+ if (buffer->range_length() == 0) {
+ buffer->release();
+ buffer = NULL;
+ continue;
+ }
+
+ sp<MetaData> md = buffer->meta_data();
+ CHECK(md->findInt64(kKeyTime, &timestampUs));
+ if (mStartTimeUs == kUninitialized) {
+ mStartTimeUs = timestampUs;
+ }
+ timestampUs -= mStartTimeUs;
+
+ if (mPaused && !mResumed) {
+ lastDurationUs = timestampUs - lastTimestampUs;
+ lastTimestampUs = timestampUs;
+ buffer->release();
+ buffer = NULL;
+ continue;
+ }
+ ++count;
+
+ // adjust time-stamps after pause/resume
+ if (mResumed) {
+ int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
+ CHECK_GE(durExcludingEarlierPausesUs, 0ll);
+ int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
+ CHECK_GE(pausedDurationUs, lastDurationUs);
+ previousPausedDurationUs += pausedDurationUs - lastDurationUs;
+ mResumed = false;
+ }
+ timestampUs -= previousPausedDurationUs;
+ CHECK_GE(timestampUs, 0ll);
+
+ int32_t isSync = false;
+ md->findInt32(kKeyIsSyncFrame, &isSync);
+ const sp<WebmFrame> f = new WebmFrame(
+ mType,
+ isSync,
+ timestampUs * 1000 / mTimeCodeScale,
+ buffer);
+ mSink.push(f);
+
+ ALOGV(
+ "%s %s frame at %" PRId64 " size %zu\n",
+ mType == kVideoType ? "video" : "audio",
+ isSync ? "I" : "P",
+ timestampUs * 1000 / mTimeCodeScale,
+ buffer->range_length());
+
+ buffer->release();
+ buffer = NULL;
+
+ if (timestampUs > mTrackDurationUs) {
+ mTrackDurationUs = timestampUs;
+ }
+ lastDurationUs = timestampUs - lastTimestampUs;
+ lastTimestampUs = timestampUs;
+ }
+
+ mTrackDurationUs += lastDurationUs;
+ mSink.push(WebmFrame::EOS);
+}
+}
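The clustering rule implemented by WebmFrameSinkThread::run() and flushFrames() can be summarized with a simplified standalone sketch: buffered frames are flushed as a cluster whenever a second video key frame shows up, so every cluster starts on a key frame. This is a hypothetical simplification; the real flushFrames() also holds back a trailing audio frame that belongs to the next cluster.

    #include <stdio.h>
    #include <vector>

    struct Frame { bool video; bool key; };

    int main() {
        std::vector<Frame> frames = {
            {true, true}, {false, false}, {true, false}, {false, false},
            {true, true},                    // second video key frame -> flush before it
            {true, false}, {false, false}
        };
        int videoKeyFrames = 0;
        size_t clusterStart = 0;
        for (size_t i = 0; i < frames.size(); i++) {
            if (frames[i].video && frames[i].key && ++videoKeyFrames == 2) {
                printf("cluster: frames [%zu, %zu)\n", clusterStart, i);
                clusterStart = i;            // the new key frame opens the next cluster
                videoKeyFrames = 1;
            }
        }
        printf("last cluster: frames [%zu, %zu)\n", clusterStart, frames.size());
        return 0;
    }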
diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/WebmFrameThread.h
new file mode 100644
index 0000000..d65d9b7
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrameThread.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMFRAMETHREAD_H_
+#define WEBMFRAMETHREAD_H_
+
+#include "WebmFrame.h"
+#include "LinkedBlockingQueue.h"
+
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaSource.h>
+
+#include <utils/List.h>
+#include <utils/Errors.h>
+
+#include <pthread.h>
+
+namespace android {
+
+class WebmFrameThread : public LightRefBase<WebmFrameThread> {
+public:
+ virtual void run() = 0;
+ virtual bool running() { return false; }
+ virtual status_t start();
+ virtual status_t pause() { return OK; }
+ virtual status_t resume() { return OK; }
+ virtual status_t stop();
+ virtual ~WebmFrameThread() { stop(); }
+ static void *wrap(void *arg);
+
+protected:
+ WebmFrameThread()
+ : mThread(0) {
+ }
+
+private:
+ pthread_t mThread;
+ DISALLOW_EVIL_CONSTRUCTORS(WebmFrameThread);
+};
+
+//=================================================================================================
+
+class WebmFrameSourceThread;
+class WebmFrameSinkThread : public WebmFrameThread {
+public:
+ WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ sp<WebmFrameSourceThread> videoThread,
+ sp<WebmFrameSourceThread> audioThread,
+ List<sp<WebmElement> >& cues);
+
+ WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
+ LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
+ List<sp<WebmElement> >& cues);
+
+ void run();
+ bool running() {
+ return !mDone;
+ }
+ status_t start();
+ status_t stop();
+
+private:
+ const int& mFd;
+ const uint64_t& mSegmentDataStart;
+ LinkedBlockingQueue<const sp<WebmFrame> >& mVideoFrames;
+ LinkedBlockingQueue<const sp<WebmFrame> >& mAudioFrames;
+ List<sp<WebmElement> >& mCues;
+
+ volatile bool mDone;
+
+ static void initCluster(
+ List<const sp<WebmFrame> >& frames,
+ uint64_t& clusterTimecodeL,
+ List<sp<WebmElement> >& children);
+ void writeCluster(List<sp<WebmElement> >& children);
+ void flushFrames(List<const sp<WebmFrame> >& frames, bool last);
+};
+
+//=================================================================================================
+
+class WebmFrameSourceThread : public WebmFrameThread {
+public:
+ WebmFrameSourceThread(int type, LinkedBlockingQueue<const sp<WebmFrame> >& sink);
+ virtual int64_t getDurationUs() = 0;
+protected:
+ const int mType;
+ LinkedBlockingQueue<const sp<WebmFrame> >& mSink;
+
+ friend class WebmFrameSinkThread;
+};
+
+//=================================================================================================
+
+class WebmFrameEmptySourceThread : public WebmFrameSourceThread {
+public:
+ WebmFrameEmptySourceThread(int type, LinkedBlockingQueue<const sp<WebmFrame> >& sink)
+ : WebmFrameSourceThread(type, sink) {
+ }
+ void run() { mSink.push(WebmFrame::EOS); }
+ int64_t getDurationUs() { return 0; }
+};
+
+//=================================================================================================
+
+class WebmFrameMediaSourceThread: public WebmFrameSourceThread {
+public:
+ WebmFrameMediaSourceThread(
+ const sp<MediaSource>& source,
+ int type,
+ LinkedBlockingQueue<const sp<WebmFrame> >& sink,
+ uint64_t timeCodeScale,
+ int64_t startTimeRealUs,
+ int32_t startTimeOffsetMs,
+ int numPeers,
+ bool realTimeRecording);
+
+ void run();
+ status_t start();
+ status_t resume();
+ status_t pause();
+ status_t stop();
+ int64_t getDurationUs() {
+ return mTrackDurationUs;
+ }
+
+private:
+ const sp<MediaSource> mSource;
+ const uint64_t mTimeCodeScale;
+ uint64_t mStartTimeUs;
+
+ volatile bool mDone;
+ volatile bool mPaused;
+ volatile bool mResumed;
+ volatile bool mStarted;
+ volatile bool mReachedEOS;
+ int64_t mTrackDurationUs;
+
+ void clearFlags();
+};
+} /* namespace android */
+
+#endif /* WEBMFRAMETHREAD_H_ */
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
new file mode 100644
index 0000000..069961b
--- /dev/null
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -0,0 +1,550 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "WebmWriter"
+
+#include "EbmlUtil.h"
+#include "WebmWriter.h"
+
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <utils/Errors.h>
+
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <inttypes.h>
+
+using namespace webm;
+
+namespace {
+size_t XiphLaceCodeLen(size_t size) {
+ return size / 0xff + 1;
+}
+
+size_t XiphLaceEnc(uint8_t *buf, size_t size) {
+ size_t i;
+ for (i = 0; size >= 0xff; ++i, size -= 0xff) {
+ buf[i] = 0xff;
+ }
+ buf[i++] = size;
+ return i;
+}
+}
+
+namespace android {
+
+static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
+
+WebmWriter::WebmWriter(int fd)
+ : mFd(dup(fd)),
+ mInitCheck(mFd < 0 ? NO_INIT : OK),
+ mTimeCodeScale(1000000),
+ mStartTimestampUs(0),
+ mStartTimeOffsetMs(0),
+ mSegmentOffset(0),
+ mSegmentDataStart(0),
+ mInfoOffset(0),
+ mInfoSize(0),
+ mTracksOffset(0),
+ mCuesOffset(0),
+ mPaused(false),
+ mStarted(false),
+ mIsFileSizeLimitExplicitlyRequested(false),
+ mIsRealTimeRecording(false),
+ mStreamableFile(true),
+ mEstimatedCuesSize(0) {
+ mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
+ mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
+ mSinkThread = new WebmFrameSinkThread(
+ mFd,
+ mSegmentDataStart,
+ mStreams[kVideoIndex].mSink,
+ mStreams[kAudioIndex].mSink,
+ mCuePoints);
+}
+
+WebmWriter::WebmWriter(const char *filename)
+ : mInitCheck(NO_INIT),
+ mTimeCodeScale(1000000),
+ mStartTimestampUs(0),
+ mStartTimeOffsetMs(0),
+ mSegmentOffset(0),
+ mSegmentDataStart(0),
+ mInfoOffset(0),
+ mInfoSize(0),
+ mTracksOffset(0),
+ mCuesOffset(0),
+ mPaused(false),
+ mStarted(false),
+ mIsFileSizeLimitExplicitlyRequested(false),
+ mIsRealTimeRecording(false),
+ mStreamableFile(true),
+ mEstimatedCuesSize(0) {
+ mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ if (mFd >= 0) {
+ ALOGV("fd %d; flags: %o", mFd, fcntl(mFd, F_GETFL, 0));
+ mInitCheck = OK;
+ }
+ mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
+ mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
+ mSinkThread = new WebmFrameSinkThread(
+ mFd,
+ mSegmentDataStart,
+ mStreams[kVideoIndex].mSink,
+ mStreams[kAudioIndex].mSink,
+ mCuePoints);
+}
+
+// static
+sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {
+ int32_t width, height;
+ CHECK(md->findInt32(kKeyWidth, &width));
+ CHECK(md->findInt32(kKeyHeight, &height));
+ return WebmElement::VideoTrackEntry(width, height);
+}
+
+// static
+sp<WebmElement> WebmWriter::audioTrack(const sp<MetaData>& md) {
+ int32_t nChannels, samplerate;
+ uint32_t type;
+ const void *headerData1;
+ const char headerData2[] = { 3, 'v', 'o', 'r', 'b', 'i', 's', 7, 0, 0, 0,
+ 'a', 'n', 'd', 'r', 'o', 'i', 'd', 0, 0, 0, 0, 1 };
+ const void *headerData3;
+ size_t headerSize1, headerSize2 = sizeof(headerData2), headerSize3;
+
+ CHECK(md->findInt32(kKeyChannelCount, &nChannels));
+ CHECK(md->findInt32(kKeySampleRate, &samplerate));
+ CHECK(md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1));
+ CHECK(md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3));
+
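+ // The CodecPrivate assembled below is Xiph-laced: one byte giving the packet
+ // count minus one (2), the lace-coded sizes of the first two headers, then the
+ // three Vorbis header packets (identification, comment, setup) back to back.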
+ size_t codecPrivateSize = 1;
+ codecPrivateSize += XiphLaceCodeLen(headerSize1);
+ codecPrivateSize += XiphLaceCodeLen(headerSize2);
+ codecPrivateSize += headerSize1 + headerSize2 + headerSize3;
+
+ off_t off = 0;
+ sp<ABuffer> codecPrivateBuf = new ABuffer(codecPrivateSize);
+ uint8_t *codecPrivateData = codecPrivateBuf->data();
+ codecPrivateData[off++] = 2;
+
+ off += XiphLaceEnc(codecPrivateData + off, headerSize1);
+ off += XiphLaceEnc(codecPrivateData + off, headerSize2);
+
+ memcpy(codecPrivateData + off, headerData1, headerSize1);
+ off += headerSize1;
+ memcpy(codecPrivateData + off, headerData2, headerSize2);
+ off += headerSize2;
+ memcpy(codecPrivateData + off, headerData3, headerSize3);
+
+ sp<WebmElement> entry = WebmElement::AudioTrackEntry(
+ nChannels,
+ samplerate,
+ codecPrivateBuf);
+ return entry;
+}
+
+size_t WebmWriter::numTracks() {
+ Mutex::Autolock autolock(mLock);
+
+ size_t numTracks = 0;
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mTrackEntry != NULL) {
+ numTracks++;
+ }
+ }
+
+ return numTracks;
+}
+
+uint64_t WebmWriter::estimateCuesSize(int32_t bitRate) {
+ // This implementation is based on estimateMoovBoxSize in MPEG4Writer.
+ //
+ // Statistical analysis shows that metadata usually accounts
+ // for a small portion of the total file size, usually < 0.6%.
+
+ // The default MIN_CUES_SIZE is set to 0.6% x 1MB / 2,
+ // where 1MB is the common file size limit for MMS applications.
+ // The default MAX_CUES_SIZE value is based on about a 3-minute
+ // video recording with a bit rate of about 3 Mbps, because
+ // statistics also show that most captured videos are going
+ // to be shorter than 3 minutes.
+
+ // If the estimation is wrong, we will pay the price of wasting
+ // some reserved space. This should not happen so often statistically.
+ static const int32_t factor = 2;
+ static const int64_t MIN_CUES_SIZE = 3 * 1024; // 3 KB
+ static const int64_t MAX_CUES_SIZE = (180 * 3000000 * 6LL / 8000);
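+ // For reference: MIN_CUES_SIZE is 0.6% x 1MB / 2 = ~3 KB, and MAX_CUES_SIZE
+ // works out to 180 s x 3 Mbps x 0.6% / 8 = 405,000 bytes. The estimate chosen
+ // below is returned multiplied by 'factor'.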
+ int64_t size = MIN_CUES_SIZE;
+
+ // Max file size limit is set
+ if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
+ size = mMaxFileSizeLimitBytes * 6 / 1000;
+ }
+
+ // Max file duration limit is set
+ if (mMaxFileDurationLimitUs != 0) {
+ if (bitRate > 0) {
+ int64_t size2 = ((mMaxFileDurationLimitUs * bitRate * 6) / 1000 / 8000000);
+ if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
+ // When both file size and duration limits are set,
+ // we use the smaller limit of the two.
+ if (size > size2) {
+ size = size2;
+ }
+ } else {
+ // Only max file duration limit is set
+ size = size2;
+ }
+ }
+ }
+
+ if (size < MIN_CUES_SIZE) {
+ size = MIN_CUES_SIZE;
+ }
+
+ // Any long-duration recording will probably end up as a
+ // non-streamable webm file.
+ if (size > MAX_CUES_SIZE) {
+ size = MAX_CUES_SIZE;
+ }
+
+ ALOGV("limits: %" PRId64 "/%" PRId64 " bytes/us,"
+ " bit rate: %d bps and the estimated cues size %" PRId64 " bytes",
+ mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
+ return factor * size;
+}
+
+void WebmWriter::initStream(size_t idx) {
+ if (mStreams[idx].mThread != NULL) {
+ return;
+ }
+ if (mStreams[idx].mSource == NULL) {
+ ALOGV("adding dummy source ... ");
+ mStreams[idx].mThread = new WebmFrameEmptySourceThread(
+ mStreams[idx].mType, mStreams[idx].mSink);
+ } else {
+ ALOGV("adding source %p", mStreams[idx].mSource.get());
+ mStreams[idx].mThread = new WebmFrameMediaSourceThread(
+ mStreams[idx].mSource,
+ mStreams[idx].mType,
+ mStreams[idx].mSink,
+ mTimeCodeScale,
+ mStartTimestampUs,
+ mStartTimeOffsetMs,
+ numTracks(),
+ mIsRealTimeRecording);
+ }
+}
+
+void WebmWriter::release() {
+ close(mFd);
+ mFd = -1;
+ mInitCheck = NO_INIT;
+ mStarted = false;
+}
+
+status_t WebmWriter::reset() {
+ if (mInitCheck != OK) {
+ return OK;
+ } else {
+ if (!mStarted) {
+ release();
+ return OK;
+ }
+ }
+
+ status_t err = OK;
+ int64_t maxDurationUs = 0;
+ int64_t minDurationUs = 0x7fffffffffffffffLL;
+ for (int i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mThread == NULL) {
+ continue;
+ }
+
+ status_t status = mStreams[i].mThread->stop();
+ if (err == OK && status != OK) {
+ err = status;
+ }
+
+ int64_t durationUs = mStreams[i].mThread->getDurationUs();
+ if (durationUs > maxDurationUs) {
+ maxDurationUs = durationUs;
+ }
+ if (durationUs < minDurationUs) {
+ minDurationUs = durationUs;
+ }
+ }
+
+ if (numTracks() > 1) {
+ ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us", minDurationUs, maxDurationUs);
+ }
+
+ mSinkThread->stop();
+
+ // Do not write out movie header on error.
+ if (err != OK) {
+ release();
+ return err;
+ }
+
+ sp<WebmElement> cues = new WebmMaster(kMkvCues, mCuePoints);
+ uint64_t cuesSize = cues->totalSize();
+ // TRICKY: Even when the cues fit in the space we reserved, if they do not fit
+ // perfectly we still need to check whether there is enough "extra space" left to
+ // write an EBML void element.
+ if (cuesSize != mEstimatedCuesSize && cuesSize > mEstimatedCuesSize - kMinEbmlVoidSize) {
+ mCuesOffset = ::lseek(mFd, 0, SEEK_CUR);
+ cues->write(mFd, cuesSize);
+ } else {
+ uint64_t spaceSize;
+ ::lseek(mFd, mCuesOffset, SEEK_SET);
+ cues->write(mFd, cuesSize);
+ sp<WebmElement> space = new EbmlVoid(mEstimatedCuesSize - cuesSize);
+ space->write(mFd, spaceSize);
+ }
+
+ mCuePoints.clear();
+ mStreams[kVideoIndex].mSink.clear();
+ mStreams[kAudioIndex].mSink.clear();
+
+ uint8_t bary[sizeof(uint64_t)];
+ uint64_t totalSize = ::lseek(mFd, 0, SEEK_END);
+ uint64_t segmentSize = totalSize - mSegmentDataStart;
+ ::lseek(mFd, mSegmentOffset + sizeOf(kMkvSegment), SEEK_SET);
+ uint64_t segmentSizeCoded = encodeUnsigned(segmentSize, sizeOf(kMkvUnknownLength));
+ serializeCodedUnsigned(segmentSizeCoded, bary);
+ ::write(mFd, bary, sizeOf(kMkvUnknownLength));
+
+ uint64_t durationOffset = mInfoOffset + sizeOf(kMkvInfo) + sizeOf(mInfoSize)
+ + sizeOf(kMkvSegmentDuration) + sizeOf(sizeof(double));
+ sp<WebmElement> duration = new WebmFloat(
+ kMkvSegmentDuration,
+ (double) (maxDurationUs * 1000 / mTimeCodeScale));
+ duration->serializePayload(bary);
+ ::lseek(mFd, durationOffset, SEEK_SET);
+ ::write(mFd, bary, sizeof(double));
+
+ List<sp<WebmElement> > seekEntries;
+ seekEntries.push_back(WebmElement::SeekEntry(kMkvInfo, mInfoOffset - mSegmentDataStart));
+ seekEntries.push_back(WebmElement::SeekEntry(kMkvTracks, mTracksOffset - mSegmentDataStart));
+ seekEntries.push_back(WebmElement::SeekEntry(kMkvCues, mCuesOffset - mSegmentDataStart));
+ sp<WebmElement> seekHead = new WebmMaster(kMkvSeekHead, seekEntries);
+
+ uint64_t metaSeekSize;
+ ::lseek(mFd, mSegmentDataStart, SEEK_SET);
+ seekHead->write(mFd, metaSeekSize);
+
+ uint64_t spaceSize;
+ sp<WebmElement> space = new EbmlVoid(kMaxMetaSeekSize - metaSeekSize);
+ space->write(mFd, spaceSize);
+
+ release();
+ return err;
+}
+
+status_t WebmWriter::addSource(const sp<MediaSource> &source) {
+ Mutex::Autolock l(mLock);
+ if (mStarted) {
+ ALOGE("Attempt to add source AFTER recording is started");
+ return UNKNOWN_ERROR;
+ }
+
+ // At most 2 tracks can be supported.
+ if (mStreams[kVideoIndex].mTrackEntry != NULL
+ && mStreams[kAudioIndex].mTrackEntry != NULL) {
+ ALOGE("Too many tracks (2) to add");
+ return ERROR_UNSUPPORTED;
+ }
+
+ CHECK(source != NULL);
+
+ // A track of type other than video or audio is not supported.
+ const char *mime;
+ source->getFormat()->findCString(kKeyMIMEType, &mime);
+ const char *vp8 = MEDIA_MIMETYPE_VIDEO_VP8;
+ const char *vorbis = MEDIA_MIMETYPE_AUDIO_VORBIS;
+
+ size_t streamIndex;
+ if (!strncasecmp(mime, vp8, strlen(vp8))) {
+ streamIndex = kVideoIndex;
+ } else if (!strncasecmp(mime, vorbis, strlen(vorbis))) {
+ streamIndex = kAudioIndex;
+ } else {
+ ALOGE("Track (%s) other than %s or %s is not supported", mime, vp8, vorbis);
+ return ERROR_UNSUPPORTED;
+ }
+
+ // No more than one video or one audio track is supported.
+ if (mStreams[streamIndex].mTrackEntry != NULL) {
+ ALOGE("%s track already exists", mStreams[streamIndex].mName);
+ return ERROR_UNSUPPORTED;
+ }
+
+ // This is the first track of either audio or video.
+ // Go ahead to add the track.
+ mStreams[streamIndex].mSource = source;
+ mStreams[streamIndex].mTrackEntry = mStreams[streamIndex].mMakeTrack(source->getFormat());
+
+ return OK;
+}
+
+status_t WebmWriter::start(MetaData *params) {
+ if (mInitCheck != OK) {
+ return UNKNOWN_ERROR;
+ }
+
+ if (mStreams[kVideoIndex].mTrackEntry == NULL
+ && mStreams[kAudioIndex].mTrackEntry == NULL) {
+ ALOGE("No source added");
+ return INVALID_OPERATION;
+ }
+
+ if (mMaxFileSizeLimitBytes != 0) {
+ mIsFileSizeLimitExplicitlyRequested = true;
+ }
+
+ if (params) {
+ int32_t isRealTimeRecording = 0; // default to non-real-time if the key is absent
+ params->findInt32(kKeyRealTimeRecording, &isRealTimeRecording);
+ mIsRealTimeRecording = isRealTimeRecording;
+ }
+
+ if (mStarted) {
+ if (mPaused) {
+ mPaused = false;
+ mStreams[kAudioIndex].mThread->resume();
+ mStreams[kVideoIndex].mThread->resume();
+ }
+ return OK;
+ }
+
+ if (params) {
+ int32_t tcsl;
+ if (params->findInt32(kKeyTimeScale, &tcsl)) {
+ mTimeCodeScale = tcsl;
+ }
+ }
+ CHECK_GT(mTimeCodeScale, 0);
+ ALOGV("movie time scale: %" PRIu64, mTimeCodeScale);
+
+ /*
+ * When the requested file size limit is small, the priority
+ * is to meet the file size limit requirement, rather than
+ * to make the file streamable. mStreamableFile does not tell
+ * whether the actual recorded file is streamable or not.
+ */
+ mStreamableFile = (!mMaxFileSizeLimitBytes)
+ || (mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes);
+
+ /*
+ * Write various metadata.
+ */
+ sp<WebmElement> ebml, segment, info, seekHead, tracks, cues;
+ ebml = WebmElement::EbmlHeader();
+ segment = new WebmMaster(kMkvSegment);
+ seekHead = new EbmlVoid(kMaxMetaSeekSize);
+ info = WebmElement::SegmentInfo(mTimeCodeScale, 0);
+
+ List<sp<WebmElement> > children;
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mTrackEntry != NULL) {
+ children.push_back(mStreams[i].mTrackEntry);
+ }
+ }
+ tracks = new WebmMaster(kMkvTracks, children);
+
+ if (!mStreamableFile) {
+ cues = NULL;
+ } else {
+ int32_t bitRate = -1;
+ if (params) {
+ params->findInt32(kKeyBitRate, &bitRate);
+ }
+ mEstimatedCuesSize = estimateCuesSize(bitRate);
+ CHECK_GE(mEstimatedCuesSize, 8);
+ cues = new EbmlVoid(mEstimatedCuesSize);
+ }
+
+ sp<WebmElement> elems[] = { ebml, segment, seekHead, info, tracks, cues };
+ size_t nElems = sizeof(elems) / sizeof(elems[0]);
+ uint64_t offsets[nElems];
+ uint64_t sizes[nElems];
+ for (uint32_t i = 0; i < nElems; i++) {
+ WebmElement *e = elems[i].get();
+ if (!e) {
+ continue;
+ }
+
+ uint64_t size;
+ offsets[i] = ::lseek(mFd, 0, SEEK_CUR);
+ sizes[i] = e->mSize;
+ e->write(mFd, size);
+ }
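+ // offsets[]/sizes[] follow the order of elems[] above: 0 = EBML header,
+ // 1 = Segment, 2 = meta-seek void, 3 = Info, 4 = Tracks, 5 = Cues placeholder
+ // (NULL and skipped when the file is not streamable).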
+
+ mSegmentOffset = offsets[1];
+ mSegmentDataStart = offsets[2];
+ mInfoOffset = offsets[3];
+ mInfoSize = sizes[3];
+ mTracksOffset = offsets[4];
+ mCuesOffset = offsets[5];
+
+ // start threads
+ if (params) {
+ params->findInt64(kKeyTime, &mStartTimestampUs);
+ }
+
+ initStream(kAudioIndex);
+ initStream(kVideoIndex);
+
+ mStreams[kAudioIndex].mThread->start();
+ mStreams[kVideoIndex].mThread->start();
+ mSinkThread->start();
+
+ mStarted = true;
+ return OK;
+}
+
+status_t WebmWriter::pause() {
+ if (mInitCheck != OK) {
+ return OK;
+ }
+ mPaused = true;
+ status_t err = OK;
+ for (int i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mThread == NULL) {
+ continue;
+ }
+ status_t status = mStreams[i].mThread->pause();
+ if (status != OK) {
+ err = status;
+ }
+ }
+ return err;
+}
+
+status_t WebmWriter::stop() {
+ return reset();
+}
+
+bool WebmWriter::reachedEOS() {
+ return !mSinkThread->running();
+}
+} /* namespace android */
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
new file mode 100644
index 0000000..36b6965
--- /dev/null
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMWRITER_H_
+#define WEBMWRITER_H_
+
+#include "WebmConstants.h"
+#include "WebmFrameThread.h"
+#include "LinkedBlockingQueue.h"
+
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaWriter.h>
+
+#include <utils/Errors.h>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
+
+#include <stdint.h>
+
+using namespace webm;
+
+namespace android {
+
+class WebmWriter : public MediaWriter {
+public:
+ WebmWriter(int fd);
+ WebmWriter(const char *filename);
+ ~WebmWriter() { reset(); }
+
+
+ virtual status_t addSource(const sp<MediaSource> &source);
+ virtual status_t start(MetaData *param = NULL);
+ virtual status_t stop();
+ virtual status_t pause();
+ virtual bool reachedEOS();
+
+ virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
+ virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
+
+private:
+ int mFd;
+ status_t mInitCheck;
+
+ uint64_t mTimeCodeScale;
+ int64_t mStartTimestampUs;
+ int32_t mStartTimeOffsetMs;
+
+ uint64_t mSegmentOffset;
+ uint64_t mSegmentDataStart;
+ uint64_t mInfoOffset;
+ uint64_t mInfoSize;
+ uint64_t mTracksOffset;
+ uint64_t mCuesOffset;
+
+ bool mPaused;
+ bool mStarted;
+ bool mIsFileSizeLimitExplicitlyRequested;
+ bool mIsRealTimeRecording;
+ bool mStreamableFile;
+ uint64_t mEstimatedCuesSize;
+
+ Mutex mLock;
+ List<sp<WebmElement> > mCuePoints;
+
+ enum {
+ kAudioIndex = 0,
+ kVideoIndex = 1,
+ kMaxStreams = 2,
+ };
+
+ struct WebmStream {
+ int mType;
+ const char *mName;
+ sp<WebmElement> (*mMakeTrack)(const sp<MetaData>&);
+
+ sp<MediaSource> mSource;
+ sp<WebmElement> mTrackEntry;
+ sp<WebmFrameSourceThread> mThread;
+ LinkedBlockingQueue<const sp<WebmFrame> > mSink;
+
+ WebmStream()
+ : mType(kInvalidType),
+ mName("Invalid"),
+ mMakeTrack(NULL) {
+ }
+
+ WebmStream(int type, const char *name, sp<WebmElement> (*makeTrack)(const sp<MetaData>&))
+ : mType(type),
+ mName(name),
+ mMakeTrack(makeTrack) {
+ }
+
+ WebmStream &operator=(const WebmStream &other) {
+ mType = other.mType;
+ mName = other.mName;
+ mMakeTrack = other.mMakeTrack;
+ return *this;
+ }
+ };
+ WebmStream mStreams[kMaxStreams];
+
+ sp<WebmFrameSinkThread> mSinkThread;
+
+ size_t numTracks();
+ uint64_t estimateCuesSize(int32_t bitRate);
+ void initStream(size_t idx);
+ void release();
+ status_t reset();
+
+ static sp<WebmElement> videoTrack(const sp<MetaData>& md);
+ static sp<WebmElement> audioTrack(const sp<MetaData>& md);
+
+ DISALLOW_EVIL_CONSTRUCTORS(WebmWriter);
+};
+
+} /* namespace android */
+#endif /* WEBMWRITER_H_ */
diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp
index 04e02c1..2f4af5b 100644
--- a/media/libstagefright/wifi-display/VideoFormats.cpp
+++ b/media/libstagefright/wifi-display/VideoFormats.cpp
@@ -435,7 +435,7 @@ AString VideoFormats::getFormatSpec(bool forM4Message) const {
// max-hres (none or 2 byte)
// max-vres (none or 2 byte)
- return StringPrintf(
+ return AStringPrintf(
"%02x 00 %02x %02x %08x %08x %08x 00 0000 0000 00 none none",
forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType),
mConfigs[mNativeType][mNativeIndex].profile,
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
index 1887b8b..e88a3bd 100644
--- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
@@ -685,9 +685,8 @@ status_t RTPSender::onRTCPData(const sp<ABuffer> &buffer) {
return OK;
}
-status_t RTPSender::parseReceiverReport(const uint8_t *data, size_t size) {
- // hexdump(data, size);
-
+status_t RTPSender::parseReceiverReport(
+ const uint8_t *data, size_t /* size */) {
float fractionLost = data[12] / 256.0f;
ALOGI("lost %.2f %% of packets during report interval.",
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 753b3ec..2834a66 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -74,19 +74,6 @@ Converter::Converter(
}
}
-static void ReleaseMediaBufferReference(const sp<ABuffer> &accessUnit) {
- void *mbuf;
- if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf)
- && mbuf != NULL) {
- ALOGV("releasing mbuf %p", mbuf);
-
- accessUnit->meta()->setPointer("mediaBuffer", NULL);
-
- static_cast<MediaBuffer *>(mbuf)->release();
- mbuf = NULL;
- }
-}
-
void Converter::releaseEncoder() {
if (mEncoder == NULL) {
return;
@@ -95,18 +82,7 @@ void Converter::releaseEncoder() {
mEncoder->release();
mEncoder.clear();
- while (!mInputBufferQueue.empty()) {
- sp<ABuffer> accessUnit = *mInputBufferQueue.begin();
- mInputBufferQueue.erase(mInputBufferQueue.begin());
-
- ReleaseMediaBufferReference(accessUnit);
- }
-
- for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) {
- sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i);
- ReleaseMediaBufferReference(accessUnit);
- }
-
+ mInputBufferQueue.clear();
mEncoderInputBuffers.clear();
mEncoderOutputBuffers.clear();
}
@@ -328,7 +304,7 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) {
sp<ABuffer> accessUnit;
CHECK(msg->findBuffer("accessUnit", &accessUnit));
- ReleaseMediaBufferReference(accessUnit);
+ accessUnit->setMediaBufferBase(NULL);
}
break;
}
@@ -351,15 +327,16 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) {
ALOGI("dropping frame.");
}
- ReleaseMediaBufferReference(accessUnit);
+ accessUnit->setMediaBufferBase(NULL);
break;
}
#if 0
- void *mbuf;
- if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf)
- && mbuf != NULL) {
+ MediaBuffer *mbuf =
+ (MediaBuffer *)(accessUnit->getMediaBufferBase());
+ if (mbuf != NULL) {
ALOGI("queueing mbuf %p", mbuf);
+ mbuf->release();
}
#endif
@@ -647,13 +624,13 @@ status_t Converter::feedEncoderInputBuffers() {
buffer->data(),
buffer->size());
- void *mediaBuffer;
- if (buffer->meta()->findPointer("mediaBuffer", &mediaBuffer)
- && mediaBuffer != NULL) {
- mEncoderInputBuffers.itemAt(bufferIndex)->meta()
- ->setPointer("mediaBuffer", mediaBuffer);
+ MediaBuffer *mediaBuffer =
+ (MediaBuffer *)(buffer->getMediaBufferBase());
+ if (mediaBuffer != NULL) {
+ mEncoderInputBuffers.itemAt(bufferIndex)->setMediaBufferBase(
+ mediaBuffer);
- buffer->meta()->setPointer("mediaBuffer", NULL);
+ buffer->setMediaBufferBase(NULL);
}
} else {
flags = MediaCodec::BUFFER_FLAG_EOS;
diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp
index 7e8891d..86b918f 100644
--- a/media/libstagefright/wifi-display/source/MediaPuller.cpp
+++ b/media/libstagefright/wifi-display/source/MediaPuller.cpp
@@ -179,7 +179,7 @@ void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {
} else {
// video encoder will release MediaBuffer when done
// with underlying data.
- accessUnit->meta()->setPointer("mediaBuffer", mbuf);
+ accessUnit->setMediaBufferBase(mbuf);
}
sp<AMessage> notify = mNotify->dup();
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 286ea13..2cb4786 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -29,6 +29,7 @@
#include <binder/IServiceManager.h>
#include <cutils/properties.h>
#include <media/IHDCP.h>
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -749,7 +750,8 @@ status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
mExtractor = new NuMediaExtractor;
- status_t err = mExtractor->setDataSource(mMediaPath.c_str());
+ status_t err = mExtractor->setDataSource(
+ NULL /* httpService */, mMediaPath.c_str());
if (err != OK) {
return err;
@@ -1053,7 +1055,7 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource(
err = source->setMaxAcquiredBufferCount(numInputBuffers);
CHECK_EQ(err, (status_t)OK);
- mBufferQueue = source->getBufferQueue();
+ mProducer = source->getProducer();
return OK;
}
@@ -1077,7 +1079,7 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {
}
sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() {
- return mBufferQueue;
+ return mProducer;
}
void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
index 5c8ee94..2824143 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.h
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.h
@@ -25,7 +25,6 @@
namespace android {
struct ABuffer;
-struct BufferQueue;
struct IHDCP;
struct IGraphicBufferProducer;
struct MediaPuller;
@@ -111,7 +110,7 @@ private:
int64_t mLastLifesignUs;
- sp<BufferQueue> mBufferQueue;
+ sp<IGraphicBufferProducer> mProducer;
KeyedVector<size_t, sp<Track> > mTracks;
ssize_t mVideoTrackIndex;
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
index cc8dee3..59d7e6e 100644
--- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
@@ -79,6 +79,8 @@ status_t RepeaterSource::stop() {
ALOGV("stopping");
+ status_t err = mSource->stop();
+
if (mLooper != NULL) {
mLooper->stop();
mLooper.clear();
@@ -92,7 +94,6 @@ status_t RepeaterSource::stop() {
mBuffer = NULL;
}
- status_t err = mSource->stop();
ALOGV("stopped");
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
index edcc087..4c5ad17 100644
--- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
@@ -106,7 +106,7 @@ void TSPacketizer::Track::extractCSDIfNecessary() {
|| !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
for (size_t i = 0;; ++i) {
sp<ABuffer> csd;
- if (!mFormat->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
+ if (!mFormat->findBuffer(AStringPrintf("csd-%d", i).c_str(), &csd)) {
break;
}
@@ -216,7 +216,7 @@ sp<ABuffer> TSPacketizer::Track::prependADTSHeader(
uint8_t *ptr = dup->data();
*ptr++ = 0xff;
- *ptr++ = 0xf1; // b11110001, ID=0, layer=0, protection_absent=1
+ *ptr++ = 0xf9; // b11111001, ID=1(MPEG-2), layer=0, protection_absent=1
*ptr++ =
profile << 6
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index 05e4018..7eb8b73 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -43,6 +43,10 @@
namespace android {
// static
+const int64_t WifiDisplaySource::kReaperIntervalUs;
+const int64_t WifiDisplaySource::kTeardownTriggerTimeouSecs;
+const int64_t WifiDisplaySource::kPlaybackSessionTimeoutSecs;
+const int64_t WifiDisplaySource::kPlaybackSessionTimeoutUs;
const AString WifiDisplaySource::sUserAgent = MakeUserAgent();
WifiDisplaySource::WifiDisplaySource(
@@ -594,7 +598,7 @@ status_t WifiDisplaySource::sendM3(int32_t sessionID) {
AppendCommonResponse(&request, mNextCSeq);
request.append("Content-Type: text/parameters\r\n");
- request.append(StringPrintf("Content-Length: %d\r\n", body.size()));
+ request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
request.append("\r\n");
request.append(body);
@@ -635,26 +639,26 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) {
if (mSinkSupportsAudio) {
body.append(
- StringPrintf("wfd_audio_codecs: %s\r\n",
+ AStringPrintf("wfd_audio_codecs: %s\r\n",
(mUsingPCMAudio
? "LPCM 00000002 00" // 2 ch PCM 48kHz
: "AAC 00000001 00"))); // 2 ch AAC 48kHz
}
body.append(
- StringPrintf(
+ AStringPrintf(
"wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n",
mClientInfo.mLocalIP.c_str()));
body.append(
- StringPrintf(
+ AStringPrintf(
"wfd_client_rtp_ports: %s\r\n", mWfdClientRtpPorts.c_str()));
AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
AppendCommonResponse(&request, mNextCSeq);
request.append("Content-Type: text/parameters\r\n");
- request.append(StringPrintf("Content-Length: %d\r\n", body.size()));
+ request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
request.append("\r\n");
request.append(body);
@@ -700,7 +704,7 @@ status_t WifiDisplaySource::sendTrigger(
AppendCommonResponse(&request, mNextCSeq);
request.append("Content-Type: text/parameters\r\n");
- request.append(StringPrintf("Content-Length: %d\r\n", body.size()));
+ request.append(AStringPrintf("Content-Length: %d\r\n", body.size()));
request.append("\r\n");
request.append(body);
@@ -725,7 +729,7 @@ status_t WifiDisplaySource::sendM16(int32_t sessionID) {
CHECK_EQ(sessionID, mClientSessionID);
request.append(
- StringPrintf("Session: %d\r\n", mClientInfo.mPlaybackSessionID));
+ AStringPrintf("Session: %d\r\n", mClientInfo.mPlaybackSessionID));
request.append("\r\n"); // Empty body
status_t err =
@@ -746,7 +750,7 @@ status_t WifiDisplaySource::sendM16(int32_t sessionID) {
}
status_t WifiDisplaySource::onReceiveM1Response(
- int32_t sessionID, const sp<ParsedMessage> &msg) {
+ int32_t /* sessionID */, const sp<ParsedMessage> &msg) {
int32_t statusCode;
if (!msg->getStatusCode(&statusCode)) {
return ERROR_MALFORMED;
@@ -991,7 +995,7 @@ status_t WifiDisplaySource::onReceiveM4Response(
}
status_t WifiDisplaySource::onReceiveM5Response(
- int32_t sessionID, const sp<ParsedMessage> &msg) {
+ int32_t /* sessionID */, const sp<ParsedMessage> &msg) {
int32_t statusCode;
if (!msg->getStatusCode(&statusCode)) {
return ERROR_MALFORMED;
@@ -1005,7 +1009,7 @@ status_t WifiDisplaySource::onReceiveM5Response(
}
status_t WifiDisplaySource::onReceiveM16Response(
- int32_t sessionID, const sp<ParsedMessage> &msg) {
+ int32_t sessionID, const sp<ParsedMessage> & /* msg */) {
// If only the response was required to include a "Session:" header...
CHECK_EQ(sessionID, mClientSessionID);
@@ -1301,7 +1305,7 @@ status_t WifiDisplaySource::onSetupRequest(
if (rtpMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) {
response.append(
- StringPrintf(
+ AStringPrintf(
"Transport: RTP/AVP/TCP;interleaved=%d-%d;",
clientRtp, clientRtcp));
} else {
@@ -1314,14 +1318,14 @@ status_t WifiDisplaySource::onSetupRequest(
if (clientRtcp >= 0) {
response.append(
- StringPrintf(
+ AStringPrintf(
"Transport: RTP/AVP/%s;unicast;client_port=%d-%d;"
"server_port=%d-%d\r\n",
transportString.c_str(),
clientRtp, clientRtcp, serverRtp, serverRtp + 1));
} else {
response.append(
- StringPrintf(
+ AStringPrintf(
"Transport: RTP/AVP/%s;unicast;client_port=%d;"
"server_port=%d\r\n",
transportString.c_str(),
@@ -1581,15 +1585,15 @@ void WifiDisplaySource::AppendCommonResponse(
response->append(buf);
response->append("\r\n");
- response->append(StringPrintf("Server: %s\r\n", sUserAgent.c_str()));
+ response->append(AStringPrintf("Server: %s\r\n", sUserAgent.c_str()));
if (cseq >= 0) {
- response->append(StringPrintf("CSeq: %d\r\n", cseq));
+ response->append(AStringPrintf("CSeq: %d\r\n", cseq));
}
if (playbackSessionID >= 0ll) {
response->append(
- StringPrintf(
+ AStringPrintf(
"Session: %d;timeout=%lld\r\n",
playbackSessionID, kPlaybackSessionTimeoutSecs));
}
@@ -1680,7 +1684,7 @@ WifiDisplaySource::HDCPObserver::HDCPObserver(
}
void WifiDisplaySource::HDCPObserver::notify(
- int msg, int ext1, int ext2, const Parcel *obj) {
+ int msg, int ext1, int ext2, const Parcel * /* obj */) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("msg", msg);
notify->setInt32("ext1", ext1);
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
index b3f7b1b..bb86dfc 100644
--- a/media/libstagefright/yuv/Android.mk
+++ b/media/libstagefright/yuv/Android.mk
@@ -12,5 +12,7 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE:= libstagefright_yuv
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
index 7b9000b..bb3e2fd 100644
--- a/media/libstagefright/yuv/YUVImage.cpp
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -226,8 +226,8 @@ void YUVImage::fastCopyRectangle420Planar(
&ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
int32_t yDestOffsetIncrement;
- int32_t uDestOffsetIncrement;
- int32_t vDestOffsetIncrement;
+ int32_t uDestOffsetIncrement = 0;
+ int32_t vDestOffsetIncrement = 0;
destImage.getOffsetIncrementsPerDataRow(
&yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
@@ -309,7 +309,7 @@ void YUVImage::fastCopyRectangle420SemiPlanar(
int32_t yDestOffsetIncrement;
int32_t uDestOffsetIncrement;
- int32_t vDestOffsetIncrement;
+ int32_t vDestOffsetIncrement = 0;
destImage.getOffsetIncrementsPerDataRow(
&yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
@@ -393,9 +393,9 @@ bool YUVImage::writeToPPM(const char *filename) const {
fprintf(fp, "255\n");
for (int32_t y = 0; y < mHeight; ++y) {
for (int32_t x = 0; x < mWidth; ++x) {
- uint8_t yValue;
- uint8_t uValue;
- uint8_t vValue;
+ uint8_t yValue = 0u;
+ uint8_t uValue = 0u;
+ uint8_t vValue = 0u;
getPixelValue(x, y, &yValue, &uValue, & vValue);
uint8_t rValue;
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index 1ac647a..3a280f0 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -15,6 +15,8 @@ LOCAL_SRC_FILES:= \
LOCAL_SHARED_LIBRARIES := \
libaudioflinger \
+ libaudiopolicyservice \
+ libcamera_metadata\
libcameraservice \
libmedialogservice \
libcutils \
@@ -23,7 +25,8 @@ LOCAL_SHARED_LIBRARIES := \
libmediaplayerservice \
libutils \
liblog \
- libbinder
+ libbinder \
+ libsoundtriggerservice
LOCAL_STATIC_LIBRARIES := \
libregistermsext
@@ -32,8 +35,12 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libmediaplayerservice \
frameworks/av/services/medialog \
frameworks/av/services/audioflinger \
- frameworks/av/services/camera/libcameraservice
+ frameworks/av/services/audiopolicy \
+ frameworks/av/services/camera/libcameraservice \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/services/soundtrigger
LOCAL_MODULE:= mediaserver
+LOCAL_32_BIT_ONLY := true
include $(BUILD_EXECUTABLE)
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index d5207d5..af1c9e6 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -34,10 +34,11 @@
#include "MediaLogService.h"
#include "MediaPlayerService.h"
#include "AudioPolicyService.h"
+#include "SoundTriggerHwService.h"
using namespace android;
-int main(int argc, char** argv)
+int main(int argc __unused, char** argv)
{
signal(SIGPIPE, SIG_IGN);
char value[PROPERTY_VALUE_MAX];
@@ -128,6 +129,7 @@ int main(int argc, char** argv)
MediaPlayerService::instantiate();
CameraService::instantiate();
AudioPolicyService::instantiate();
+ SoundTriggerHwService::instantiate();
registerExtensions();
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk
index ac608a1..3af0956 100644
--- a/media/mtp/Android.mk
+++ b/media/mtp/Android.mk
@@ -39,9 +39,6 @@ LOCAL_MODULE:= libmtp
LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST
-# Needed for <bionic_time.h>
-LOCAL_C_INCLUDES := bionic/libc/private
-
LOCAL_SHARED_LIBRARIES := libutils libcutils liblog libusbhost libbinder
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index c4f87a0..052b700 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -51,104 +51,178 @@ void MtpDataPacket::setTransactionID(MtpTransactionID id) {
MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
}
-uint16_t MtpDataPacket::getUInt16() {
+bool MtpDataPacket::getUInt8(uint8_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
+ value = mBuffer[mOffset++];
+ return true;
+}
+
+bool MtpDataPacket::getUInt16(uint16_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
- mOffset += 2;
- return result;
+ value = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+ mOffset += sizeof(value);
+ return true;
}
-uint32_t MtpDataPacket::getUInt32() {
+bool MtpDataPacket::getUInt32(uint32_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+ value = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
((uint32_t)mBuffer[offset + 2] << 16) | ((uint32_t)mBuffer[offset + 3] << 24);
- mOffset += 4;
- return result;
+ mOffset += sizeof(value);
+ return true;
}
-uint64_t MtpDataPacket::getUInt64() {
+bool MtpDataPacket::getUInt64(uint64_t& value) {
+ if (mPacketSize - mOffset < sizeof(value))
+ return false;
int offset = mOffset;
- uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+ value = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
((uint64_t)mBuffer[offset + 6] << 48) | ((uint64_t)mBuffer[offset + 7] << 56);
- mOffset += 8;
- return result;
+ mOffset += sizeof(value);
+ return true;
}
-void MtpDataPacket::getUInt128(uint128_t& value) {
- value[0] = getUInt32();
- value[1] = getUInt32();
- value[2] = getUInt32();
- value[3] = getUInt32();
+bool MtpDataPacket::getUInt128(uint128_t& value) {
+ return getUInt32(value[0]) && getUInt32(value[1]) && getUInt32(value[2]) && getUInt32(value[3]);
}
-void MtpDataPacket::getString(MtpStringBuffer& string)
+bool MtpDataPacket::getString(MtpStringBuffer& string)
{
- string.readFromPacket(this);
+ return string.readFromPacket(this);
}
Int8List* MtpDataPacket::getAInt8() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int8List* result = new Int8List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt8());
+ for (uint32_t i = 0; i < count; i++) {
+ int8_t value;
+ if (!getInt8(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt8List* MtpDataPacket::getAUInt8() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt8List* result = new UInt8List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt8());
+ for (uint32_t i = 0; i < count; i++) {
+ uint8_t value;
+ if (!getUInt8(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int16List* MtpDataPacket::getAInt16() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int16List* result = new Int16List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt16());
+ for (uint32_t i = 0; i < count; i++) {
+ int16_t value;
+ if (!getInt16(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt16List* MtpDataPacket::getAUInt16() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt16List* result = new UInt16List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt16());
+ for (uint32_t i = 0; i < count; i++) {
+ uint16_t value;
+ if (!getUInt16(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int32List* MtpDataPacket::getAInt32() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int32List* result = new Int32List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt32());
+ for (uint32_t i = 0; i < count; i++) {
+ int32_t value;
+ if (!getInt32(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt32List* MtpDataPacket::getAUInt32() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt32List* result = new UInt32List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt32());
+ for (uint32_t i = 0; i < count; i++) {
+ uint32_t value;
+ if (!getUInt32(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
Int64List* MtpDataPacket::getAInt64() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
Int64List* result = new Int64List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getInt64());
+ for (uint32_t i = 0; i < count; i++) {
+ int64_t value;
+ if (!getInt64(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
UInt64List* MtpDataPacket::getAUInt64() {
+ uint32_t count;
+ if (!getUInt32(count))
+ return NULL;
UInt64List* result = new UInt64List;
- int count = getUInt32();
- for (int i = 0; i < count; i++)
- result->push(getUInt64());
+ for (uint32_t i = 0; i < count; i++) {
+ uint64_t value;
+ if (!getUInt64(value)) {
+ delete result;
+ return NULL;
+ }
+ result->push(value);
+ }
return result;
}
@@ -363,7 +437,7 @@ int MtpDataPacket::write(int fd) {
}
int MtpDataPacket::writeData(int fd, void* data, uint32_t length) {
- allocate(length);
+ allocate(length + MTP_CONTAINER_HEADER_SIZE);
memcpy(mBuffer + MTP_CONTAINER_HEADER_SIZE, data, length);
length += MTP_CONTAINER_HEADER_SIZE;
MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
index 2b81063..13d3bd9 100644
--- a/media/mtp/MtpDataPacket.h
+++ b/media/mtp/MtpDataPacket.h
@@ -30,7 +30,7 @@ class MtpStringBuffer;
class MtpDataPacket : public MtpPacket {
private:
// current offset for get/put methods
- int mOffset;
+ size_t mOffset;
public:
MtpDataPacket();
@@ -42,17 +42,18 @@ public:
void setTransactionID(MtpTransactionID id);
inline const uint8_t* getData() const { return mBuffer + MTP_CONTAINER_HEADER_SIZE; }
- inline uint8_t getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
- inline int8_t getInt8() { return (int8_t)mBuffer[mOffset++]; }
- uint16_t getUInt16();
- inline int16_t getInt16() { return (int16_t)getUInt16(); }
- uint32_t getUInt32();
- inline int32_t getInt32() { return (int32_t)getUInt32(); }
- uint64_t getUInt64();
- inline int64_t getInt64() { return (int64_t)getUInt64(); }
- void getUInt128(uint128_t& value);
- inline void getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
- void getString(MtpStringBuffer& string);
+
+ bool getUInt8(uint8_t& value);
+ inline bool getInt8(int8_t& value) { return getUInt8((uint8_t&)value); }
+ bool getUInt16(uint16_t& value);
+ inline bool getInt16(int16_t& value) { return getUInt16((uint16_t&)value); }
+ bool getUInt32(uint32_t& value);
+ inline bool getInt32(int32_t& value) { return getUInt32((uint32_t&)value); }
+ bool getUInt64(uint64_t& value);
+ inline bool getInt64(int64_t& value) { return getUInt64((uint64_t&)value); }
+ bool getUInt128(uint128_t& value);
+ inline bool getInt128(int128_t& value) { return getUInt128((uint128_t&)value); }
+ bool getString(MtpStringBuffer& string);
Int8List* getAInt8();
UInt8List* getAUInt8();
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
index d672dff..3eafd6f 100644
--- a/media/mtp/MtpDevice.cpp
+++ b/media/mtp/MtpDevice.cpp
@@ -131,13 +131,22 @@ MtpDevice* MtpDevice::open(const char* deviceName, int fd) {
struct usb_endpoint_descriptor *ep_in_desc = NULL;
struct usb_endpoint_descriptor *ep_out_desc = NULL;
struct usb_endpoint_descriptor *ep_intr_desc = NULL;
+ // USB3 adds USB_DT_SS_ENDPOINT_COMP as a companion descriptor.
+ struct usb_ss_ep_comp_descriptor *ep_ss_ep_comp_desc = NULL;
for (int i = 0; i < 3; i++) {
ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter);
+ if (ep && ep->bDescriptorType == USB_DT_SS_ENDPOINT_COMP) {
+ ALOGD("Descriptor type is USB_DT_SS_ENDPOINT_COMP for USB3 \n");
+ ep_ss_ep_comp_desc = (usb_ss_ep_comp_descriptor*)ep;
+ ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter);
+ }
+
if (!ep || ep->bDescriptorType != USB_DT_ENDPOINT) {
ALOGE("endpoints not found\n");
usb_device_close(device);
return NULL;
}
+
if (ep->bmAttributes == USB_ENDPOINT_XFER_BULK) {
if (ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK)
ep_in_desc = ep;
@@ -195,7 +204,7 @@ MtpDevice::MtpDevice(struct usb_device* device, int interface,
MtpDevice::~MtpDevice() {
close();
- for (int i = 0; i < mDeviceProperties.size(); i++)
+ for (size_t i = 0; i < mDeviceProperties.size(); i++)
delete mDeviceProperties[i];
usb_request_free(mRequestIn1);
usb_request_free(mRequestIn2);
@@ -253,7 +262,7 @@ void MtpDevice::print() {
ALOGI("*** FORMAT: %s\n", MtpDebug::getFormatCodeName(format));
MtpObjectPropertyList* props = getObjectPropsSupported(format);
if (props) {
- for (int j = 0; j < props->size(); j++) {
+ for (size_t j = 0; j < props->size(); j++) {
MtpObjectProperty prop = (*props)[j];
MtpProperty* property = getObjectPropDesc(prop, format);
if (property) {
@@ -313,8 +322,10 @@ MtpDeviceInfo* MtpDevice::getDeviceInfo() {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpDeviceInfo* info = new MtpDeviceInfo;
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -346,8 +357,10 @@ MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpStorageInfo* info = new MtpStorageInfo(storageID);
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -385,8 +398,10 @@ MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpObjectInfo* info = new MtpObjectInfo(handle);
- info->read(mData);
- return info;
+ if (info->read(mData))
+ return info;
+ else
+ delete info;
}
return NULL;
}
@@ -547,8 +562,10 @@ MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpProperty* property = new MtpProperty;
- property->read(mData);
- return property;
+ if (property->read(mData))
+ return property;
+ else
+ delete property;
}
return NULL;
}
@@ -566,15 +583,17 @@ MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectForma
MtpResponseCode ret = readResponse();
if (ret == MTP_RESPONSE_OK) {
MtpProperty* property = new MtpProperty;
- property->read(mData);
- return property;
+ if (property->read(mData))
+ return property;
+ else
+ delete property;
}
return NULL;
}
bool MtpDevice::readObject(MtpObjectHandle handle,
bool (* callback)(void* data, int offset, int length, void* clientData),
- int objectSize, void* clientData) {
+ size_t objectSize, void* clientData) {
Mutex::Autolock autoLock(mMutex);
bool result = false;
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
index b69203e..9b0acbf 100644
--- a/media/mtp/MtpDevice.h
+++ b/media/mtp/MtpDevice.h
@@ -98,7 +98,7 @@ public:
bool readObject(MtpObjectHandle handle,
bool (* callback)(void* data, int offset,
int length, void* clientData),
- int objectSize, void* clientData);
+ size_t objectSize, void* clientData);
bool readObject(MtpObjectHandle handle, const char* destPath, int group,
int perm);
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
index 108e2b8..3e1dff7 100644
--- a/media/mtp/MtpDeviceInfo.cpp
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -28,7 +28,7 @@ MtpDeviceInfo::MtpDeviceInfo()
mVendorExtensionID(0),
mVendorExtensionVersion(0),
mVendorExtensionDesc(NULL),
- mFunctionalCode(0),
+ mFunctionalMode(0),
mOperations(NULL),
mEvents(NULL),
mDeviceProperties(NULL),
@@ -59,39 +59,46 @@ MtpDeviceInfo::~MtpDeviceInfo() {
free(mSerial);
}
-void MtpDeviceInfo::read(MtpDataPacket& packet) {
+bool MtpDeviceInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
// read the device info
- mStandardVersion = packet.getUInt16();
- mVendorExtensionID = packet.getUInt32();
- mVendorExtensionVersion = packet.getUInt16();
+ if (!packet.getUInt16(mStandardVersion)) return false;
+ if (!packet.getUInt32(mVendorExtensionID)) return false;
+ if (!packet.getUInt16(mVendorExtensionVersion)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVendorExtensionDesc = strdup((const char *)string);
- mFunctionalCode = packet.getUInt16();
+ if (!packet.getUInt16(mFunctionalMode)) return false;
mOperations = packet.getAUInt16();
+ if (!mOperations) return false;
mEvents = packet.getAUInt16();
+ if (!mEvents) return false;
mDeviceProperties = packet.getAUInt16();
+ if (!mDeviceProperties) return false;
mCaptureFormats = packet.getAUInt16();
+ if (!mCaptureFormats) return false;
mPlaybackFormats = packet.getAUInt16();
+ if (!mPlaybackFormats) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mManufacturer = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mModel = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVersion = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mSerial = strdup((const char *)string);
+
+ return true;
}
void MtpDeviceInfo::print() {
ALOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
- ALOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
- mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+ ALOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalMode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+ mVendorExtensionDesc, mFunctionalMode, mManufacturer, mModel, mVersion, mSerial);
}
} // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
index 2abaa10..bcda9a5 100644
--- a/media/mtp/MtpDeviceInfo.h
+++ b/media/mtp/MtpDeviceInfo.h
@@ -29,7 +29,7 @@ public:
uint32_t mVendorExtensionID;
uint16_t mVendorExtensionVersion;
char* mVendorExtensionDesc;
- uint16_t mFunctionalCode;
+ uint16_t mFunctionalMode;
UInt16List* mOperations;
UInt16List* mEvents;
MtpDevicePropertyList* mDeviceProperties;
@@ -44,7 +44,7 @@ public:
MtpDeviceInfo();
virtual ~MtpDeviceInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
index cd15343..0573104 100644
--- a/media/mtp/MtpObjectInfo.cpp
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -55,39 +55,41 @@ MtpObjectInfo::~MtpObjectInfo() {
free(mKeywords);
}
-void MtpObjectInfo::read(MtpDataPacket& packet) {
+bool MtpObjectInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
time_t time;
- mStorageID = packet.getUInt32();
- mFormat = packet.getUInt16();
- mProtectionStatus = packet.getUInt16();
- mCompressedSize = packet.getUInt32();
- mThumbFormat = packet.getUInt16();
- mThumbCompressedSize = packet.getUInt32();
- mThumbPixWidth = packet.getUInt32();
- mThumbPixHeight = packet.getUInt32();
- mImagePixWidth = packet.getUInt32();
- mImagePixHeight = packet.getUInt32();
- mImagePixDepth = packet.getUInt32();
- mParent = packet.getUInt32();
- mAssociationType = packet.getUInt16();
- mAssociationDesc = packet.getUInt32();
- mSequenceNumber = packet.getUInt32();
+ if (!packet.getUInt32(mStorageID)) return false;
+ if (!packet.getUInt16(mFormat)) return false;
+ if (!packet.getUInt16(mProtectionStatus)) return false;
+ if (!packet.getUInt32(mCompressedSize)) return false;
+ if (!packet.getUInt16(mThumbFormat)) return false;
+ if (!packet.getUInt32(mThumbCompressedSize)) return false;
+ if (!packet.getUInt32(mThumbPixWidth)) return false;
+ if (!packet.getUInt32(mThumbPixHeight)) return false;
+ if (!packet.getUInt32(mImagePixWidth)) return false;
+ if (!packet.getUInt32(mImagePixHeight)) return false;
+ if (!packet.getUInt32(mImagePixDepth)) return false;
+ if (!packet.getUInt32(mParent)) return false;
+ if (!packet.getUInt16(mAssociationType)) return false;
+ if (!packet.getUInt32(mAssociationDesc)) return false;
+ if (!packet.getUInt32(mSequenceNumber)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mName = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
if (parseDateTime((const char*)string, time))
mDateCreated = time;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
if (parseDateTime((const char*)string, time))
mDateModified = time;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mKeywords = strdup((const char *)string);
+
+ return true;
}
void MtpObjectInfo::print() {
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
index c7a449c..86780f1 100644
--- a/media/mtp/MtpObjectInfo.h
+++ b/media/mtp/MtpObjectInfo.h
@@ -50,7 +50,7 @@ public:
MtpObjectInfo(MtpObjectHandle handle);
virtual ~MtpObjectInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index dd07843..bab1335 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -52,7 +52,7 @@ void MtpPacket::reset() {
memset(mBuffer, 0, mBufferSize);
}
-void MtpPacket::allocate(int length) {
+void MtpPacket::allocate(size_t length) {
if (length > mBufferSize) {
int newLength = length + mAllocationIncrement;
mBuffer = (uint8_t *)realloc(mBuffer, newLength);
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
index 0ffb1d3..037722a 100644
--- a/media/mtp/MtpPacket.h
+++ b/media/mtp/MtpPacket.h
@@ -28,11 +28,11 @@ class MtpPacket {
protected:
uint8_t* mBuffer;
// current size of the buffer
- int mBufferSize;
+ size_t mBufferSize;
// number of bytes to add when resizing the buffer
- int mAllocationIncrement;
+ size_t mAllocationIncrement;
// size of the data in the packet
- int mPacketSize;
+ size_t mPacketSize;
public:
MtpPacket(int bufferSize);
@@ -41,7 +41,7 @@ public:
// sets packet size to the default container size and sets buffer to zero
virtual void reset();
- void allocate(int length);
+ void allocate(size_t length);
void dump();
void copyFrom(const MtpPacket& src);
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
index 375ed9a..d58e2a4 100644
--- a/media/mtp/MtpProperty.cpp
+++ b/media/mtp/MtpProperty.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "MtpProperty"
#include <inttypes.h>
+#include <cutils/compiler.h>
#include "MtpDataPacket.h"
#include "MtpDebug.h"
#include "MtpProperty.h"
@@ -105,15 +106,15 @@ MtpProperty::~MtpProperty() {
free(mMinimumValue.str);
free(mMaximumValue.str);
if (mDefaultArrayValues) {
- for (int i = 0; i < mDefaultArrayLength; i++)
+ for (uint32_t i = 0; i < mDefaultArrayLength; i++)
free(mDefaultArrayValues[i].str);
}
if (mCurrentArrayValues) {
- for (int i = 0; i < mCurrentArrayLength; i++)
+ for (uint32_t i = 0; i < mCurrentArrayLength; i++)
free(mCurrentArrayValues[i].str);
}
if (mEnumValues) {
- for (int i = 0; i < mEnumLength; i++)
+ for (uint16_t i = 0; i < mEnumLength; i++)
free(mEnumValues[i].str);
}
}
@@ -122,11 +123,14 @@ MtpProperty::~MtpProperty() {
delete[] mEnumValues;
}
-void MtpProperty::read(MtpDataPacket& packet) {
- mCode = packet.getUInt16();
+bool MtpProperty::read(MtpDataPacket& packet) {
+ uint8_t temp8;
+
+ if (!packet.getUInt16(mCode)) return false;
bool deviceProp = isDeviceProperty();
- mType = packet.getUInt16();
- mWriteable = (packet.getUInt8() == 1);
+ if (!packet.getUInt16(mType)) return false;
+ if (!packet.getUInt8(temp8)) return false;
+ mWriteable = (temp8 == 1);
switch (mType) {
case MTP_TYPE_AINT8:
case MTP_TYPE_AUINT8:
@@ -139,28 +143,36 @@ void MtpProperty::read(MtpDataPacket& packet) {
case MTP_TYPE_AINT128:
case MTP_TYPE_AUINT128:
mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
- if (deviceProp)
+ if (!mDefaultArrayValues) return false;
+ if (deviceProp) {
mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+ if (!mCurrentArrayValues) return false;
+ }
break;
default:
- readValue(packet, mDefaultValue);
- if (deviceProp)
- readValue(packet, mCurrentValue);
+ if (!readValue(packet, mDefaultValue)) return false;
+ if (deviceProp) {
+ if (!readValue(packet, mCurrentValue)) return false;
+ }
}
- if (!deviceProp)
- mGroupCode = packet.getUInt32();
- mFormFlag = packet.getUInt8();
+ if (!deviceProp) {
+ if (!packet.getUInt32(mGroupCode)) return false;
+ }
+ if (!packet.getUInt8(mFormFlag)) return false;
if (mFormFlag == kFormRange) {
- readValue(packet, mMinimumValue);
- readValue(packet, mMaximumValue);
- readValue(packet, mStepSize);
+ if (!readValue(packet, mMinimumValue)) return false;
+ if (!readValue(packet, mMaximumValue)) return false;
+ if (!readValue(packet, mStepSize)) return false;
} else if (mFormFlag == kFormEnum) {
- mEnumLength = packet.getUInt16();
+ if (!packet.getUInt16(mEnumLength)) return false;
mEnumValues = new MtpPropertyValue[mEnumLength];
- for (int i = 0; i < mEnumLength; i++)
- readValue(packet, mEnumValues[i]);
+ for (int i = 0; i < mEnumLength; i++) {
+ if (!readValue(packet, mEnumValues[i])) return false;
+ }
}
+
+ return true;
}
void MtpProperty::write(MtpDataPacket& packet) {
@@ -190,9 +202,9 @@ void MtpProperty::write(MtpDataPacket& packet) {
if (deviceProp)
writeValue(packet, mCurrentValue);
}
- packet.putUInt32(mGroupCode);
if (!deviceProp)
- packet.putUInt8(mFormFlag);
+ packet.putUInt32(mGroupCode);
+ packet.putUInt8(mFormFlag);
if (mFormFlag == kFormRange) {
writeValue(packet, mMinimumValue);
writeValue(packet, mMaximumValue);
@@ -408,57 +420,59 @@ void MtpProperty::print(MtpPropertyValue& value, MtpString& buffer) {
}
}
-void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+bool MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
MtpStringBuffer stringBuffer;
switch (mType) {
case MTP_TYPE_INT8:
case MTP_TYPE_AINT8:
- value.u.i8 = packet.getInt8();
+ if (!packet.getInt8(value.u.i8)) return false;
break;
case MTP_TYPE_UINT8:
case MTP_TYPE_AUINT8:
- value.u.u8 = packet.getUInt8();
+ if (!packet.getUInt8(value.u.u8)) return false;
break;
case MTP_TYPE_INT16:
case MTP_TYPE_AINT16:
- value.u.i16 = packet.getInt16();
+ if (!packet.getInt16(value.u.i16)) return false;
break;
case MTP_TYPE_UINT16:
case MTP_TYPE_AUINT16:
- value.u.u16 = packet.getUInt16();
+ if (!packet.getUInt16(value.u.u16)) return false;
break;
case MTP_TYPE_INT32:
case MTP_TYPE_AINT32:
- value.u.i32 = packet.getInt32();
+ if (!packet.getInt32(value.u.i32)) return false;
break;
case MTP_TYPE_UINT32:
case MTP_TYPE_AUINT32:
- value.u.u32 = packet.getUInt32();
+ if (!packet.getUInt32(value.u.u32)) return false;
break;
case MTP_TYPE_INT64:
case MTP_TYPE_AINT64:
- value.u.i64 = packet.getInt64();
+ if (!packet.getInt64(value.u.i64)) return false;
break;
case MTP_TYPE_UINT64:
case MTP_TYPE_AUINT64:
- value.u.u64 = packet.getUInt64();
+ if (!packet.getUInt64(value.u.u64)) return false;
break;
case MTP_TYPE_INT128:
case MTP_TYPE_AINT128:
- packet.getInt128(value.u.i128);
+ if (!packet.getInt128(value.u.i128)) return false;
break;
case MTP_TYPE_UINT128:
case MTP_TYPE_AUINT128:
- packet.getUInt128(value.u.u128);
+ if (!packet.getUInt128(value.u.u128)) return false;
break;
case MTP_TYPE_STR:
- packet.getString(stringBuffer);
+ if (!packet.getString(stringBuffer)) return false;
value.str = strdup(stringBuffer);
break;
default:
ALOGE("unknown type %04X in MtpProperty::readValue", mType);
+ return false;
}
+ return true;
}
void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
@@ -516,19 +530,29 @@ void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
}
}
-MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
- length = packet.getUInt32();
- if (length == 0)
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, uint32_t& length) {
+ if (!packet.getUInt32(length)) return NULL;
+
+ // Fail if resulting array is over 2GB. This is because the maximum array
+ // size may be less than SIZE_MAX on some platforms.
+ if ( CC_UNLIKELY(
+ length == 0 ||
+ length >= INT32_MAX / sizeof(MtpPropertyValue)) ) {
+ length = 0;
return NULL;
+ }
MtpPropertyValue* result = new MtpPropertyValue[length];
- for (int i = 0; i < length; i++)
- readValue(packet, result[i]);
+ for (uint32_t i = 0; i < length; i++)
+ if (!readValue(packet, result[i])) {
+ delete [] result;
+ return NULL;
+ }
return result;
}
-void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, uint32_t length) {
packet.putUInt32(length);
- for (int i = 0; i < length; i++)
+ for (uint32_t i = 0; i < length; i++)
writeValue(packet, values[i]);
}
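
The CC_UNLIKELY bound above is the core of the hardening in readArrayValues: the element count read from the packet is validated before new[] runs. A minimal standalone sketch of the same check, with illustrative names that are not part of this patch:

    #include <stdint.h>
    #include <stddef.h>

    // Reject a count of zero, or one whose total allocation would reach 2GB;
    // this mirrors the "length >= INT32_MAX / sizeof(MtpPropertyValue)" test.
    static bool arrayCountIsSane(uint32_t count, size_t elementSize) {
        return count != 0 && count < INT32_MAX / elementSize;
    }
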
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
index 06ca56e..2e2ead1 100644
--- a/media/mtp/MtpProperty.h
+++ b/media/mtp/MtpProperty.h
@@ -49,9 +49,9 @@ public:
MtpPropertyValue mCurrentValue;
// for array types
- int mDefaultArrayLength;
+ uint32_t mDefaultArrayLength;
MtpPropertyValue* mDefaultArrayValues;
- int mCurrentArrayLength;
+ uint32_t mCurrentArrayLength;
MtpPropertyValue* mCurrentArrayValues;
enum {
@@ -70,7 +70,7 @@ public:
MtpPropertyValue mStepSize;
// for enum form
- int mEnumLength;
+ uint16_t mEnumLength;
MtpPropertyValue* mEnumValues;
public:
@@ -83,7 +83,7 @@ public:
inline MtpPropertyCode getPropertyCode() const { return mCode; }
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void write(MtpDataPacket& packet);
void setDefaultValue(const uint16_t* string);
@@ -102,11 +102,11 @@ public:
}
private:
- void readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ bool readValue(MtpDataPacket& packet, MtpPropertyValue& value);
void writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
- MtpPropertyValue* readArrayValues(MtpDataPacket& packet, int& length);
+ MtpPropertyValue* readArrayValues(MtpDataPacket& packet, uint32_t& length);
void writeArrayValues(MtpDataPacket& packet,
- MtpPropertyValue* values, int length);
+ MtpPropertyValue* values, uint32_t length);
};
}; // namespace android
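
The signature change from void to bool read() means a property parsed from an untrusted packet can now report failure part-way through. A hedged caller-side sketch (hypothetical helper, not part of this patch) of how such a result would typically be handled:

    #include "MtpProperty.h"
    #include "MtpDataPacket.h"

    // Discard the half-initialized object when read() reports a malformed packet.
    static android::MtpProperty* parseProperty(android::MtpDataPacket& packet) {
        android::MtpProperty* property = new android::MtpProperty;
        if (!property->read(packet)) {
            delete property;
            return NULL;
        }
        return property;
    }
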
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
index 0e58e01..40b11b0 100644
--- a/media/mtp/MtpRequestPacket.cpp
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -27,7 +27,8 @@
namespace android {
MtpRequestPacket::MtpRequestPacket()
- : MtpPacket(512)
+ : MtpPacket(512),
+ mParameterCount(0)
{
}
@@ -37,10 +38,21 @@ MtpRequestPacket::~MtpRequestPacket() {
#ifdef MTP_DEVICE
int MtpRequestPacket::read(int fd) {
int ret = ::read(fd, mBuffer, mBufferSize);
- if (ret >= 0)
+ if (ret < 0) {
+ // file read error
+ return ret;
+ }
+
+ // request packet should have 12 byte header followed by 0 to 5 32-bit arguments
+ if (ret >= MTP_CONTAINER_HEADER_SIZE
+ && ret <= MTP_CONTAINER_HEADER_SIZE + 5 * sizeof(uint32_t)
+ && ((ret - MTP_CONTAINER_HEADER_SIZE) & 3) == 0) {
mPacketSize = ret;
- else
- mPacketSize = 0;
+ mParameterCount = (ret - MTP_CONTAINER_HEADER_SIZE) / sizeof(uint32_t);
+ } else {
+ ALOGE("Malformed MTP request packet");
+ ret = -1;
+ }
return ret;
}
#endif
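
The size check above encodes the MTP request layout: a 12-byte container header followed by zero to five 32-bit parameters. A self-contained restatement of that predicate (illustrative names; the real code uses MTP_CONTAINER_HEADER_SIZE):

    #include <stdint.h>
    #include <stddef.h>

    static const size_t kContainerHeaderSize = 12;  // MTP_CONTAINER_HEADER_SIZE

    // A request is well-formed only if its payload is a whole number of
    // uint32_t parameters, and no more than five of them.
    static bool isValidRequestSize(size_t packetSize) {
        if (packetSize < kContainerHeaderSize) return false;
        size_t payload = packetSize - kContainerHeaderSize;
        return payload <= 5 * sizeof(uint32_t) && (payload % sizeof(uint32_t)) == 0;
    }
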
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
index 1201f11..79b798d 100644
--- a/media/mtp/MtpRequestPacket.h
+++ b/media/mtp/MtpRequestPacket.h
@@ -43,6 +43,10 @@ public:
inline MtpOperationCode getOperationCode() const { return getContainerCode(); }
inline void setOperationCode(MtpOperationCode code)
{ return setContainerCode(code); }
+ inline int getParameterCount() const { return mParameterCount; }
+
+private:
+ int mParameterCount;
};
}; // namespace android
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index df87db4..07199e3 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -20,6 +20,7 @@
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <fcntl.h>
+#include <inttypes.h>
#include <errno.h>
#include <sys/stat.h>
#include <dirent.h>
@@ -93,6 +94,7 @@ static const MtpEventCode kSupportedEventCodes[] = {
MTP_EVENT_OBJECT_REMOVED,
MTP_EVENT_STORE_ADDED,
MTP_EVENT_STORE_REMOVED,
+ MTP_EVENT_DEVICE_PROP_CHANGED,
};
MtpServer::MtpServer(int fd, MtpDatabase* database, bool ptp,
@@ -124,7 +126,7 @@ void MtpServer::addStorage(MtpStorage* storage) {
void MtpServer::removeStorage(MtpStorage* storage) {
Mutex::Autolock autoLock(mMutex);
- for (int i = 0; i < mStorages.size(); i++) {
+ for (size_t i = 0; i < mStorages.size(); i++) {
if (mStorages[i] == storage) {
mStorages.removeAt(i);
sendStoreRemoved(storage->getStorageID());
@@ -136,7 +138,7 @@ void MtpServer::removeStorage(MtpStorage* storage) {
MtpStorage* MtpServer::getStorage(MtpStorageID id) {
if (id == 0)
return mStorages[0];
- for (int i = 0; i < mStorages.size(); i++) {
+ for (size_t i = 0; i < mStorages.size(); i++) {
MtpStorage* storage = mStorages[i];
if (storage->getStorageID() == id)
return storage;
@@ -261,6 +263,11 @@ void MtpServer::sendStoreRemoved(MtpStorageID id) {
sendEvent(MTP_EVENT_STORE_REMOVED, id);
}
+void MtpServer::sendDevicePropertyChanged(MtpDeviceProperty property) {
+ ALOGV("sendDevicePropertyChanged %d\n", property);
+ sendEvent(MTP_EVENT_DEVICE_PROP_CHANGED, property);
+}
+
void MtpServer::sendEvent(MtpEventCode code, uint32_t param1) {
if (mSessionOpen) {
mEvent.setEventCode(code);
@@ -318,6 +325,14 @@ bool MtpServer::handleRequest() {
mSendObjectHandle = kInvalidObjectHandle;
}
+ int containertype = mRequest.getContainerType();
+ if (containertype != MTP_CONTAINER_TYPE_COMMAND) {
+ ALOGE("wrong container type %d", containertype);
+ return false;
+ }
+
+ ALOGV("got command %s (%x)", MtpDebug::getOperationCodeName(operation), operation);
+
switch (operation) {
case MTP_OPERATION_GET_DEVICE_INFO:
response = doGetDeviceInfo();
@@ -408,7 +423,8 @@ bool MtpServer::handleRequest() {
response = doEndEditObject();
break;
default:
- ALOGE("got unsupported command %s", MtpDebug::getOperationCodeName(operation));
+ ALOGE("got unsupported command %s (%x)",
+ MtpDebug::getOperationCodeName(operation), operation);
response = MTP_RESPONSE_OPERATION_NOT_SUPPORTED;
break;
}
@@ -479,6 +495,9 @@ MtpResponseCode MtpServer::doOpenSession() {
mResponse.setParameter(1, mSessionID);
return MTP_RESPONSE_SESSION_ALREADY_OPEN;
}
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
mSessionID = mRequest.getParameter(1);
mSessionOpen = true;
@@ -513,6 +532,9 @@ MtpResponseCode MtpServer::doGetStorageInfo() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
MtpStorageID id = mRequest.getParameter(1);
MtpStorage* storage = getStorage(id);
if (!storage)
@@ -534,6 +556,8 @@ MtpResponseCode MtpServer::doGetStorageInfo() {
MtpResponseCode MtpServer::doGetObjectPropsSupported() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectFormat format = mRequest.getParameter(1);
MtpObjectPropertyList* properties = mDatabase->getSupportedObjectProperties(format);
mData.putAUInt16(properties);
@@ -544,6 +568,8 @@ MtpResponseCode MtpServer::doGetObjectPropsSupported() {
MtpResponseCode MtpServer::doGetObjectHandles() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
@@ -561,6 +587,8 @@ MtpResponseCode MtpServer::doGetObjectHandles() {
MtpResponseCode MtpServer::doGetNumObjects() {
if (!mSessionOpen)
return MTP_RESPONSE_SESSION_NOT_OPEN;
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
@@ -583,6 +611,8 @@ MtpResponseCode MtpServer::doGetObjectReferences() {
return MTP_RESPONSE_SESSION_NOT_OPEN;
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
// FIXME - check for invalid object handle
@@ -601,9 +631,13 @@ MtpResponseCode MtpServer::doSetObjectReferences() {
return MTP_RESPONSE_SESSION_NOT_OPEN;
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID handle = mRequest.getParameter(1);
MtpObjectHandleList* references = mData.getAUInt32();
+ if (!references)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
delete references;
return result;
@@ -612,6 +646,8 @@ MtpResponseCode MtpServer::doSetObjectReferences() {
MtpResponseCode MtpServer::doGetObjectPropValue() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectProperty property = mRequest.getParameter(2);
ALOGV("GetObjectPropValue %d %s\n", handle,
@@ -623,6 +659,8 @@ MtpResponseCode MtpServer::doGetObjectPropValue() {
MtpResponseCode MtpServer::doSetObjectPropValue() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectProperty property = mRequest.getParameter(2);
ALOGV("SetObjectPropValue %d %s\n", handle,
@@ -632,6 +670,8 @@ MtpResponseCode MtpServer::doSetObjectPropValue() {
}
MtpResponseCode MtpServer::doGetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("GetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -640,6 +680,8 @@ MtpResponseCode MtpServer::doGetDevicePropValue() {
}
MtpResponseCode MtpServer::doSetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("SetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -648,6 +690,8 @@ MtpResponseCode MtpServer::doSetDevicePropValue() {
}
MtpResponseCode MtpServer::doResetDevicePropValue() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty property = mRequest.getParameter(1);
ALOGV("ResetDevicePropValue %s\n",
MtpDebug::getDevicePropCodeName(property));
@@ -658,6 +702,8 @@ MtpResponseCode MtpServer::doResetDevicePropValue() {
MtpResponseCode MtpServer::doGetObjectPropList() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 5)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
// use uint32_t so we can support 0xFFFFFFFF
@@ -675,6 +721,8 @@ MtpResponseCode MtpServer::doGetObjectPropList() {
MtpResponseCode MtpServer::doGetObjectInfo() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpObjectInfo info(handle);
MtpResponseCode result = mDatabase->getObjectInfo(handle, info);
@@ -716,6 +764,8 @@ MtpResponseCode MtpServer::doGetObjectInfo() {
MtpResponseCode MtpServer::doGetObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
MtpString pathBuf;
int64_t fileLength;
@@ -749,6 +799,8 @@ MtpResponseCode MtpServer::doGetObject() {
}
MtpResponseCode MtpServer::doGetThumb() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
size_t thumbSize;
void* thumb = mDatabase->getThumbnail(handle, thumbSize);
@@ -772,11 +824,19 @@ MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
uint32_t length;
offset = mRequest.getParameter(2);
if (operation == MTP_OPERATION_GET_PARTIAL_OBJECT_64) {
+ // MTP_OPERATION_GET_PARTIAL_OBJECT_64 takes 4 arguments
+ if (mRequest.getParameterCount() < 4)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
// android extension with 64 bit offset
uint64_t offset2 = mRequest.getParameter(3);
offset = offset | (offset2 << 32);
length = mRequest.getParameter(4);
} else {
+ // MTP_OPERATION_GET_PARTIAL_OBJECT takes 3 arguments
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
+
// standard GetPartialObject
length = mRequest.getParameter(3);
}
@@ -786,7 +846,7 @@ MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format);
if (result != MTP_RESPONSE_OK)
return result;
- if (offset + length > fileLength)
+ if (offset + length > (uint64_t)fileLength)
length = fileLength - offset;
const char* filePath = (const char *)pathBuf;
@@ -816,6 +876,11 @@ MtpResponseCode MtpServer::doGetPartialObject(MtpOperationCode operation) {
MtpResponseCode MtpServer::doSendObjectInfo() {
MtpString path;
+ uint16_t temp16;
+ uint32_t temp32;
+
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpStorageID storageID = mRequest.getParameter(1);
MtpStorage* storage = getStorage(storageID);
MtpObjectHandle parent = mRequest.getParameter(2);
@@ -837,25 +902,29 @@ MtpResponseCode MtpServer::doSendObjectInfo() {
}
// read only the fields we need
- mData.getUInt32(); // storage ID
- MtpObjectFormat format = mData.getUInt16();
- mData.getUInt16(); // protection status
- mSendObjectFileSize = mData.getUInt32();
- mData.getUInt16(); // thumb format
- mData.getUInt32(); // thumb compressed size
- mData.getUInt32(); // thumb pix width
- mData.getUInt32(); // thumb pix height
- mData.getUInt32(); // image pix width
- mData.getUInt32(); // image pix height
- mData.getUInt32(); // image bit depth
- mData.getUInt32(); // parent
- uint16_t associationType = mData.getUInt16();
- uint32_t associationDesc = mData.getUInt32(); // association desc
- mData.getUInt32(); // sequence number
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // storage ID
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER;
+ MtpObjectFormat format = temp16;
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER; // protection status
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER;
+ mSendObjectFileSize = temp32;
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb format
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb compressed size
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb pix width
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // thumb pix height
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image pix width
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image pix height
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // image bit depth
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // parent
+ if (!mData.getUInt16(temp16)) return MTP_RESPONSE_INVALID_PARAMETER;
+ uint16_t associationType = temp16;
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER;
+ uint32_t associationDesc = temp32; // association desc
+ if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // sequence number
MtpStringBuffer name, created, modified;
- mData.getString(name); // file name
- mData.getString(created); // date created
- mData.getString(modified); // date modified
+ if (!mData.getString(name)) return MTP_RESPONSE_INVALID_PARAMETER; // file name
+ if (!mData.getString(created)) return MTP_RESPONSE_INVALID_PARAMETER; // date created
+ if (!mData.getString(modified)) return MTP_RESPONSE_INVALID_PARAMETER; // date modified
// keywords follow
ALOGV("name: %s format: %04X\n", (const char *)name, format);
@@ -943,22 +1012,28 @@ MtpResponseCode MtpServer::doSendObject() {
fchmod(mfr.fd, mFilePermission);
umask(mask);
- if (initialData > 0)
+ if (initialData > 0) {
ret = write(mfr.fd, mData.getData(), initialData);
+ }
- if (mSendObjectFileSize - initialData > 0) {
- mfr.offset = initialData;
- if (mSendObjectFileSize == 0xFFFFFFFF) {
- // tell driver to read until it receives a short packet
- mfr.length = 0xFFFFFFFF;
- } else {
- mfr.length = mSendObjectFileSize - initialData;
- }
+ if (ret < 0) {
+ ALOGE("failed to write initial data");
+ result = MTP_RESPONSE_GENERAL_ERROR;
+ } else {
+ if (mSendObjectFileSize - initialData > 0) {
+ mfr.offset = initialData;
+ if (mSendObjectFileSize == 0xFFFFFFFF) {
+ // tell driver to read until it receives a short packet
+ mfr.length = 0xFFFFFFFF;
+ } else {
+ mfr.length = mSendObjectFileSize - initialData;
+ }
- ALOGV("receiving %s\n", (const char *)mSendObjectFilePath);
- // transfer the file
- ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
- ALOGV("MTP_RECEIVE_FILE returned %d\n", ret);
+ ALOGV("receiving %s\n", (const char *)mSendObjectFilePath);
+ // transfer the file
+ ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
+ ALOGV("MTP_RECEIVE_FILE returned %d\n", ret);
+ }
}
close(mfr.fd);
@@ -983,7 +1058,7 @@ done:
static void deleteRecursive(const char* path) {
char pathbuf[PATH_MAX];
- int pathLength = strlen(path);
+ size_t pathLength = strlen(path);
if (pathLength >= sizeof(pathbuf) - 1) {
ALOGE("path too long: %s\n", path);
}
@@ -1044,8 +1119,10 @@ static void deletePath(const char* path) {
MtpResponseCode MtpServer::doDeleteObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
- MtpObjectFormat format = mRequest.getParameter(2);
+ MtpObjectFormat format;
// FIXME - support deleting all objects if handle is 0xFFFFFFFF
// FIXME - implement deleting objects by format
@@ -1065,6 +1142,8 @@ MtpResponseCode MtpServer::doDeleteObject() {
}
MtpResponseCode MtpServer::doGetObjectPropDesc() {
+ if (mRequest.getParameterCount() < 2)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectProperty propCode = mRequest.getParameter(1);
MtpObjectFormat format = mRequest.getParameter(2);
ALOGV("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
@@ -1078,6 +1157,8 @@ MtpResponseCode MtpServer::doGetObjectPropDesc() {
}
MtpResponseCode MtpServer::doGetDevicePropDesc() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpDeviceProperty propCode = mRequest.getParameter(1);
ALOGV("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
@@ -1091,6 +1172,8 @@ MtpResponseCode MtpServer::doGetDevicePropDesc() {
MtpResponseCode MtpServer::doSendPartialObject() {
if (!hasStorage())
return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ if (mRequest.getParameterCount() < 4)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
uint64_t offset = mRequest.getParameter(2);
uint64_t offset2 = mRequest.getParameter(3);
@@ -1105,12 +1188,13 @@ MtpResponseCode MtpServer::doSendPartialObject() {
// can't start writing past the end of the file
if (offset > edit->mSize) {
- ALOGD("writing past end of object, offset: %lld, edit->mSize: %lld", offset, edit->mSize);
+ ALOGD("writing past end of object, offset: %" PRIu64 ", edit->mSize: %" PRIu64,
+ offset, edit->mSize);
return MTP_RESPONSE_GENERAL_ERROR;
}
const char* filePath = (const char *)edit->mPath;
- ALOGV("receiving partial %s %lld %lld\n", filePath, offset, length);
+ ALOGV("receiving partial %s %" PRIu64 " %" PRIu32, filePath, offset, length);
// read the header, and possibly some data
int ret = mData.read(mFD);
@@ -1124,15 +1208,19 @@ MtpResponseCode MtpServer::doSendPartialObject() {
length -= initialData;
}
- if (length > 0) {
- mtp_file_range mfr;
- mfr.fd = edit->mFD;
- mfr.offset = offset;
- mfr.length = length;
-
- // transfer the file
- ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
- ALOGV("MTP_RECEIVE_FILE returned %d", ret);
+ if (ret < 0) {
+ ALOGE("failed to write initial data");
+ } else {
+ if (length > 0) {
+ mtp_file_range mfr;
+ mfr.fd = edit->mFD;
+ mfr.offset = offset;
+ mfr.length = length;
+
+ // transfer the file
+ ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
+ ALOGV("MTP_RECEIVE_FILE returned %d", ret);
+ }
}
if (ret < 0) {
mResponse.setParameter(1, 0);
@@ -1153,6 +1241,8 @@ MtpResponseCode MtpServer::doSendPartialObject() {
}
MtpResponseCode MtpServer::doTruncateObject() {
+ if (mRequest.getParameterCount() < 3)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
ObjectEdit* edit = getEditObject(handle);
if (!edit) {
@@ -1172,6 +1262,8 @@ MtpResponseCode MtpServer::doTruncateObject() {
}
MtpResponseCode MtpServer::doBeginEditObject() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
if (getEditObject(handle)) {
ALOGE("object already open for edit in doBeginEditObject");
@@ -1196,6 +1288,8 @@ MtpResponseCode MtpServer::doBeginEditObject() {
}
MtpResponseCode MtpServer::doEndEditObject() {
+ if (mRequest.getParameterCount() < 1)
+ return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
ObjectEdit* edit = getEditObject(handle);
if (!edit) {
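
Every handler above now applies the same precondition before touching positional parameters. A hypothetical handler skeleton (not part of this patch) isolating that guard; MTP_RESPONSE_* constants and MtpRequestPacket come from the existing mtp headers:

    #include "MtpRequestPacket.h"

    // Reject a request that arrived with fewer parameters than the operation
    // needs, instead of reading past the end of a short packet.
    static MtpResponseCode doTwoParameterOperation(android::MtpRequestPacket& request) {
        if (request.getParameterCount() < 2)
            return MTP_RESPONSE_INVALID_PARAMETER;
        uint32_t first = request.getParameter(1);
        uint32_t second = request.getParameter(2);
        (void)first; (void)second;  // operation-specific work would go here
        return MTP_RESPONSE_OK;
    }
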
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
index dfa8258..b3a11e0 100644
--- a/media/mtp/MtpServer.h
+++ b/media/mtp/MtpServer.h
@@ -104,6 +104,7 @@ public:
void sendObjectAdded(MtpObjectHandle handle);
void sendObjectRemoved(MtpObjectHandle handle);
+ void sendDevicePropertyChanged(MtpDeviceProperty property);
private:
void sendStoreAdded(MtpStorageID id);
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
index dcd37cd..5d4ebbf 100644
--- a/media/mtp/MtpStorageInfo.cpp
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -16,6 +16,8 @@
#define LOG_TAG "MtpStorageInfo"
+#include <inttypes.h>
+
#include "MtpDebug.h"
#include "MtpDataPacket.h"
#include "MtpStorageInfo.h"
@@ -43,27 +45,29 @@ MtpStorageInfo::~MtpStorageInfo() {
free(mVolumeIdentifier);
}
-void MtpStorageInfo::read(MtpDataPacket& packet) {
+bool MtpStorageInfo::read(MtpDataPacket& packet) {
MtpStringBuffer string;
// read the device info
- mStorageType = packet.getUInt16();
- mFileSystemType = packet.getUInt16();
- mAccessCapability = packet.getUInt16();
- mMaxCapacity = packet.getUInt64();
- mFreeSpaceBytes = packet.getUInt64();
- mFreeSpaceObjects = packet.getUInt32();
+ if (!packet.getUInt16(mStorageType)) return false;
+ if (!packet.getUInt16(mFileSystemType)) return false;
+ if (!packet.getUInt16(mAccessCapability)) return false;
+ if (!packet.getUInt64(mMaxCapacity)) return false;
+ if (!packet.getUInt64(mFreeSpaceBytes)) return false;
+ if (!packet.getUInt32(mFreeSpaceObjects)) return false;
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mStorageDescription = strdup((const char *)string);
- packet.getString(string);
+ if (!packet.getString(string)) return false;
mVolumeIdentifier = strdup((const char *)string);
+
+ return true;
}
void MtpStorageInfo::print() {
ALOGD("Storage Info %08X:\n\tmStorageType: %d\n\tmFileSystemType: %d\n\tmAccessCapability: %d\n",
mStorageID, mStorageType, mFileSystemType, mAccessCapability);
- ALOGD("\tmMaxCapacity: %lld\n\tmFreeSpaceBytes: %lld\n\tmFreeSpaceObjects: %d\n",
+ ALOGD("\tmMaxCapacity: %" PRIu64 "\n\tmFreeSpaceBytes: %" PRIu64 "\n\tmFreeSpaceObjects: %d\n",
mMaxCapacity, mFreeSpaceBytes, mFreeSpaceObjects);
ALOGD("\tmStorageDescription: %s\n\tmVolumeIdentifier: %s\n",
mStorageDescription, mVolumeIdentifier);
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
index 2cb626e..35a8189 100644
--- a/media/mtp/MtpStorageInfo.h
+++ b/media/mtp/MtpStorageInfo.h
@@ -39,7 +39,7 @@ public:
MtpStorageInfo(MtpStorageID id);
virtual ~MtpStorageInfo();
- void read(MtpDataPacket& packet);
+ bool read(MtpDataPacket& packet);
void print();
};
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
index f3420a4..df04694 100644
--- a/media/mtp/MtpStringBuffer.cpp
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -123,11 +123,17 @@ void MtpStringBuffer::set(const uint16_t* src) {
mByteCount = dest - mBuffer;
}
-void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
- int count = packet->getUInt8();
+bool MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+ uint8_t count;
+ if (!packet->getUInt8(count))
+ return false;
+
uint8_t* dest = mBuffer;
for (int i = 0; i < count; i++) {
- uint16_t ch = packet->getUInt16();
+ uint16_t ch;
+
+ if (!packet->getUInt16(ch))
+ return false;
if (ch >= 0x0800) {
*dest++ = (uint8_t)(0xE0 | (ch >> 12));
*dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
@@ -142,6 +148,7 @@ void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
*dest++ = 0;
mCharCount = count;
mByteCount = dest - mBuffer;
+ return true;
}
void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
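
readFromPacket above expands each 16-bit code unit from the packet into one to three UTF-8 bytes; only the three-byte branch is visible in the hunk, and the shorter branches follow the standard UTF-8 pattern. A self-contained sketch of that per-character step:

    #include <stdint.h>

    // Encode one 16-bit code unit as UTF-8 and return the number of bytes
    // written: 3 for ch >= 0x0800, 2 for ch >= 0x80, otherwise 1.
    static int encodeUtf8(uint16_t ch, uint8_t* dest) {
        if (ch >= 0x0800) {
            dest[0] = (uint8_t)(0xE0 | (ch >> 12));
            dest[1] = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
            dest[2] = (uint8_t)(0x80 | (ch & 0x3F));
            return 3;
        } else if (ch >= 0x80) {
            dest[0] = (uint8_t)(0xC0 | (ch >> 6));
            dest[1] = (uint8_t)(0x80 | (ch & 0x3F));
            return 2;
        }
        dest[0] = (uint8_t)ch;
        return 1;
    }
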
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
index e5150df..85d91e8 100644
--- a/media/mtp/MtpStringBuffer.h
+++ b/media/mtp/MtpStringBuffer.h
@@ -46,7 +46,7 @@ public:
void set(const char* src);
void set(const uint16_t* src);
- void readFromPacket(MtpDataPacket* packet);
+ bool readFromPacket(MtpDataPacket* packet);
void writeToPacket(MtpDataPacket* packet) const;
inline int getCharCount() const { return mCharCount; }
diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp
index 6ec8876..0667bdd 100644
--- a/media/mtp/MtpUtils.cpp
+++ b/media/mtp/MtpUtils.cpp
@@ -19,7 +19,8 @@
#include <stdio.h>
#include <time.h>
-#include <cutils/tztime.h>
+#include <../private/bionic_time.h> /* TODO: switch this code to icu4c! */
+
#include "MtpUtils.h"
namespace android {
diff --git a/media/ndk/Android.mk b/media/ndk/Android.mk
new file mode 100644
index 0000000..8f795cd
--- /dev/null
+++ b/media/ndk/Android.mk
@@ -0,0 +1,52 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_BUILD_PDK), true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ NdkMediaCodec.cpp \
+ NdkMediaCrypto.cpp \
+ NdkMediaExtractor.cpp \
+ NdkMediaFormat.cpp \
+ NdkMediaMuxer.cpp \
+ NdkMediaDrm.cpp \
+
+LOCAL_MODULE:= libmediandk
+
+LOCAL_C_INCLUDES := \
+ bionic/libc/private \
+ frameworks/base/core/jni \
+ frameworks/av/include/ndk
+
+LOCAL_CFLAGS += -fvisibility=hidden -D EXPORT='__attribute__ ((visibility ("default")))'
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ liblog \
+ libutils \
+ libandroid_runtime \
+ libbinder \
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
new file mode 100644
index 0000000..ed00b72
--- /dev/null
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -0,0 +1,505 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodec"
+
+#include "NdkMediaCodec.h"
+#include "NdkMediaError.h"
+#include "NdkMediaCryptoPriv.h"
+#include "NdkMediaFormatPriv.h"
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <gui/Surface.h>
+
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ABuffer.h>
+
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaErrors.h>
+
+using namespace android;
+
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ } else if (err == -EAGAIN) {
+ return (media_status_t) AMEDIACODEC_INFO_TRY_AGAIN_LATER;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+enum {
+ kWhatActivityNotify,
+ kWhatRequestActivityNotifications,
+ kWhatStopActivityNotifications,
+};
+
+
+class CodecHandler: public AHandler {
+private:
+ AMediaCodec* mCodec;
+public:
+ CodecHandler(AMediaCodec *codec);
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+};
+
+typedef void (*OnCodecEvent)(AMediaCodec *codec, void *userdata);
+
+struct AMediaCodec {
+ sp<android::MediaCodec> mCodec;
+ sp<ALooper> mLooper;
+ sp<CodecHandler> mHandler;
+ sp<AMessage> mActivityNotification;
+ int32_t mGeneration;
+ bool mRequestedActivityNotification;
+ OnCodecEvent mCallback;
+ void *mCallbackUserData;
+};
+
+CodecHandler::CodecHandler(AMediaCodec *codec) {
+ mCodec = codec;
+}
+
+void CodecHandler::onMessageReceived(const sp<AMessage> &msg) {
+
+ switch (msg->what()) {
+ case kWhatRequestActivityNotifications:
+ {
+ if (mCodec->mRequestedActivityNotification) {
+ break;
+ }
+
+ mCodec->mCodec->requestActivityNotification(mCodec->mActivityNotification);
+ mCodec->mRequestedActivityNotification = true;
+ break;
+ }
+
+ case kWhatActivityNotify:
+ {
+ {
+ int32_t generation;
+ msg->findInt32("generation", &generation);
+
+ if (generation != mCodec->mGeneration) {
+ // stale
+ break;
+ }
+
+ mCodec->mRequestedActivityNotification = false;
+ }
+
+ if (mCodec->mCallback) {
+ mCodec->mCallback(mCodec, mCodec->mCallbackUserData);
+ }
+ break;
+ }
+
+ case kWhatStopActivityNotifications:
+ {
+ uint32_t replyID;
+ msg->senderAwaitsResponse(&replyID);
+
+ mCodec->mGeneration++;
+ mCodec->mRequestedActivityNotification = false;
+
+ sp<AMessage> response = new AMessage;
+ response->postReply(replyID);
+ break;
+ }
+
+ default:
+ ALOGE("shouldn't be here");
+ break;
+ }
+
+}
+
+
+static void requestActivityNotification(AMediaCodec *codec) {
+ (new AMessage(kWhatRequestActivityNotifications, codec->mHandler->id()))->post();
+}
+
+extern "C" {
+
+static AMediaCodec * createAMediaCodec(const char *name, bool name_is_type, bool encoder) {
+ AMediaCodec *mData = new AMediaCodec();
+ mData->mLooper = new ALooper;
+ mData->mLooper->setName("NDK MediaCodec_looper");
+ status_t ret = mData->mLooper->start(
+ false, // runOnCallingThread
+ true, // canCallJava XXX
+ PRIORITY_FOREGROUND);
+ if (name_is_type) {
+ mData->mCodec = android::MediaCodec::CreateByType(mData->mLooper, name, encoder);
+ } else {
+ mData->mCodec = android::MediaCodec::CreateByComponentName(mData->mLooper, name);
+ }
+ mData->mHandler = new CodecHandler(mData);
+ mData->mLooper->registerHandler(mData->mHandler);
+ mData->mGeneration = 1;
+ mData->mRequestedActivityNotification = false;
+ mData->mCallback = NULL;
+
+ return mData;
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createCodecByName(const char *name) {
+ return createAMediaCodec(name, false, false);
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type) {
+ return createAMediaCodec(mime_type, true, false);
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createEncoderByType(const char *name) {
+ return createAMediaCodec(name, true, true);
+}
+
+EXPORT
+media_status_t AMediaCodec_delete(AMediaCodec *mData) {
+ if (mData->mCodec != NULL) {
+ mData->mCodec->release();
+ mData->mCodec.clear();
+ }
+
+ if (mData->mLooper != NULL) {
+ mData->mLooper->unregisterHandler(mData->mHandler->id());
+ mData->mLooper->stop();
+ mData->mLooper.clear();
+ }
+ delete mData;
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodec_configure(
+ AMediaCodec *mData,
+ const AMediaFormat* format,
+ ANativeWindow* window,
+ AMediaCrypto *crypto,
+ uint32_t flags) {
+ sp<AMessage> nativeFormat;
+ AMediaFormat_getFormat(format, &nativeFormat);
+ ALOGV("configure with format: %s", nativeFormat->debugString(0).c_str());
+ sp<Surface> surface = NULL;
+ if (window != NULL) {
+ surface = (Surface*) window;
+ }
+
+ return translate_error(mData->mCodec->configure(nativeFormat, surface,
+ crypto ? crypto->mCrypto : NULL, flags));
+}
+
+EXPORT
+media_status_t AMediaCodec_start(AMediaCodec *mData) {
+ status_t ret = mData->mCodec->start();
+ if (ret != OK) {
+ return translate_error(ret);
+ }
+ mData->mActivityNotification = new AMessage(kWhatActivityNotify, mData->mHandler->id());
+ mData->mActivityNotification->setInt32("generation", mData->mGeneration);
+ requestActivityNotification(mData);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodec_stop(AMediaCodec *mData) {
+ media_status_t ret = translate_error(mData->mCodec->stop());
+
+ sp<AMessage> msg = new AMessage(kWhatStopActivityNotifications, mData->mHandler->id());
+ sp<AMessage> response;
+ msg->postAndAwaitResponse(&response);
+ mData->mActivityNotification.clear();
+
+ return ret;
+}
+
+EXPORT
+media_status_t AMediaCodec_flush(AMediaCodec *mData) {
+ return translate_error(mData->mCodec->flush());
+}
+
+EXPORT
+ssize_t AMediaCodec_dequeueInputBuffer(AMediaCodec *mData, int64_t timeoutUs) {
+ size_t idx;
+ status_t ret = mData->mCodec->dequeueInputBuffer(&idx, timeoutUs);
+ requestActivityNotification(mData);
+ if (ret == OK) {
+ return idx;
+ }
+ return translate_error(ret);
+}
+
+EXPORT
+uint8_t* AMediaCodec_getInputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
+ android::Vector<android::sp<android::ABuffer> > abufs;
+ if (mData->mCodec->getInputBuffers(&abufs) == 0) {
+ size_t n = abufs.size();
+ if (idx >= n) {
+ ALOGE("buffer index %zu out of range", idx);
+ return NULL;
+ }
+ if (out_size != NULL) {
+ *out_size = abufs[idx]->capacity();
+ }
+ return abufs[idx]->data();
+ }
+ ALOGE("couldn't get input buffers");
+ return NULL;
+}
+
+EXPORT
+uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
+ android::Vector<android::sp<android::ABuffer> > abufs;
+ if (mData->mCodec->getOutputBuffers(&abufs) == 0) {
+ size_t n = abufs.size();
+ if (idx >= n) {
+ ALOGE("buffer index %zu out of range", idx);
+ return NULL;
+ }
+ if (out_size != NULL) {
+ *out_size = abufs[idx]->capacity();
+ }
+ return abufs[idx]->data();
+ }
+ ALOGE("couldn't get output buffers");
+ return NULL;
+}
+
+EXPORT
+media_status_t AMediaCodec_queueInputBuffer(AMediaCodec *mData,
+ size_t idx, off_t offset, size_t size, uint64_t time, uint32_t flags) {
+
+ AString errorMsg;
+ status_t ret = mData->mCodec->queueInputBuffer(idx, offset, size, time, flags, &errorMsg);
+ return translate_error(ret);
+}
+
+EXPORT
+ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec *mData,
+ AMediaCodecBufferInfo *info, int64_t timeoutUs) {
+ size_t idx;
+ size_t offset;
+ size_t size;
+ uint32_t flags;
+ int64_t presentationTimeUs;
+ status_t ret = mData->mCodec->dequeueOutputBuffer(&idx, &offset, &size, &presentationTimeUs,
+ &flags, timeoutUs);
+ requestActivityNotification(mData);
+ switch (ret) {
+ case OK:
+ info->offset = offset;
+ info->size = size;
+ info->flags = flags;
+ info->presentationTimeUs = presentationTimeUs;
+ return idx;
+ case -EAGAIN:
+ return AMEDIACODEC_INFO_TRY_AGAIN_LATER;
+ case android::INFO_FORMAT_CHANGED:
+ return AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED;
+ case INFO_OUTPUT_BUFFERS_CHANGED:
+ return AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED;
+ default:
+ break;
+ }
+ return translate_error(ret);
+}
+
+EXPORT
+AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec *mData) {
+ sp<AMessage> format;
+ mData->mCodec->getOutputFormat(&format);
+ return AMediaFormat_fromMsg(&format);
+}
+
+EXPORT
+media_status_t AMediaCodec_releaseOutputBuffer(AMediaCodec *mData, size_t idx, bool render) {
+ if (render) {
+ return translate_error(mData->mCodec->renderOutputBufferAndRelease(idx));
+ } else {
+ return translate_error(mData->mCodec->releaseOutputBuffer(idx));
+ }
+}
+
+EXPORT
+media_status_t AMediaCodec_releaseOutputBufferAtTime(
+ AMediaCodec *mData, size_t idx, int64_t timestampNs) {
+ ALOGV("render @ %" PRId64, timestampNs);
+ return translate_error(mData->mCodec->renderOutputBufferAndRelease(idx, timestampNs));
+}
+
+//EXPORT
+media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback, void *userdata) {
+ mData->mCallback = callback;
+ mData->mCallbackUserData = userdata;
+ return AMEDIA_OK;
+}
+
+typedef struct AMediaCodecCryptoInfo {
+ int numsubsamples;
+ uint8_t key[16];
+ uint8_t iv[16];
+ cryptoinfo_mode_t mode;
+ size_t *clearbytes;
+ size_t *encryptedbytes;
+} AMediaCodecCryptoInfo;
+
+EXPORT
+media_status_t AMediaCodec_queueSecureInputBuffer(
+ AMediaCodec* codec,
+ size_t idx,
+ off_t offset,
+ AMediaCodecCryptoInfo* crypto,
+ uint64_t time,
+ uint32_t flags) {
+
+ CryptoPlugin::SubSample *subSamples = new CryptoPlugin::SubSample[crypto->numsubsamples];
+ for (int i = 0; i < crypto->numsubsamples; i++) {
+ subSamples[i].mNumBytesOfClearData = crypto->clearbytes[i];
+ subSamples[i].mNumBytesOfEncryptedData = crypto->encryptedbytes[i];
+ }
+
+ AString errormsg;
+ status_t err = codec->mCodec->queueSecureInputBuffer(idx,
+ offset,
+ subSamples,
+ crypto->numsubsamples,
+ crypto->key,
+ crypto->iv,
+ (CryptoPlugin::Mode) crypto->mode,
+ time,
+ flags,
+ &errormsg);
+ if (err != 0) {
+ ALOGE("queSecureInputBuffer: %s", errormsg.c_str());
+ }
+ delete [] subSamples;
+ return translate_error(err);
+}
+
+
+
+EXPORT
+AMediaCodecCryptoInfo *AMediaCodecCryptoInfo_new(
+ int numsubsamples,
+ uint8_t key[16],
+ uint8_t iv[16],
+ cryptoinfo_mode_t mode,
+ size_t *clearbytes,
+ size_t *encryptedbytes) {
+
+ // size needed to store all the crypto data
+ size_t cryptosize = sizeof(AMediaCodecCryptoInfo) + sizeof(size_t) * numsubsamples * 2;
+ AMediaCodecCryptoInfo *ret = (AMediaCodecCryptoInfo*) malloc(cryptosize);
+ if (!ret) {
+ ALOGE("couldn't allocate %zu bytes", cryptosize);
+ return NULL;
+ }
+ ret->numsubsamples = numsubsamples;
+ memcpy(ret->key, key, 16);
+ memcpy(ret->iv, iv, 16);
+ ret->mode = mode;
+
+ // clearbytes and encryptedbytes point at the actual data, which follows
+ ret->clearbytes = (size_t*) (ret + 1); // point immediately after the struct
+ ret->encryptedbytes = ret->clearbytes + numsubsamples; // point after the clear sizes
+
+ memcpy(ret->clearbytes, clearbytes, numsubsamples * sizeof(size_t));
+ memcpy(ret->encryptedbytes, encryptedbytes, numsubsamples * sizeof(size_t));
+
+ return ret;
+}
+
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_delete(AMediaCodecCryptoInfo* info) {
+ free(info);
+ return AMEDIA_OK;
+}
+
+EXPORT
+size_t AMediaCodecCryptoInfo_getNumSubSamples(AMediaCodecCryptoInfo* ci) {
+ return ci->numsubsamples;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getKey(AMediaCodecCryptoInfo* ci, uint8_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->key, 16);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getIV(AMediaCodecCryptoInfo* ci, uint8_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->iv, 16);
+ return AMEDIA_OK;
+}
+
+EXPORT
+cryptoinfo_mode_t AMediaCodecCryptoInfo_getMode(AMediaCodecCryptoInfo* ci) {
+ if (!ci) {
+ return (cryptoinfo_mode_t) AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ return ci->mode;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getClearBytes(AMediaCodecCryptoInfo* ci, size_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->clearbytes, sizeof(size_t) * ci->numsubsamples);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getEncryptedBytes(AMediaCodecCryptoInfo* ci, size_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->encryptedbytes, sizeof(size_t) * ci->numsubsamples);
+ return AMEDIA_OK;
+}
+
+} // extern "C"
+
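
Taken together, the entry points defined in this file form a poll-based codec loop. A hedged sketch of a decoder using only functions declared above; the source of compressed input (for example AMediaExtractor, built in the same module) is deliberately left abstract, and the fill/exit logic is placeholder only:

    #include "NdkMediaCodec.h"
    #include "NdkMediaFormat.h"

    static void decodeLoopSketch(AMediaFormat* format, const char* mime) {
        AMediaCodec* codec = AMediaCodec_createDecoderByType(mime);
        AMediaCodec_configure(codec, format, NULL /*surface*/, NULL /*crypto*/, 0);
        AMediaCodec_start(codec);

        bool done = false;
        while (!done) {
            ssize_t inIdx = AMediaCodec_dequeueInputBuffer(codec, 10000 /*us*/);
            if (inIdx >= 0) {
                size_t capacity;
                uint8_t* buf = AMediaCodec_getInputBuffer(codec, inIdx, &capacity);
                size_t written = 0;  // placeholder: copy compressed input into buf here
                (void)buf;
                AMediaCodec_queueInputBuffer(codec, inIdx, 0, written, 0 /*pts*/, 0);
            }

            AMediaCodecBufferInfo info;
            ssize_t outIdx = AMediaCodec_dequeueOutputBuffer(codec, &info, 10000 /*us*/);
            if (outIdx >= 0) {
                // consume the decoded frame, then return the buffer to the codec
                AMediaCodec_releaseOutputBuffer(codec, outIdx, false /*render*/);
                done = true;  // placeholder exit condition for the sketch
            }
        }

        AMediaCodec_stop(codec);
        AMediaCodec_delete(codec);
    }
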
diff --git a/media/ndk/NdkMediaCrypto.cpp b/media/ndk/NdkMediaCrypto.cpp
new file mode 100644
index 0000000..1cc2f1a
--- /dev/null
+++ b/media/ndk/NdkMediaCrypto.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCrypto"
+
+
+#include "NdkMediaCrypto.h"
+#include "NdkMediaCodec.h"
+#include "NdkMediaFormatPriv.h"
+
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <binder/IServiceManager.h>
+#include <media/ICrypto.h>
+#include <media/IMediaPlayerService.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+
+static sp<ICrypto> makeCrypto() {
+ sp<IServiceManager> sm = defaultServiceManager();
+
+ sp<IBinder> binder =
+ sm->getService(String16("media.player"));
+
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
+ if (service == NULL) {
+ return NULL;
+ }
+
+ sp<ICrypto> crypto = service->makeCrypto();
+
+ if (crypto == NULL || (crypto->initCheck() != OK && crypto->initCheck() != NO_INIT)) {
+ return NULL;
+ }
+
+ return crypto;
+}
+
+struct AMediaCrypto {
+ sp<ICrypto> mCrypto;
+};
+
+
+extern "C" {
+
+
+EXPORT
+bool AMediaCrypto_isCryptoSchemeSupported(const AMediaUUID uuid) {
+ sp<ICrypto> crypto = makeCrypto();
+ if (crypto == NULL) {
+ return false;
+ }
+ return crypto->isCryptoSchemeSupported(uuid);
+}
+
+EXPORT
+bool AMediaCrypto_requiresSecureDecoderComponent(const char *mime) {
+ sp<ICrypto> crypto = makeCrypto();
+ if (crypto == NULL) {
+ return false;
+ }
+ return crypto->requiresSecureDecoderComponent(mime);
+}
+
+EXPORT
+AMediaCrypto* AMediaCrypto_new(const AMediaUUID uuid, const void *data, size_t datasize) {
+
+ sp<ICrypto> tmp = makeCrypto();
+ if (tmp == NULL) {
+ return NULL;
+ }
+
+ if (tmp->createPlugin(uuid, data, datasize) != 0) {
+ return NULL;
+ }
+
+ AMediaCrypto *crypto = new AMediaCrypto();
+ crypto->mCrypto = tmp;
+
+ return crypto;
+}
+
+EXPORT
+void AMediaCrypto_delete(AMediaCrypto* crypto) {
+ delete crypto;
+}
+
+
+
+} // extern "C"
+
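
AMediaCrypto here is a thin wrapper around an ICrypto plugin obtained from the media.player service. A hedged usage sketch of the two entry points above, assuming AMediaUUID is the 16-byte identifier type declared in the NDK headers; the zeroed UUID is illustrative only, not a real scheme:

    #include "NdkMediaCrypto.h"
    #include <stddef.h>

    static const AMediaUUID kSchemeUuid = {0};  // illustrative placeholder UUID

    static AMediaCrypto* makeCryptoIfSupported(const void* initData, size_t initSize) {
        if (!AMediaCrypto_isCryptoSchemeSupported(kSchemeUuid)) {
            return NULL;
        }
        return AMediaCrypto_new(kSchemeUuid, initData, initSize);
    }
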
diff --git a/media/ndk/NdkMediaCryptoPriv.h b/media/ndk/NdkMediaCryptoPriv.h
new file mode 100644
index 0000000..14ea928
--- /dev/null
+++ b/media/ndk/NdkMediaCryptoPriv.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CRYPTO_PRIV_H
+#define _NDK_MEDIA_CRYPTO_PRIV_H
+
+#include <sys/types.h>
+#include <utils/StrongPointer.h>
+#include <media/ICrypto.h>
+
+using namespace android;
+
+struct AMediaCrypto {
+ sp<ICrypto> mCrypto;
+};
+
+#endif // _NDK_MEDIA_CRYPTO_PRIV_H
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
new file mode 100644
index 0000000..7a1048c
--- /dev/null
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -0,0 +1,728 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaDrm"
+
+#include "NdkMediaDrm.h"
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <gui/Surface.h>
+
+#include <media/IDrm.h>
+#include <media/IDrmClient.h>
+#include <media/stagefright/MediaErrors.h>
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <ndk/NdkMediaCrypto.h>
+
+
+using namespace android;
+
+typedef Vector<uint8_t> idvec_t;
+
+struct DrmListener: virtual public BnDrmClient
+{
+private:
+ AMediaDrm *mObj;
+ AMediaDrmEventListener mListener;
+
+public:
+ DrmListener(AMediaDrm *obj, AMediaDrmEventListener listener) : mObj(obj), mListener(listener) {}
+ void notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj);
+};
+
+struct AMediaDrm {
+ sp<IDrm> mDrm;
+ sp<IDrmClient> mDrmClient;
+ List<idvec_t> mIds;
+ KeyedVector<String8, String8> mQueryResults;
+ Vector<uint8_t> mKeyRequest;
+ Vector<uint8_t> mProvisionRequest;
+ String8 mProvisionUrl;
+ String8 mPropertyString;
+ Vector<uint8_t> mPropertyByteArray;
+ List<Vector<uint8_t> > mSecureStops;
+ sp<DrmListener> mListener;
+};
+
+void DrmListener::notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj) {
+ if (!mListener) {
+ return;
+ }
+
+ AMediaDrmSessionId sessionId = {NULL, 0};
+ int32_t sessionIdSize = obj->readInt32();
+ if (sessionIdSize) {
+ uint8_t *sessionIdData = new uint8_t[sessionIdSize];
+ sessionId.ptr = sessionIdData;
+ sessionId.length = sessionIdSize;
+ obj->read(sessionIdData, sessionId.length);
+ }
+
+ int32_t dataSize = obj->readInt32();
+ uint8_t *data = NULL;
+ if (dataSize) {
+ data = new uint8_t[dataSize];
+ obj->read(data, dataSize);
+ }
+
+ // translate DrmPlugin event types into their NDK equivalents
+ AMediaDrmEventType ndkEventType;
+ switch(eventType) {
+ case DrmPlugin::kDrmPluginEventProvisionRequired:
+ ndkEventType = EVENT_PROVISION_REQUIRED;
+ break;
+ case DrmPlugin::kDrmPluginEventKeyNeeded:
+ ndkEventType = EVENT_KEY_REQUIRED;
+ break;
+ case DrmPlugin::kDrmPluginEventKeyExpired:
+ ndkEventType = EVENT_KEY_EXPIRED;
+ break;
+ case DrmPlugin::kDrmPluginEventVendorDefined:
+ ndkEventType = EVENT_VENDOR_DEFINED;
+ break;
+ default:
+ ALOGE("Invalid event DrmPlugin::EventType %d, ignored", (int)eventType);
+ return;
+ }
+
+ (*mListener)(mObj, &sessionId, ndkEventType, extra, data, dataSize);
+
+ delete [] sessionId.ptr;
+ delete [] data;
+}
+
+
+extern "C" {
+
+static media_status_t translateStatus(status_t status) {
+ media_status_t result = AMEDIA_ERROR_UNKNOWN;
+ switch (status) {
+ case OK:
+ result = AMEDIA_OK;
+ break;
+ case android::ERROR_DRM_NOT_PROVISIONED:
+ result = AMEDIA_DRM_NOT_PROVISIONED;
+ break;
+ case android::ERROR_DRM_RESOURCE_BUSY:
+ result = AMEDIA_DRM_RESOURCE_BUSY;
+ break;
+ case android::ERROR_DRM_DEVICE_REVOKED:
+ result = AMEDIA_DRM_DEVICE_REVOKED;
+ break;
+ case android::ERROR_DRM_CANNOT_HANDLE:
+ result = AMEDIA_ERROR_INVALID_PARAMETER;
+ break;
+ case android::ERROR_DRM_TAMPER_DETECTED:
+ result = AMEDIA_DRM_TAMPER_DETECTED;
+ break;
+ case android::ERROR_DRM_SESSION_NOT_OPENED:
+ result = AMEDIA_DRM_SESSION_NOT_OPENED;
+ break;
+ case android::ERROR_DRM_NO_LICENSE:
+ result = AMEDIA_DRM_NEED_KEY;
+ break;
+ case android::ERROR_DRM_LICENSE_EXPIRED:
+ result = AMEDIA_DRM_LICENSE_EXPIRED;
+ break;
+ default:
+ break;
+ }
+ return result;
+}
+
+static sp<IDrm> CreateDrm() {
+ sp<IServiceManager> sm = defaultServiceManager();
+
+ sp<IBinder> binder =
+ sm->getService(String16("media.player"));
+
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
+ if (service == NULL) {
+ return NULL;
+ }
+
+ sp<IDrm> drm = service->makeDrm();
+
+ if (drm == NULL || (drm->initCheck() != OK && drm->initCheck() != NO_INIT)) {
+ return NULL;
+ }
+
+ return drm;
+}
+
+
+static sp<IDrm> CreateDrmFromUUID(const AMediaUUID uuid) {
+ sp<IDrm> drm = CreateDrm();
+
+ if (drm == NULL) {
+ return NULL;
+ }
+
+ status_t err = drm->createPlugin(uuid);
+
+ if (err != OK) {
+ return NULL;
+ }
+
+ return drm;
+}
+
+EXPORT
+bool AMediaDrm_isCryptoSchemeSupported(const AMediaUUID uuid, const char *mimeType) {
+ sp<IDrm> drm = CreateDrm();
+
+ if (drm == NULL) {
+ return false;
+ }
+
+ String8 mimeStr = mimeType ? String8(mimeType) : String8("");
+ return drm->isCryptoSchemeSupported(uuid, mimeStr);
+}
+
+EXPORT
+AMediaDrm* AMediaDrm_createByUUID(const AMediaUUID uuid) {
+ AMediaDrm *mObj = new AMediaDrm();
+ mObj->mDrm = CreateDrmFromUUID(uuid);
+ return mObj;
+}
+
+EXPORT
+void AMediaDrm_release(AMediaDrm *mObj) {
+ if (mObj->mDrm != NULL) {
+ mObj->mDrm->setListener(NULL);
+ mObj->mDrm->destroyPlugin();
+ mObj->mDrm.clear();
+ }
+ delete mObj;
+}
+
+EXPORT
+media_status_t AMediaDrm_setOnEventListener(AMediaDrm *mObj, AMediaDrmEventListener listener) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ mObj->mListener = new DrmListener(mObj, listener);
+ mObj->mDrm->setListener(mObj->mListener);
+ return AMEDIA_OK;
+}
+
+
+static bool findId(AMediaDrm *mObj, const AMediaDrmByteArray &id, List<idvec_t>::iterator &iter) {
+ iter = mObj->mIds.begin();
+ while (iter != mObj->mIds.end()) {
+ if (iter->array() == id.ptr && iter->size() == id.length) {
+ return true;
+ }
+ ++iter;
+ }
+ return false;
+}
+
+EXPORT
+media_status_t AMediaDrm_openSession(AMediaDrm *mObj, AMediaDrmSessionId *sessionId) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ Vector<uint8_t> session;
+ status_t status = mObj->mDrm->openSession(session);
+ if (status == OK) {
+ mObj->mIds.push_front(session);
+ List<idvec_t>::iterator iter = mObj->mIds.begin();
+ sessionId->ptr = iter->array();
+ sessionId->length = iter->size();
+ }
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_closeSession(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+ mObj->mDrm->closeSession(*iter);
+ mObj->mIds.erase(iter);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_getKeyRequest(AMediaDrm *mObj, const AMediaDrmScope *scope,
+ const uint8_t *init, size_t initSize, const char *mimeType, AMediaDrmKeyType keyType,
+ const AMediaDrmKeyValue *optionalParameters, size_t numOptionalParameters,
+ const uint8_t **keyRequest, size_t *keyRequestSize) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!mimeType || !scope || !keyRequest || !keyRequestSize) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *scope, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ Vector<uint8_t> mdInit;
+ mdInit.appendArray(init, initSize);
+ DrmPlugin::KeyType mdKeyType;
+ switch (keyType) {
+ case KEY_TYPE_STREAMING:
+ mdKeyType = DrmPlugin::kKeyType_Streaming;
+ break;
+ case KEY_TYPE_OFFLINE:
+ mdKeyType = DrmPlugin::kKeyType_Offline;
+ break;
+ case KEY_TYPE_RELEASE:
+ mdKeyType = DrmPlugin::kKeyType_Release;
+ break;
+ default:
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ KeyedVector<String8, String8> mdOptionalParameters;
+ for (size_t i = 0; i < numOptionalParameters; i++) {
+ mdOptionalParameters.add(String8(optionalParameters[i].mKey),
+ String8(optionalParameters[i].mValue));
+ }
+ String8 defaultUrl;
+ status_t status = mObj->mDrm->getKeyRequest(*iter, mdInit, String8(mimeType),
+ mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl);
+ if (status != OK) {
+ return translateStatus(status);
+ } else {
+ *keyRequest = mObj->mKeyRequest.array();
+ *keyRequestSize = mObj->mKeyRequest.size();
+ }
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_provideKeyResponse(AMediaDrm *mObj, const AMediaDrmScope *scope,
+ const uint8_t *response, size_t responseSize, AMediaDrmKeySetId *keySetId) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!scope || !response || !responseSize || !keySetId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *scope, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+ Vector<uint8_t> mdResponse;
+ mdResponse.appendArray(response, responseSize);
+
+ Vector<uint8_t> mdKeySetId;
+ status_t status = mObj->mDrm->provideKeyResponse(*iter, mdResponse, mdKeySetId);
+ if (status == OK) {
+ mObj->mIds.push_front(mdKeySetId);
+ List<idvec_t>::iterator iter = mObj->mIds.begin();
+ keySetId->ptr = iter->array();
+ keySetId->length = iter->size();
+ } else {
+ keySetId->ptr = NULL;
+ keySetId->length = 0;
+ }
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_restoreKeys(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const AMediaDrmKeySetId *keySetId) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId || !keySetId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+ Vector<uint8_t> keySet;
+ keySet.appendArray(keySetId->ptr, keySetId->length);
+ return translateStatus(mObj->mDrm->restoreKeys(*iter, keySet));
+}
+
+EXPORT
+media_status_t AMediaDrm_removeKeys(AMediaDrm *mObj, const AMediaDrmSessionId *keySetId) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!keySetId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ status_t status;
+ if (!findId(mObj, *keySetId, iter)) {
+ Vector<uint8_t> keySet;
+ keySet.appendArray(keySetId->ptr, keySetId->length);
+ status = mObj->mDrm->removeKeys(keySet);
+ } else {
+ status = mObj->mDrm->removeKeys(*iter);
+ mObj->mIds.erase(iter);
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_queryKeyStatus(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ AMediaDrmKeyValue *keyValuePairs, size_t *numPairs) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId || !numPairs) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->queryKeyStatus(*iter, mObj->mQueryResults);
+ if (status != OK) {
+ *numPairs = 0;
+ return translateStatus(status);
+ }
+
+ if (mObj->mQueryResults.size() > *numPairs) {
+ *numPairs = mObj->mQueryResults.size();
+ return AMEDIA_DRM_SHORT_BUFFER;
+ }
+
+ for (size_t i = 0; i < mObj->mQueryResults.size(); i++) {
+ keyValuePairs[i].mKey = mObj->mQueryResults.keyAt(i).string();
+ keyValuePairs[i].mValue = mObj->mQueryResults.valueAt(i).string();
+ }
+ *numPairs = mObj->mQueryResults.size();
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_getProvisionRequest(AMediaDrm *mObj, const uint8_t **provisionRequest,
+ size_t *provisionRequestSize, const char **serverUrl) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!provisionRequest || !provisionRequestSize || !*provisionRequestSize || !serverUrl) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ status_t status = mObj->mDrm->getProvisionRequest(String8(""), String8(""),
+ mObj->mProvisionRequest, mObj->mProvisionUrl);
+ if (status != OK) {
+ return translateStatus(status);
+ } else {
+ *provisionRequest = mObj->mProvisionRequest.array();
+ *provisionRequestSize = mObj->mProvisionRequest.size();
+ *serverUrl = mObj->mProvisionUrl.string();
+ }
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_provideProvisionResponse(AMediaDrm *mObj,
+ const uint8_t *response, size_t responseSize) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!response || !responseSize) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ Vector<uint8_t> mdResponse;
+ mdResponse.appendArray(response, responseSize);
+
+ Vector<uint8_t> unused;
+ return translateStatus(mObj->mDrm->provideProvisionResponse(mdResponse, unused, unused));
+}
+
+EXPORT
+media_status_t AMediaDrm_getSecureStops(AMediaDrm *mObj,
+ AMediaDrmSecureStop *secureStops, size_t *numSecureStops) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!numSecureStops) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ status_t status = mObj->mDrm->getSecureStops(mObj->mSecureStops);
+ if (status != OK) {
+ *numSecureStops = 0;
+ return translateStatus(status);
+ }
+ if (*numSecureStops < mObj->mSecureStops.size()) {
+ *numSecureStops = mObj->mSecureStops.size();
+ return AMEDIA_DRM_SHORT_BUFFER;
+ }
+ List<Vector<uint8_t> >::iterator iter = mObj->mSecureStops.begin();
+ size_t i = 0;
+ while (iter != mObj->mSecureStops.end()) {
+ secureStops[i].ptr = iter->array();
+ secureStops[i].length = iter->size();
+ ++iter;
+ ++i;
+ }
+ *numSecureStops = mObj->mSecureStops.size();
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_releaseSecureStops(AMediaDrm *mObj,
+ const AMediaDrmSecureStop *ssRelease) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!ssRelease) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ Vector<uint8_t> release;
+ release.appendArray(ssRelease->ptr, ssRelease->length);
+ return translateStatus(mObj->mDrm->releaseSecureStops(release));
+}
+
+
+EXPORT
+media_status_t AMediaDrm_getPropertyString(AMediaDrm *mObj, const char *propertyName,
+ const char **propertyValue) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!propertyName || !propertyValue) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ status_t status = mObj->mDrm->getPropertyString(String8(propertyName),
+ mObj->mPropertyString);
+
+ if (status == OK) {
+ *propertyValue = mObj->mPropertyString.string();
+ } else {
+ *propertyValue = NULL;
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_getPropertyByteArray(AMediaDrm *mObj,
+ const char *propertyName, AMediaDrmByteArray *propertyValue) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!propertyName || !propertyValue) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ status_t status = mObj->mDrm->getPropertyByteArray(String8(propertyName),
+ mObj->mPropertyByteArray);
+
+ if (status == OK) {
+ propertyValue->ptr = mObj->mPropertyByteArray.array();
+ propertyValue->length = mObj->mPropertyByteArray.size();
+ } else {
+ propertyValue->ptr = NULL;
+ propertyValue->length = 0;
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_setPropertyString(AMediaDrm *mObj,
+ const char *propertyName, const char *value) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+
+ return translateStatus(mObj->mDrm->setPropertyString(String8(propertyName),
+ String8(value)));
+}
+
+EXPORT
+media_status_t AMediaDrm_setPropertyByteArray(AMediaDrm *mObj,
+ const char *propertyName, const uint8_t *value, size_t valueSize) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+
+ Vector<uint8_t> byteArray;
+ byteArray.appendArray(value, valueSize);
+
+ return translateStatus(mObj->mDrm->setPropertyByteArray(String8(propertyName),
+ byteArray));
+}
+
+
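+// Shared helper for AMediaDrm_encrypt/AMediaDrm_decrypt. Note that keyId and iv are assumed
+// to be exactly 16 bytes (kKeyIdSize / kIvSize below); only dataSize is caller-provided.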
+static media_status_t encrypt_decrypt_common(AMediaDrm *mObj,
+ const AMediaDrmSessionId &sessionId,
+ const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
+ const uint8_t *input, uint8_t *output, size_t dataSize, bool encrypt) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->setCipherAlgorithm(*iter, String8(cipherAlgorithm));
+ if (status != OK) {
+ return translateStatus(status);
+ }
+
+ Vector<uint8_t> keyIdVec;
+ const size_t kKeyIdSize = 16;
+ keyIdVec.appendArray(keyId, kKeyIdSize);
+
+ Vector<uint8_t> inputVec;
+ inputVec.appendArray(input, dataSize);
+
+ Vector<uint8_t> ivVec;
+ const size_t kIvSize = 16;
+ ivVec.appendArray(iv, kIvSize);
+
+ Vector<uint8_t> outputVec;
+ // Assign to the outer status so the check below reflects the crypto call.
+ if (encrypt) {
+ status = mObj->mDrm->encrypt(*iter, keyIdVec, inputVec, ivVec, outputVec);
+ } else {
+ status = mObj->mDrm->decrypt(*iter, keyIdVec, inputVec, ivVec, outputVec);
+ }
+ if (status == OK) {
+ memcpy(output, outputVec.array(), outputVec.size());
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_encrypt(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
+ const uint8_t *input, uint8_t *output, size_t dataSize) {
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ return encrypt_decrypt_common(mObj, *sessionId, cipherAlgorithm, keyId, iv,
+ input, output, dataSize, true);
+}
+
+EXPORT
+media_status_t AMediaDrm_decrypt(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
+ const uint8_t *input, uint8_t *output, size_t dataSize) {
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ return encrypt_decrypt_common(mObj, *sessionId, cipherAlgorithm, keyId, iv,
+ input, output, dataSize, false);
+}
+
+EXPORT
+media_status_t AMediaDrm_sign(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *macAlgorithm, uint8_t *keyId, uint8_t *message, size_t messageSize,
+ uint8_t *signature, size_t *signatureSize) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->setMacAlgorithm(*iter, String8(macAlgorithm));
+ if (status != OK) {
+ return translateStatus(status);
+ }
+
+ Vector<uint8_t> keyIdVec;
+ const size_t kKeyIdSize = 16;
+ keyIdVec.appendArray(keyId, kKeyIdSize);
+
+ Vector<uint8_t> messageVec;
+ messageVec.appendArray(message, messageSize);
+
+ Vector<uint8_t> signatureVec;
+ status = mObj->mDrm->sign(*iter, keyIdVec, messageVec, signatureVec);
+ if (signatureVec.size() > *signatureSize) {
+ return AMEDIA_DRM_SHORT_BUFFER;
+ }
+ if (status == OK) {
+ memcpy(signature, signatureVec.array(), signatureVec.size());
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_verify(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *macAlgorithm, uint8_t *keyId, const uint8_t *message, size_t messageSize,
+ const uint8_t *signature, size_t signatureSize) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->setMacAlgorithm(*iter, String8(macAlgorithm));
+ if (status != OK) {
+ return translateStatus(status);
+ }
+
+ Vector<uint8_t> keyIdVec;
+ const size_t kKeyIdSize = 16;
+ keyIdVec.appendArray(keyId, kKeyIdSize);
+
+ Vector<uint8_t> messageVec;
+ messageVec.appendArray(message, messageSize);
+
+ Vector<uint8_t> signatureVec;
+ signatureVec.appendArray(signature, signatureSize);
+
+ bool match;
+ status = mObj->mDrm->verify(*iter, keyIdVec, messageVec, signatureVec, match);
+ if (status == OK) {
+ return match ? AMEDIA_OK : AMEDIA_DRM_VERIFY_FAILED;
+ }
+ return translateStatus(status);
+}
+
+} // extern "C"
+
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
new file mode 100644
index 0000000..db57d0b
--- /dev/null
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -0,0 +1,360 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaExtractor"
+
+
+#include "NdkMediaError.h"
+#include "NdkMediaExtractor.h"
+#include "NdkMediaFormatPriv.h"
+
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <media/hardware/CryptoAPI.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <media/IMediaHTTPService.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+struct AMediaExtractor {
+ sp<NuMediaExtractor> mImpl;
+ sp<ABuffer> mPsshBuf;
+
+};
+
+extern "C" {
+
+EXPORT
+AMediaExtractor* AMediaExtractor_new() {
+ ALOGV("ctor");
+ AMediaExtractor *mData = new AMediaExtractor();
+ mData->mImpl = new NuMediaExtractor();
+ return mData;
+}
+
+EXPORT
+media_status_t AMediaExtractor_delete(AMediaExtractor *mData) {
+ ALOGV("dtor");
+ delete mData;
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset, off64_t length) {
+ ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+ return translate_error(mData->mImpl->setDataSource(fd, offset, length));
+}
+
+EXPORT
+media_status_t AMediaExtractor_setDataSource(AMediaExtractor *mData, const char *location) {
+ ALOGV("setDataSource(%s)", location);
+ // TODO: add header support
+
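+ // For http(s) locations an IMediaHTTPService binder is obtained through JNI via
+ // MediaHTTPService.createHttpServiceBinderIfNecessary(); if no binder comes back,
+ // setDataSource() is simply called without an HTTP service.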
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jobject service = NULL;
+ if (env == NULL) {
+ // No JNIEnv here, so there is nothing to clear on it; just fail.
+ ALOGE("setDataSource(path) must be called from a Java thread");
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ jclass mediahttpclass = env->FindClass("android/media/MediaHTTPService");
+ if (mediahttpclass == NULL) {
+ ALOGE("can't find MediaHttpService");
+ env->ExceptionClear();
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ jmethodID mediaHttpCreateMethod = env->GetStaticMethodID(mediahttpclass,
+ "createHttpServiceBinderIfNecessary", "(Ljava/lang/String;)Landroid/os/IBinder;");
+ if (mediaHttpCreateMethod == NULL) {
+ ALOGE("can't find method");
+ env->ExceptionClear();
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ jstring jloc = env->NewStringUTF(location);
+
+ service = env->CallStaticObjectMethod(mediahttpclass, mediaHttpCreateMethod, jloc);
+ env->DeleteLocalRef(jloc);
+
+ sp<IMediaHTTPService> httpService;
+ if (service != NULL) {
+ sp<IBinder> binder = ibinderForJavaObject(env, service);
+ httpService = interface_cast<IMediaHTTPService>(binder);
+ }
+
+ status_t err = mData->mImpl->setDataSource(httpService, location, NULL);
+ env->ExceptionClear();
+ return translate_error(err);
+}
+
+EXPORT
+size_t AMediaExtractor_getTrackCount(AMediaExtractor *mData) {
+ return mData->mImpl->countTracks();
+}
+
+EXPORT
+AMediaFormat* AMediaExtractor_getTrackFormat(AMediaExtractor *mData, size_t idx) {
+ sp<AMessage> format;
+ mData->mImpl->getTrackFormat(idx, &format);
+ return AMediaFormat_fromMsg(&format);
+}
+
+EXPORT
+media_status_t AMediaExtractor_selectTrack(AMediaExtractor *mData, size_t idx) {
+ ALOGV("selectTrack(%zu)", idx);
+ return translate_error(mData->mImpl->selectTrack(idx));
+}
+
+EXPORT
+media_status_t AMediaExtractor_unselectTrack(AMediaExtractor *mData, size_t idx) {
+ ALOGV("unselectTrack(%zu)", idx);
+ return translate_error(mData->mImpl->unselectTrack(idx));
+}
+
+EXPORT
+bool AMediaExtractor_advance(AMediaExtractor *mData) {
+ //ALOGV("advance");
+ return mData->mImpl->advance();
+}
+
+EXPORT
+media_status_t AMediaExtractor_seekTo(AMediaExtractor *ex, int64_t seekPosUs, SeekMode mode) {
+ android::MediaSource::ReadOptions::SeekMode sfmode;
+ if (mode == AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC) {
+ sfmode = android::MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC;
+ } else if (mode == AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC) {
+ sfmode = android::MediaSource::ReadOptions::SEEK_CLOSEST_SYNC;
+ } else {
+ sfmode = android::MediaSource::ReadOptions::SEEK_NEXT_SYNC;
+ }
+
+ return translate_error(ex->mImpl->seekTo(seekPosUs, sfmode));
+}
+
+EXPORT
+ssize_t AMediaExtractor_readSampleData(AMediaExtractor *mData, uint8_t *buffer, size_t capacity) {
+ //ALOGV("readSampleData");
+ sp<ABuffer> tmp = new ABuffer(buffer, capacity);
+ if (mData->mImpl->readSampleData(tmp) == OK) {
+ return tmp->size();
+ }
+ return -1;
+}
+
+EXPORT
+uint32_t AMediaExtractor_getSampleFlags(AMediaExtractor *mData) {
+ int sampleFlags = 0;
+ sp<MetaData> meta;
+ status_t err = mData->mImpl->getSampleMeta(&meta);
+ if (err != OK) {
+ return -1;
+ }
+ int32_t val;
+ if (meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+ sampleFlags |= AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC;
+ }
+
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (meta->findData(kKeyEncryptedSizes, &type, &data, &size)) {
+ sampleFlags |= AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED;
+ }
+ return sampleFlags;
+}
+
+EXPORT
+int AMediaExtractor_getSampleTrackIndex(AMediaExtractor *mData) {
+ size_t idx;
+ if (mData->mImpl->getSampleTrackIndex(&idx) != OK) {
+ return -1;
+ }
+ return idx;
+}
+
+EXPORT
+int64_t AMediaExtractor_getSampleTime(AMediaExtractor *mData) {
+ int64_t time;
+ if (mData->mImpl->getSampleTime(&time) != OK) {
+ return -1;
+ }
+ return time;
+}
+
+EXPORT
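+// Builds (on first call) and returns the PSSH info for the file. The result is cached in
+// ex->mPsshBuf and owned by the extractor, so it remains valid until the extractor is deleted.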
+PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor *ex) {
+
+ if (ex->mPsshBuf != NULL) {
+ return (PsshInfo*) ex->mPsshBuf->data();
+ }
+
+ sp<AMessage> format;
+ ex->mImpl->getFileFormat(&format);
+ sp<ABuffer> buffer;
+ if (!format->findBuffer("pssh", &buffer)) {
+ return NULL;
+ }
+
+ // the format of the buffer is 1 or more of:
+ // {
+ // 16 byte uuid
+ // 4 byte data length N
+ // N bytes of data
+ // }
+
+ // Determine the number of entries in the source data.
+ // Since we got the data from stagefright, we trust it is valid and properly formatted.
+ const uint8_t* data = buffer->data();
+ size_t len = buffer->size();
+ size_t numentries = 0;
+ while (len > 0) {
+ numentries++;
+
+ // skip uuid
+ data += 16;
+ len -= 16;
+
+ // get data length
+ uint32_t datalen = *((uint32_t*)data);
+ data += 4;
+ len -= 4;
+
+ // skip the data
+ data += datalen;
+ len -= datalen;
+ }
+
+ // there are <numentries> entries in the source buffer; we need
+ // (source buffer size) - (sizeof(uint32_t) * numentries) + sizeof(size_t)
+ // + ((sizeof(void*) + sizeof(size_t)) * numentries) bytes for the PsshInfo structure
+ // Or in other words, the data lengths in the source structure are replaced by size_t
+ // (which may be the same size or larger, for 64 bit), and in addition there is an
+ // extra pointer for each entry, and an extra size_t for the entire PsshInfo.
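+ // The destination layout written below is: one size_t entry count, then <numentries>
+ // fixed-size records of {16-byte uuid, size_t length, pointer}, then the concatenated
+ // raw data that each record's pointer refers into.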
+ size_t newsize = buffer->size() - (sizeof(uint32_t) * numentries) + sizeof(size_t)
+ + ((sizeof(void*) + sizeof(size_t)) * numentries);
+ ex->mPsshBuf = new ABuffer(newsize);
+ ex->mPsshBuf->setRange(0, newsize);
+
+ // copy data
+ const uint8_t* src = buffer->data();
+ uint8_t* dst = ex->mPsshBuf->data();
+ uint8_t* dstdata = dst + sizeof(size_t) + numentries * sizeof(PsshEntry);
+ *((size_t*)dst) = numentries;
+ dst += sizeof(size_t);
+ for (size_t i = 0; i < numentries; i++) {
+ // copy uuid
+ memcpy(dst, src, 16);
+ src += 16;
+ dst += 16;
+
+ // get/copy data length
+ uint32_t datalen = *((uint32_t*)src);
+ *((size_t*)dst) = datalen;
+ src += sizeof(uint32_t);
+ dst += sizeof(size_t);
+
+ // the next entry in the destination is a pointer to the actual data, which we store
+ // after the array of PsshEntry
+ *((void**)dst) = dstdata;
+ dst += sizeof(void*);
+
+ // copy the actual data
+ memcpy(dstdata, src, datalen);
+ dstdata += datalen;
+ src += datalen;
+ }
+
+ return (PsshInfo*) ex->mPsshBuf->data();
+}
+
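+// Builds an AMediaCodecCryptoInfo from the current sample's metadata: kKeyEncryptedSizes and
+// kKeyPlainSizes carry the per-subsample byte counts, kKeyCryptoKey/kKeyCryptoIV the 16-byte
+// key and IV, and kKeyCryptoMode the cipher mode (AES-CTR if absent).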
+EXPORT
+AMediaCodecCryptoInfo *AMediaExtractor_getSampleCryptoInfo(AMediaExtractor *ex) {
+ sp<MetaData> meta;
+ if (ex->mImpl->getSampleMeta(&meta) != OK) {
+ return NULL;
+ }
+
+ uint32_t type;
+ const void *crypteddata;
+ size_t cryptedsize;
+ if (!meta->findData(kKeyEncryptedSizes, &type, &crypteddata, &cryptedsize)) {
+ return NULL;
+ }
+ size_t numSubSamples = cryptedsize / sizeof(size_t);
+
+ const void *cleardata;
+ size_t clearsize;
+ if (meta->findData(kKeyPlainSizes, &type, &cleardata, &clearsize)) {
+ if (clearsize != cryptedsize) {
+ // The two must be of the same length.
+ return NULL;
+ }
+ }
+
+ const void *key;
+ size_t keysize;
+ if (meta->findData(kKeyCryptoKey, &type, &key, &keysize)) {
+ if (keysize != 16) {
+ // Keys must be 16 bytes in length.
+ return NULL;
+ }
+ }
+
+ const void *iv;
+ size_t ivsize;
+ if (meta->findData(kKeyCryptoIV, &type, &iv, &ivsize)) {
+ if (ivsize != 16) {
+ // IVs must be 16 bytes in length.
+ return NULL;
+ }
+ }
+
+ int32_t mode;
+ if (!meta->findInt32(kKeyCryptoMode, &mode)) {
+ mode = CryptoPlugin::kMode_AES_CTR;
+ }
+
+ return AMediaCodecCryptoInfo_new(
+ numSubSamples,
+ (uint8_t*) key,
+ (uint8_t*) iv,
+ (cryptoinfo_mode_t) mode,
+ (size_t*) cleardata,
+ (size_t*) crypteddata);
+}
+
+
+} // extern "C"
+
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
new file mode 100644
index 0000000..a354d58
--- /dev/null
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -0,0 +1,260 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaFormat"
+
+#include <inttypes.h>
+
+#include "NdkMediaFormat.h"
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
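+// mDebug backs the pointer returned by AMediaFormat_toString(); mStringCache backs the
+// pointers returned by AMediaFormat_getString(). Both live as long as the AMediaFormat.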
+struct AMediaFormat {
+ sp<AMessage> mFormat;
+ String8 mDebug;
+ KeyedVector<String8, String8> mStringCache;
+};
+
+extern "C" {
+
+// private functions for conversion to/from AMessage
+AMediaFormat* AMediaFormat_fromMsg(const void* data) {
+ ALOGV("private ctor");
+ AMediaFormat* mData = new AMediaFormat();
+ mData->mFormat = *((sp<AMessage>*)data);
+ return mData;
+}
+
+void AMediaFormat_getFormat(const AMediaFormat* mData, void* dest) {
+ *((sp<AMessage>*)dest) = mData->mFormat;
+}
+
+
+/*
+ * public functions follow
+ */
+EXPORT
+AMediaFormat *AMediaFormat_new() {
+ ALOGV("ctor");
+ sp<AMessage> msg = new AMessage();
+ return AMediaFormat_fromMsg(&msg);
+}
+
+EXPORT
+media_status_t AMediaFormat_delete(AMediaFormat *mData) {
+ ALOGV("dtor");
+ delete mData;
+ return AMEDIA_OK;
+}
+
+
+EXPORT
+const char* AMediaFormat_toString(AMediaFormat *mData) {
+ sp<AMessage> f = mData->mFormat;
+ String8 ret;
+ int num = f->countEntries();
+ for (int i = 0; i < num; i++) {
+ if (i != 0) {
+ ret.append(", ");
+ }
+ AMessage::Type t;
+ const char *name = f->getEntryNameAt(i, &t);
+ ret.append(name);
+ ret.append(": ");
+ switch (t) {
+ case AMessage::kTypeInt32:
+ {
+ int32_t val;
+ f->findInt32(name, &val);
+ ret.appendFormat("int32(%" PRId32 ")", val);
+ break;
+ }
+ case AMessage::kTypeInt64:
+ {
+ int64_t val;
+ f->findInt64(name, &val);
+ ret.appendFormat("int64(%" PRId64 ")", val);
+ break;
+ }
+ case AMessage::kTypeSize:
+ {
+ size_t val;
+ f->findSize(name, &val);
+ ret.appendFormat("size_t(%zu)", val);
+ break;
+ }
+ case AMessage::kTypeFloat:
+ {
+ float val;
+ f->findFloat(name, &val);
+ ret.appendFormat("float(%f)", val);
+ break;
+ }
+ case AMessage::kTypeDouble:
+ {
+ double val;
+ f->findDouble(name, &val);
+ ret.appendFormat("double(%f)", val);
+ break;
+ }
+ case AMessage::kTypeString:
+ {
+ AString val;
+ f->findString(name, &val);
+ ret.appendFormat("string(%s)", val.c_str());
+ break;
+ }
+ case AMessage::kTypeBuffer:
+ {
+ ret.appendFormat("data");
+ break;
+ }
+ default:
+ {
+ ret.appendFormat("unknown(%d)", t);
+ break;
+ }
+ }
+ }
+ ret.append("}");
+ mData->mDebug = ret;
+ return mData->mDebug.string();
+}
+
+EXPORT
+bool AMediaFormat_getInt32(AMediaFormat* format, const char *name, int32_t *out) {
+ return format->mFormat->findInt32(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getInt64(AMediaFormat* format, const char *name, int64_t *out) {
+ return format->mFormat->findInt64(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getFloat(AMediaFormat* format, const char *name, float *out) {
+ return format->mFormat->findFloat(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getSize(AMediaFormat* format, const char *name, size_t *out) {
+ return format->mFormat->findSize(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getBuffer(AMediaFormat* format, const char *name, void** data, size_t *outsize) {
+ sp<ABuffer> buf;
+ if (format->mFormat->findBuffer(name, &buf)) {
+ *data = buf->data() + buf->offset();
+ *outsize = buf->size();
+ return true;
+ }
+ return false;
+}
+
+EXPORT
+bool AMediaFormat_getString(AMediaFormat* mData, const char *name, const char **out) {
+
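+ // The returned const char* must stay valid after this call, so the backing String8 is
+ // cached per key name; any stale entry for this key is dropped first and re-added below.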
+ for (size_t i = 0; i < mData->mStringCache.size(); i++) {
+ if (strcmp(mData->mStringCache.keyAt(i).string(), name) == 0) {
+ mData->mStringCache.removeItemsAt(i, 1);
+ break;
+ }
+ }
+
+ AString tmp;
+ if (mData->mFormat->findString(name, &tmp)) {
+ String8 ret(tmp.c_str());
+ mData->mStringCache.add(String8(name), ret);
+ *out = ret.string();
+ return true;
+ }
+ return false;
+}
+
+EXPORT
+void AMediaFormat_setInt32(AMediaFormat* format, const char *name, int32_t value) {
+ format->mFormat->setInt32(name, value);
+}
+
+EXPORT
+void AMediaFormat_setInt64(AMediaFormat* format, const char *name, int64_t value) {
+ format->mFormat->setInt64(name, value);
+}
+
+EXPORT
+void AMediaFormat_setFloat(AMediaFormat* format, const char* name, float value) {
+ format->mFormat->setFloat(name, value);
+}
+
+EXPORT
+void AMediaFormat_setString(AMediaFormat* format, const char* name, const char* value) {
+ // AMessage::setString() makes a copy of the string
+ format->mFormat->setString(name, value, strlen(value));
+}
+
+EXPORT
+void AMediaFormat_setBuffer(AMediaFormat* format, const char* name, void* data, size_t size) {
+ // the ABuffer(void*, size_t) constructor doesn't take ownership of the data, so create
+ // a new buffer and copy the data into it
+ sp<ABuffer> buf = new ABuffer(size);
+ memcpy(buf->data(), data, size);
+ buf->setRange(0, size);
+ // AMessage::setBuffer() increases the refcount of the buffer
+ format->mFormat->setBuffer(name, buf);
+}
+
+
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_PROFILE = "aac-profile";
+EXPORT const char* AMEDIAFORMAT_KEY_BIT_RATE = "bitrate";
+EXPORT const char* AMEDIAFORMAT_KEY_CHANNEL_COUNT = "channel-count";
+EXPORT const char* AMEDIAFORMAT_KEY_CHANNEL_MASK = "channel-mask";
+EXPORT const char* AMEDIAFORMAT_KEY_COLOR_FORMAT = "color-format";
+EXPORT const char* AMEDIAFORMAT_KEY_DURATION = "durationUs";
+EXPORT const char* AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL = "flac-compression-level";
+EXPORT const char* AMEDIAFORMAT_KEY_FRAME_RATE = "frame-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_HEIGHT = "height";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_ADTS = "is-adts";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT = "is-autoselect";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_DEFAULT = "is-default";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE = "is-forced-subtitle";
+EXPORT const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL = "i-frame-interval";
+EXPORT const char* AMEDIAFORMAT_KEY_LANGUAGE = "language";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_WIDTH = "max-width";
+EXPORT const char* AMEDIAFORMAT_KEY_MIME = "mime";
+EXPORT const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP = "push-blank-buffers-on-shutdown";
+EXPORT const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER = "repeat-previous-frame-after";
+EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_RATE = "sample-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_WIDTH = "width";
+EXPORT const char* AMEDIAFORMAT_KEY_STRIDE = "stride";
+
+
+} // extern "C"
+
+
diff --git a/media/ndk/NdkMediaFormatPriv.h b/media/ndk/NdkMediaFormatPriv.h
new file mode 100644
index 0000000..02342d9
--- /dev/null
+++ b/media/ndk/NdkMediaFormatPriv.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_FORMAT_PRIV_H
+#define _NDK_MEDIA_FORMAT_PRIV_H
+
+#include <NdkMediaFormat.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+AMediaFormat* AMediaFormat_fromMsg(const void*);
+void AMediaFormat_getFormat(const AMediaFormat* mData, void* dest);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif // _NDK_MEDIA_FORMAT_PRIV_H
+
diff --git a/media/ndk/NdkMediaMuxer.cpp b/media/ndk/NdkMediaMuxer.cpp
new file mode 100644
index 0000000..b1b0362
--- /dev/null
+++ b/media/ndk/NdkMediaMuxer.cpp
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaMuxer"
+
+
+#include "NdkMediaMuxer.h"
+#include "NdkMediaCodec.h"
+#include "NdkMediaFormatPriv.h"
+
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaMuxer.h>
+#include <media/IMediaHTTPService.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+struct AMediaMuxer {
+ sp<MediaMuxer> mImpl;
+
+};
+
+extern "C" {
+
+EXPORT
+AMediaMuxer* AMediaMuxer_new(int fd, OutputFormat format) {
+ ALOGV("ctor");
+ AMediaMuxer *mData = new AMediaMuxer();
+ mData->mImpl = new MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+ return mData;
+}
+
+EXPORT
+media_status_t AMediaMuxer_delete(AMediaMuxer *muxer) {
+ ALOGV("dtor");
+ delete muxer;
+ return AMEDIA_OK;
+}
+
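+// MediaMuxer::setLocation() takes fixed-point coordinates (degrees multiplied by 10000),
+// hence the scaling below.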
+EXPORT
+media_status_t AMediaMuxer_setLocation(AMediaMuxer *muxer, float latitude, float longitude) {
+ return translate_error(muxer->mImpl->setLocation(latitude * 10000, longitude * 10000));
+}
+
+EXPORT
+media_status_t AMediaMuxer_setOrientationHint(AMediaMuxer *muxer, int degrees) {
+ return translate_error(muxer->mImpl->setOrientationHint(degrees));
+}
+
+EXPORT
+ssize_t AMediaMuxer_addTrack(AMediaMuxer *muxer, const AMediaFormat *format) {
+ sp<AMessage> msg;
+ AMediaFormat_getFormat(format, &msg);
+ ssize_t ret = muxer->mImpl->addTrack(msg);
+ // addTrack() returns a track index on success and a negative status_t on failure;
+ // only the failure case goes through translate_error().
+ return (ret >= 0) ? ret : translate_error(ret);
+}
+
+EXPORT
+media_status_t AMediaMuxer_start(AMediaMuxer *muxer) {
+ return translate_error(muxer->mImpl->start());
+}
+
+EXPORT
+media_status_t AMediaMuxer_stop(AMediaMuxer *muxer) {
+ return translate_error(muxer->mImpl->stop());
+}
+
+EXPORT
+media_status_t AMediaMuxer_writeSampleData(AMediaMuxer *muxer,
+ size_t trackIdx, const uint8_t *data, const AMediaCodecBufferInfo *info) {
+ sp<ABuffer> buf = new ABuffer((void*)(data + info->offset), info->size);
+ return translate_error(
+ muxer->mImpl->writeSampleData(buf, trackIdx, info->presentationTimeUs, info->flags));
+}
+
+
+} // extern "C"
+