Diffstat (limited to 'media')
241 files changed, 14952 insertions, 7192 deletions
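One pattern recurs across the effect libraries touched below (downmix, LVM bundle/reverb wrappers, preprocessing, visualizer, test libs): the query_num_effects/query_effect entry points are dropped from audio_effect_library_t, each library is built with LOCAL_CFLAGS += -fvisibility=hidden, and only the AUDIO_EFFECT_LIBRARY_INFO_SYM entry point is re-exported. A minimal sketch of that pattern is shown here; the ExampleLib_* names are hypothetical and not part of this change, and the header path and struct fields are assumed to match the AOSP <hardware/audio_effect.h> of this era.

    /* Sketch only, not AOSP source. Build with: LOCAL_CFLAGS += -fvisibility=hidden */
    #include <errno.h>
    #include <hardware/audio_effect.h>

    /* Internal entry points: left with default (hidden) visibility, so they are
     * not exported from the shared library. */
    static int32_t ExampleLib_Create(const effect_uuid_t *uuid, int32_t sessionId,
                                     int32_t ioId, effect_handle_t *pHandle) {
        (void) uuid; (void) sessionId; (void) ioId; (void) pHandle;
        return -EINVAL; /* stub */
    }

    static int32_t ExampleLib_Release(effect_handle_t handle) {
        (void) handle;
        return 0; /* stub */
    }

    static int32_t ExampleLib_GetDescriptor(const effect_uuid_t *uuid,
                                            effect_descriptor_t *pDescriptor) {
        (void) uuid; (void) pDescriptor;
        return -EINVAL; /* stub */
    }

    /* This is the only symbol that needs to be exported */
    __attribute__ ((visibility ("default")))
    audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
        .tag = AUDIO_EFFECT_LIBRARY_TAG,
        .version = EFFECT_LIBRARY_API_VERSION,
        .name = "Example Library",
        .implementor = "Example",
        .create_effect = ExampleLib_Create,
        .release_effect = ExampleLib_Release,
        .get_descriptor = ExampleLib_GetDescriptor,
    };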
diff --git a/media/common_time/Android.mk b/media/common_time/Android.mk index 526f17b..632acbc 100644 --- a/media/common_time/Android.mk +++ b/media/common_time/Android.mk @@ -16,6 +16,7 @@ LOCAL_SRC_FILES := cc_helper.cpp \ utils.cpp LOCAL_SHARED_LIBRARIES := libbinder \ libhardware \ - libutils + libutils \ + liblog include $(BUILD_SHARED_LIBRARY) diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk index 95ca6fd..5d0a87c 100644 --- a/media/libeffects/downmix/Android.mk +++ b/media/libeffects/downmix/Android.mk @@ -7,7 +7,7 @@ LOCAL_SRC_FILES:= \ EffectDownmix.c LOCAL_SHARED_LIBRARIES := \ - libcutils + libcutils liblog LOCAL_MODULE:= libdownmix @@ -25,4 +25,6 @@ LOCAL_C_INCLUDES := \ LOCAL_PRELINK_MODULE := false +LOCAL_CFLAGS += -fvisibility=hidden + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c index 5bf052a..f17a6e8 100644 --- a/media/libeffects/downmix/EffectDownmix.c +++ b/media/libeffects/downmix/EffectDownmix.c @@ -58,13 +58,13 @@ const struct effect_interface_s gDownmixInterface = { NULL /* no process_reverse function, no reference stream needed */ }; +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, name : "Downmix Library", implementor : "The Android Open Source Project", - query_num_effects : DownmixLib_QueryNumberEffects, - query_effect : DownmixLib_QueryEffect, create_effect : DownmixLib_Create, release_effect : DownmixLib_Release, get_descriptor : DownmixLib_GetDescriptor, @@ -159,25 +159,6 @@ void Downmix_testIndexComputation(uint32_t mask) { /*--- Effect Library Interface Implementation ---*/ -int32_t DownmixLib_QueryNumberEffects(uint32_t *pNumEffects) { - ALOGV("DownmixLib_QueryNumberEffects()"); - *pNumEffects = kNbEffects; - return 0; -} - -int32_t DownmixLib_QueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) { - ALOGV("DownmixLib_QueryEffect() index=%d", index); - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index >= (uint32_t)kNbEffects) { - return -EINVAL; - } - memcpy(pDescriptor, gDescriptors[index], sizeof(effect_descriptor_t)); - return 0; -} - - int32_t DownmixLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, diff --git a/media/libeffects/downmix/EffectDownmix.h b/media/libeffects/downmix/EffectDownmix.h index be3ca3f..cb6b957 100644 --- a/media/libeffects/downmix/EffectDownmix.h +++ b/media/libeffects/downmix/EffectDownmix.h @@ -65,9 +65,6 @@ const uint32_t kUnsupported = * Effect API *------------------------------------ */ -int32_t DownmixLib_QueryNumberEffects(uint32_t *pNumEffects); -int32_t DownmixLib_QueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor); int32_t DownmixLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, diff --git a/media/libeffects/factory/Android.mk b/media/libeffects/factory/Android.mk index 6e69151..60a6ce5 100644 --- a/media/libeffects/factory/Android.mk +++ b/media/libeffects/factory/Android.mk @@ -7,7 +7,7 @@ LOCAL_SRC_FILES:= \ EffectsFactory.c LOCAL_SHARED_LIBRARIES := \ - libcutils + libcutils liblog LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES) LOCAL_MODULE:= libeffects diff --git a/media/libeffects/lvm/lib/Android.mk b/media/libeffects/lvm/lib/Android.mk index f49267e..bb56c75 100644 --- a/media/libeffects/lvm/lib/Android.mk +++ 
b/media/libeffects/lvm/lib/Android.mk @@ -105,8 +105,6 @@ LOCAL_SRC_FILES:= \ LOCAL_MODULE:= libmusicbundle - - LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/Eq/lib \ $(LOCAL_PATH)/Eq/src \ @@ -121,8 +119,12 @@ LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/StereoWidening/src \ $(LOCAL_PATH)/StereoWidening/lib +LOCAL_CFLAGS += -fvisibility=hidden + include $(BUILD_STATIC_LIBRARY) + + # Reverb library include $(CLEAR_VARS) @@ -168,12 +170,11 @@ LOCAL_SRC_FILES:= \ LOCAL_MODULE:= libreverb - - LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/Reverb/lib \ $(LOCAL_PATH)/Reverb/src \ $(LOCAL_PATH)/Common/lib \ $(LOCAL_PATH)/Common/src +LOCAL_CFLAGS += -fvisibility=hidden include $(BUILD_STATIC_LIBRARY) diff --git a/media/libeffects/lvm/wrapper/Android.mk b/media/libeffects/lvm/wrapper/Android.mk index 4313424..f1af389 100644 --- a/media/libeffects/lvm/wrapper/Android.mk +++ b/media/libeffects/lvm/wrapper/Android.mk @@ -9,28 +9,27 @@ LOCAL_ARM_MODE := arm LOCAL_SRC_FILES:= \ Bundle/EffectBundle.cpp +LOCAL_CFLAGS += -fvisibility=hidden + LOCAL_MODULE:= libbundlewrapper LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx - - LOCAL_STATIC_LIBRARIES += libmusicbundle LOCAL_SHARED_LIBRARIES := \ libcutils \ libdl - LOCAL_C_INCLUDES += \ $(LOCAL_PATH)/Bundle \ $(LOCAL_PATH)/../lib/Common/lib/ \ $(LOCAL_PATH)/../lib/Bundle/lib/ \ $(call include-path-for, audio-effects) - include $(BUILD_SHARED_LIBRARY) + # reverb wrapper include $(CLEAR_VARS) @@ -39,12 +38,12 @@ LOCAL_ARM_MODE := arm LOCAL_SRC_FILES:= \ Reverb/EffectReverb.cpp +LOCAL_CFLAGS += -fvisibility=hidden + LOCAL_MODULE:= libreverbwrapper LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx - - LOCAL_STATIC_LIBRARIES += libreverb LOCAL_SHARED_LIBRARIES := \ diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp index d706c2d..85232e7 100644 --- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp +++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp @@ -82,7 +82,7 @@ const effect_descriptor_t gBassBoostDescriptor = { {0x0634f220, 0xddd4, 0x11db, 0xa0fc, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, {0x8631f300, 0x72e2, 0x11df, 0xb57e, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid EFFECT_CONTROL_API_VERSION, - (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_DEVICE_IND + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_DEVICE_IND | EFFECT_FLAG_VOLUME_CTRL), BASS_BOOST_CUP_LOAD_ARM9E, BUNDLE_MEM_USAGE, @@ -108,7 +108,7 @@ const effect_descriptor_t gEqualizerDescriptor = { {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type {0xce772f20, 0x847d, 0x11df, 0xbb17, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid Eq NXP EFFECT_CONTROL_API_VERSION, - (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_LAST | EFFECT_FLAG_VOLUME_CTRL), + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_VOLUME_CTRL), EQUALIZER_CUP_LOAD_ARM9E, BUNDLE_MEM_USAGE, "Equalizer", @@ -158,42 +158,6 @@ int Volume_getParameter (EffectContext *pContext, int Effect_setEnabled(EffectContext *pContext, bool enabled); /* Effect Library Interface Implementation */ -extern "C" int EffectQueryNumberEffects(uint32_t *pNumEffects){ - ALOGV("\n\tEffectQueryNumberEffects start"); - *pNumEffects = 4; - ALOGV("\tEffectQueryNumberEffects creating %d effects", *pNumEffects); - ALOGV("\tEffectQueryNumberEffects end\n"); - return 0; -} /* end EffectQueryNumberEffects */ - -extern "C" int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor){ - 
ALOGV("\n\tEffectQueryEffect start"); - ALOGV("\tEffectQueryEffect processing index %d", index); - - if (pDescriptor == NULL){ - ALOGV("\tLVM_ERROR : EffectQueryEffect was passed NULL pointer"); - return -EINVAL; - } - if (index > 3){ - ALOGV("\tLVM_ERROR : EffectQueryEffect index out of range %d", index); - return -ENOENT; - } - if(index == LVM_BASS_BOOST){ - ALOGV("\tEffectQueryEffect processing LVM_BASS_BOOST"); - *pDescriptor = gBassBoostDescriptor; - }else if(index == LVM_VIRTUALIZER){ - ALOGV("\tEffectQueryEffect processing LVM_VIRTUALIZER"); - *pDescriptor = gVirtualizerDescriptor; - } else if(index == LVM_EQUALIZER){ - ALOGV("\tEffectQueryEffect processing LVM_EQUALIZER"); - *pDescriptor = gEqualizerDescriptor; - } else if(index == LVM_VOLUME){ - ALOGV("\tEffectQueryEffect processing LVM_VOLUME"); - *pDescriptor = gVolumeDescriptor; - } - ALOGV("\tEffectQueryEffect end\n"); - return 0; -} /* end EffectQueryEffect */ extern "C" int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, @@ -260,6 +224,7 @@ extern "C" int EffectCreate(const effect_uuid_t *uuid, pContext->pBundledContext->NumberEffectsEnabled = 0; pContext->pBundledContext->NumberEffectsCalled = 0; pContext->pBundledContext->firstVolume = LVM_TRUE; + pContext->pBundledContext->volume = 0; #ifdef LVM_PCM char fileName[256]; @@ -3299,13 +3264,13 @@ const struct effect_interface_s gLvmEffectInterface = { NULL, }; /* end gLvmEffectInterface */ +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, name : "Effect Bundle Library", implementor : "NXP Software Ltd.", - query_num_effects : android::EffectQueryNumberEffects, - query_effect : android::EffectQueryEffect, create_effect : android::EffectCreate, release_effect : android::EffectRelease, get_descriptor : android::EffectGetDescriptor, diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp index 941d651..8a96212 100755..100644 --- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp +++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp @@ -186,30 +186,6 @@ int Reverb_getParameter (ReverbContext *pContext, int Reverb_LoadPreset (ReverbContext *pContext); /* Effect Library Interface Implementation */ -extern "C" int EffectQueryNumberEffects(uint32_t *pNumEffects){ - ALOGV("\n\tEffectQueryNumberEffects start"); - *pNumEffects = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *); - ALOGV("\tEffectQueryNumberEffects creating %d effects", *pNumEffects); - ALOGV("\tEffectQueryNumberEffects end\n"); - return 0; -} /* end EffectQueryNumberEffects */ - -extern "C" int EffectQueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor){ - ALOGV("\n\tEffectQueryEffect start"); - ALOGV("\tEffectQueryEffect processing index %d", index); - if (pDescriptor == NULL){ - ALOGV("\tLVM_ERROR : EffectQueryEffect was passed NULL pointer"); - return -EINVAL; - } - if (index >= sizeof(gDescriptors) / sizeof(const effect_descriptor_t *)) { - ALOGV("\tLVM_ERROR : EffectQueryEffect index out of range %d", index); - return -ENOENT; - } - *pDescriptor = *gDescriptors[index]; - ALOGV("\tEffectQueryEffect end\n"); - return 0; -} /* end EffectQueryEffect */ extern "C" int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, @@ -640,10 +616,6 @@ int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){ || 
pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE); CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT); - if(pConfig->inputCfg.samplingRate != 44100){ - return -EINVAL; - } - //ALOGV("\tReverb_setConfig calling memcpy"); pContext->config = *pConfig; @@ -672,7 +644,7 @@ int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){ return -EINVAL; } - if(pContext->SampleRate != SampleRate){ + if (pContext->SampleRate != SampleRate) { LVREV_ControlParams_st ActiveParams; LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; @@ -686,11 +658,14 @@ int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){ LVM_ERROR_CHECK(LvmStatus, "LVREV_GetControlParameters", "Reverb_setConfig") if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + ActiveParams.SampleRate = SampleRate; + LvmStatus = LVREV_SetControlParameters(pContext->hInstance, &ActiveParams); LVM_ERROR_CHECK(LvmStatus, "LVREV_SetControlParameters", "Reverb_setConfig") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; //ALOGV("\tReverb_setConfig Succesfully called LVREV_SetControlParameters\n"); - + pContext->SampleRate = SampleRate; }else{ //ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate); } @@ -842,6 +817,7 @@ int Reverb_init(ReverbContext *pContext){ /* General parameters */ params.OperatingMode = LVM_MODE_ON; params.SampleRate = LVM_FS_44100; + pContext->SampleRate = LVM_FS_44100; if(pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO){ params.SourceFormat = LVM_MONO; @@ -2170,13 +2146,13 @@ const struct effect_interface_s gReverbInterface = { NULL, }; /* end gReverbInterface */ +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, name : "Reverb Library", implementor : "NXP Software Ltd.", - query_num_effects : android::EffectQueryNumberEffects, - query_effect : android::EffectQueryEffect, create_effect : android::EffectCreate, release_effect : android::EffectRelease, get_descriptor : android::EffectGetDescriptor, diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk index c13b9d4..c344352 100755..100644 --- a/media/libeffects/preprocessing/Android.mk +++ b/media/libeffects/preprocessing/Android.mk @@ -21,7 +21,8 @@ LOCAL_C_INCLUDES += $(call include-path-for, speex) LOCAL_SHARED_LIBRARIES := \ libwebrtc_audio_preprocessing \ libspeexresampler \ - libutils + libutils \ + liblog ifeq ($(TARGET_SIMULATOR),true) LOCAL_LDLIBS += -ldl @@ -29,4 +30,6 @@ else LOCAL_SHARED_LIBRARIES += libdl endif +LOCAL_CFLAGS += -fvisibility=hidden + include $(BUILD_SHARED_LIBRARY) diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp index 597866a..25586e8 100755..100644 --- a/media/libeffects/preprocessing/PreProcessing.cpp +++ b/media/libeffects/preprocessing/PreProcessing.cpp @@ -1818,30 +1818,6 @@ const struct effect_interface_s sEffectInterfaceReverse = { // Effect Library Interface Implementation //------------------------------------------------------------------------------ -int PreProcessingLib_QueryNumberEffects(uint32_t *pNumEffects) -{ - if (PreProc_Init() != 0) { - return sInitStatus; - } - if (pNumEffects == NULL) { - return -EINVAL; - } - *pNumEffects = PREPROC_NUM_EFFECTS; - return sInitStatus; -} - -int PreProcessingLib_QueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) -{ - if (PreProc_Init() != 
0) { - return sInitStatus; - } - if (index >= PREPROC_NUM_EFFECTS) { - return -EINVAL; - } - *pDescriptor = *sDescriptors[index]; - return 0; -} - int PreProcessingLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -1913,13 +1889,13 @@ int PreProcessingLib_GetDescriptor(const effect_uuid_t *uuid, return 0; } +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, name : "Audio Preprocessing Library", implementor : "The Android Open Source Project", - query_num_effects : PreProcessingLib_QueryNumberEffects, - query_effect : PreProcessingLib_QueryEffect, create_effect : PreProcessingLib_Create, release_effect : PreProcessingLib_Release, get_descriptor : PreProcessingLib_GetDescriptor diff --git a/media/libeffects/testlibs/EffectEqualizer.cpp b/media/libeffects/testlibs/EffectEqualizer.cpp index 90ebe1f..c35453b 100644 --- a/media/libeffects/testlibs/EffectEqualizer.cpp +++ b/media/libeffects/testlibs/EffectEqualizer.cpp @@ -123,23 +123,6 @@ int Equalizer_setParameter(AudioEqualizer * pEqualizer, int32_t *pParam, void *p //--- Effect Library Interface Implementation // -extern "C" int EffectQueryNumberEffects(uint32_t *pNumEffects) { - *pNumEffects = 1; - return 0; -} /* end EffectQueryNumberEffects */ - -extern "C" int EffectQueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor) { - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index > 0) { - return -EINVAL; - } - *pDescriptor = gEqualizerDescriptor; - return 0; -} /* end EffectQueryNext */ - extern "C" int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -771,8 +754,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { version : EFFECT_LIBRARY_API_VERSION, name : "Test Equalizer Library", implementor : "The Android Open Source Project", - query_num_effects : android::EffectQueryNumberEffects, - query_effect : android::EffectQueryEffect, create_effect : android::EffectCreate, release_effect : android::EffectRelease, get_descriptor : android::EffectGetDescriptor, diff --git a/media/libeffects/testlibs/EffectReverb.c b/media/libeffects/testlibs/EffectReverb.c index a87a834..c37f392 100644 --- a/media/libeffects/testlibs/EffectReverb.c +++ b/media/libeffects/testlibs/EffectReverb.c @@ -94,23 +94,6 @@ static const effect_descriptor_t * const gDescriptors[] = { /*--- Effect Library Interface Implementation ---*/ -int EffectQueryNumberEffects(uint32_t *pNumEffects) { - *pNumEffects = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *); - return 0; -} - -int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) { - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index >= sizeof(gDescriptors) / sizeof(const effect_descriptor_t *)) { - return -EINVAL; - } - memcpy(pDescriptor, gDescriptors[index], - sizeof(effect_descriptor_t)); - return 0; -} - int EffectCreate(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -2222,8 +2205,6 @@ audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { .version = EFFECT_LIBRARY_API_VERSION, .name = "Test Equalizer Library", .implementor = "The Android Open Source Project", - .query_num_effects = EffectQueryNumberEffects, - .query_effect = EffectQueryEffect, .create_effect = EffectCreate, .release_effect = EffectRelease, .get_descriptor = EffectGetDescriptor, diff --git a/media/libeffects/testlibs/EffectReverb.h 
b/media/libeffects/testlibs/EffectReverb.h index 1fb14a7..e5248fe 100644 --- a/media/libeffects/testlibs/EffectReverb.h +++ b/media/libeffects/testlibs/EffectReverb.h @@ -300,9 +300,6 @@ typedef struct reverb_module_s { * Effect API *------------------------------------ */ -int EffectQueryNumberEffects(uint32_t *pNumEffects); -int EffectQueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor); int EffectCreate(const effect_uuid_t *effectUID, int32_t sessionId, int32_t ioId, diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk index 76b5110..e196eb2 100644 --- a/media/libeffects/visualizer/Android.mk +++ b/media/libeffects/visualizer/Android.mk @@ -6,10 +6,11 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ EffectVisualizer.cpp -LOCAL_CFLAGS+= -O2 +LOCAL_CFLAGS+= -O2 -fvisibility=hidden LOCAL_SHARED_LIBRARIES := \ libcutils \ + liblog \ libdl LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/soundfx diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp index 44baf93..e7eccf1 100644 --- a/media/libeffects/visualizer/EffectVisualizer.cpp +++ b/media/libeffects/visualizer/EffectVisualizer.cpp @@ -177,23 +177,6 @@ int Visualizer_init(VisualizerContext *pContext) //--- Effect Library Interface Implementation // -int VisualizerLib_QueryNumberEffects(uint32_t *pNumEffects) { - *pNumEffects = 1; - return 0; -} - -int VisualizerLib_QueryEffect(uint32_t index, - effect_descriptor_t *pDescriptor) { - if (pDescriptor == NULL) { - return -EINVAL; - } - if (index > 0) { - return -EINVAL; - } - *pDescriptor = gVisualizerDescriptor; - return 0; -} - int VisualizerLib_Create(const effect_uuid_t *uuid, int32_t sessionId, int32_t ioId, @@ -574,14 +557,13 @@ const struct effect_interface_s gVisualizerInterface = { NULL, }; - +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { tag : AUDIO_EFFECT_LIBRARY_TAG, version : EFFECT_LIBRARY_API_VERSION, name : "Visualizer Library", implementor : "The Android Open Source Project", - query_num_effects : VisualizerLib_QueryNumberEffects, - query_effect : VisualizerLib_QueryEffect, create_effect : VisualizerLib_Create, release_effect : VisualizerLib_Release, get_descriptor : VisualizerLib_GetDescriptor, diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 54666fb..2c0c3a5 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -13,15 +13,19 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ AudioTrack.cpp \ + AudioTrackShared.cpp \ IAudioFlinger.cpp \ IAudioFlingerClient.cpp \ IAudioTrack.cpp \ IAudioRecord.cpp \ ICrypto.cpp \ + IDrm.cpp \ + IDrmClient.cpp \ IHDCP.cpp \ AudioRecord.cpp \ AudioSystem.cpp \ mediaplayer.cpp \ + IMediaLogService.cpp \ IMediaPlayerService.cpp \ IMediaPlayerClient.cpp \ IMediaRecorderClient.cpp \ @@ -51,10 +55,17 @@ LOCAL_SRC_FILES:= \ SoundPool.cpp \ SoundPoolThread.cpp +LOCAL_SRC_FILES += ../libnbaio/roundup.c + +# for <cutils/atomic-inline.h> +LOCAL_CFLAGS += -DANDROID_SMP=$(if $(findstring true,$(TARGET_CPU_SMP)),1,0) +LOCAL_SRC_FILES += SingleStateQueue.cpp +LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"' + LOCAL_SHARED_LIBRARIES := \ - libui libcutils libutils libbinder libsonivox libicuuc libexpat \ + libui liblog libcutils libutils libbinder libsonivox libicuuc libexpat \ libcamera_client libstagefright_foundation \ - libgui libdl libaudioutils libmedia_native + 
libgui libdl libaudioutils LOCAL_WHOLE_STATIC_LIBRARY := libmedia_helper diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp index 680604b..8dfffb3 100644 --- a/media/libmedia/AudioEffect.cpp +++ b/media/libmedia/AudioEffect.cpp @@ -127,7 +127,7 @@ status_t AudioEffect::set(const effect_uuid_t *type, mIEffectClient = new EffectClient(this); - iEffect = audioFlinger->createEffect(getpid(), &mDescriptor, + iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor, mIEffectClient, priority, io, mSessionId, &mStatus, &mId, &enabled); if (iEffect == 0 || (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS)) { @@ -152,7 +152,8 @@ status_t AudioEffect::set(const effect_uuid_t *type, mCblk->buffer = (uint8_t *)mCblk + bufOffset; iEffect->asBinder()->linkToDeath(mIEffectClient); - ALOGV("set() %p OK effect: %s id: %d status %d enabled %d", this, mDescriptor.name, mId, mStatus, mEnabled); + ALOGV("set() %p OK effect: %s id: %d status %d enabled %d", this, mDescriptor.name, mId, + mStatus, mEnabled); return mStatus; } @@ -266,9 +267,11 @@ status_t AudioEffect::setParameter(effect_param_t *param) uint32_t size = sizeof(int); uint32_t psize = ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + param->vsize; - ALOGV("setParameter: param: %d, param2: %d", *(int *)param->data, (param->psize == 8) ? *((int *)param->data + 1): -1); + ALOGV("setParameter: param: %d, param2: %d", *(int *)param->data, + (param->psize == 8) ? *((int *)param->data + 1): -1); - return mIEffect->command(EFFECT_CMD_SET_PARAM, sizeof (effect_param_t) + psize, param, &size, ¶m->status); + return mIEffect->command(EFFECT_CMD_SET_PARAM, sizeof (effect_param_t) + psize, param, &size, + ¶m->status); } status_t AudioEffect::setParameterDeferred(effect_param_t *param) @@ -321,11 +324,14 @@ status_t AudioEffect::getParameter(effect_param_t *param) return BAD_VALUE; } - ALOGV("getParameter: param: %d, param2: %d", *(int *)param->data, (param->psize == 8) ? *((int *)param->data + 1): -1); + ALOGV("getParameter: param: %d, param2: %d", *(int *)param->data, + (param->psize == 8) ? 
*((int *)param->data + 1): -1); - uint32_t psize = sizeof(effect_param_t) + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + param->vsize; + uint32_t psize = sizeof(effect_param_t) + ((param->psize - 1) / sizeof(int) + 1) * sizeof(int) + + param->vsize; - return mIEffect->command(EFFECT_CMD_GET_PARAM, sizeof(effect_param_t) + param->psize, param, &psize, param); + return mIEffect->command(EFFECT_CMD_GET_PARAM, sizeof(effect_param_t) + param->psize, param, + &psize, param); } @@ -346,7 +352,8 @@ void AudioEffect::binderDied() void AudioEffect::controlStatusChanged(bool controlGranted) { - ALOGV("controlStatusChanged %p control %d callback %p mUserData %p", this, controlGranted, mCbf, mUserData); + ALOGV("controlStatusChanged %p control %d callback %p mUserData %p", this, controlGranted, mCbf, + mUserData); if (controlGranted) { if (mStatus == ALREADY_EXISTS) { mStatus = NO_ERROR; diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 8ea6306..40ff1bf 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -36,7 +36,7 @@ namespace android { // static status_t AudioRecord::getMinFrameCount( - int* frameCount, + size_t* frameCount, uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask) @@ -47,14 +47,14 @@ status_t AudioRecord::getMinFrameCount( *frameCount = 0; size_t size = 0; - if (AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &size) - != NO_ERROR) { - ALOGE("AudioSystem could not query the input buffer size."); + status_t status = AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &size); + if (status != NO_ERROR) { + ALOGE("AudioSystem could not query the input buffer size; status %d", status); return NO_INIT; } if (size == 0) { - ALOGE("Unsupported configuration: sampleRate %d, format %d, channelMask %#x", + ALOGE("Unsupported configuration: sampleRate %u, format %d, channelMask %#x", sampleRate, format, channelMask); return BAD_VALUE; } @@ -63,7 +63,7 @@ status_t AudioRecord::getMinFrameCount( size <<= 1; if (audio_is_linear_pcm(format)) { - int channelCount = popcount(channelMask); + uint32_t channelCount = popcount(channelMask); size /= channelCount * audio_bytes_per_sample(format); } @@ -75,7 +75,8 @@ status_t AudioRecord::getMinFrameCount( AudioRecord::AudioRecord() : mStatus(NO_INIT), mSessionId(0), - mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { } @@ -90,10 +91,12 @@ AudioRecord::AudioRecord( int notificationFrames, int sessionId) : mStatus(NO_INIT), mSessionId(0), - mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousPriority(ANDROID_PRIORITY_NORMAL), + mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { mStatus = set(inputSource, sampleRate, format, channelMask, - frameCount, cbf, user, notificationFrames, sessionId); + frameCount, cbf, user, notificationFrames, false /*threadCanCallJava*/, sessionId); } AudioRecord::~AudioRecord() @@ -112,6 +115,7 @@ AudioRecord::~AudioRecord() IPCThreadState::self()->flushCommands(); AudioSystem::releaseAudioSessionId(mSessionId); } + delete mProxy; } status_t AudioRecord::set( @@ -119,15 +123,22 @@ status_t AudioRecord::set( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + int frameCountInt, callback_t cbf, void* user, int notificationFrames, bool threadCanCallJava, int sessionId) { + // FIXME "int" here 
is legacy and will be replaced by size_t later + if (frameCountInt < 0) { + ALOGE("Invalid frame count %d", frameCountInt); + return BAD_VALUE; + } + size_t frameCount = frameCountInt; - ALOGV("set(): sampleRate %d, channelMask %#x, frameCount %d",sampleRate, channelMask, frameCount); + ALOGV("set(): sampleRate %u, channelMask %#x, frameCount %u", sampleRate, channelMask, + frameCount); AutoMutex lock(mLock); @@ -142,6 +153,8 @@ status_t AudioRecord::set( if (sampleRate == 0) { sampleRate = DEFAULT_SAMPLE_RATE; } + mSampleRate = sampleRate; + // these below should probably come from the audioFlinger too... if (format == AUDIO_FORMAT_DEFAULT) { format = AUDIO_FORMAT_PCM_16_BIT; @@ -151,12 +164,20 @@ status_t AudioRecord::set( ALOGE("Invalid format"); return BAD_VALUE; } + mFormat = format; if (!audio_is_input_channel(channelMask)) { return BAD_VALUE; } + mChannelMask = channelMask; + uint32_t channelCount = popcount(channelMask); + mChannelCount = channelCount; - int channelCount = popcount(channelMask); + if (audio_is_linear_pcm(format)) { + mFrameSize = channelCount * audio_bytes_per_sample(format); + } else { + mFrameSize = sizeof(uint8_t); + } if (sessionId == 0 ) { mSessionId = AudioSystem::newAudioSessionId(); @@ -176,7 +197,7 @@ status_t AudioRecord::set( } // validate framecount - int minFrameCount = 0; + size_t minFrameCount = 0; status_t status = getMinFrameCount(&minFrameCount, sampleRate, format, channelMask); if (status != NO_ERROR) { return status; @@ -194,8 +215,7 @@ status_t AudioRecord::set( } // create the IAudioRecord - status = openRecord_l(sampleRate, format, channelMask, - frameCount, input); + status = openRecord_l(sampleRate, format, frameCount, input); if (status != NO_ERROR) { return status; } @@ -207,11 +227,9 @@ status_t AudioRecord::set( mStatus = NO_ERROR; - mFormat = format; // Update buffer size in case it has been limited by AudioFlinger during track creation - mFrameCount = mCblk->frameCount; - mChannelCount = (uint8_t)channelCount; - mChannelMask = channelMask; + mFrameCount = mCblk->frameCount_; + mActive = false; mCbf = cbf; mNotificationFrames = notificationFrames; @@ -247,25 +265,16 @@ audio_format_t AudioRecord::format() const return mFormat; } -int AudioRecord::channelCount() const +uint32_t AudioRecord::channelCount() const { return mChannelCount; } -uint32_t AudioRecord::frameCount() const +size_t AudioRecord::frameCount() const { return mFrameCount; } -size_t AudioRecord::frameSize() const -{ - if (audio_is_linear_pcm(mFormat)) { - return channelCount()*audio_bytes_per_sample(mFormat); - } else { - return sizeof(uint8_t); - } -} - audio_source_t AudioRecord::inputSource() const { return mInputSource; @@ -291,17 +300,19 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession) mActive = true; cblk->lock.lock(); - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { cblk->lock.unlock(); ALOGV("mAudioRecord->start()"); ret = mAudioRecord->start(event, triggerSession); cblk->lock.lock(); if (ret == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); } } - if (cblk->flags & CBLK_INVALID_MSK) { - ret = restoreRecord_l(cblk); + if (cblk->flags & CBLK_INVALID) { + audio_track_cblk_t* temp = cblk; + ret = restoreRecord_l(temp); + cblk = temp; } cblk->lock.unlock(); if (ret == NO_ERROR) { @@ -355,7 +366,7 @@ bool AudioRecord::stopped() const uint32_t AudioRecord::getSampleRate() const { - return mCblk->sampleRate; + return mSampleRate; } 
status_t AudioRecord::setMarkerPosition(uint32_t marker) @@ -425,13 +436,13 @@ unsigned int AudioRecord::getInputFramesLost() const status_t AudioRecord::openRecord_l( uint32_t sampleRate, audio_format_t format, - audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, audio_io_handle_t input) { status_t status; const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger(); if (audioFlinger == 0) { + ALOGE("Could not get audioflinger"); return NO_INIT; } @@ -439,9 +450,9 @@ status_t AudioRecord::openRecord_l( // FIXME see similar logic at AudioTrack int originalSessionId = mSessionId; - sp<IAudioRecord> record = audioFlinger->openRecord(getpid(), input, + sp<IAudioRecord> record = audioFlinger->openRecord(input, sampleRate, format, - channelMask, + mChannelMask, frameCount, IAudioFlinger::TRACK_DEFAULT, tid, @@ -454,25 +465,32 @@ status_t AudioRecord::openRecord_l( ALOGE("AudioFlinger could not create record track, status: %d", status); return status; } - sp<IMemory> cblk = record->getCblk(); - if (cblk == 0) { + sp<IMemory> iMem = record->getCblk(); + if (iMem == 0) { ALOGE("Could not get control block"); return NO_INIT; } mAudioRecord.clear(); mAudioRecord = record; mCblkMemory.clear(); - mCblkMemory = cblk; - mCblk = static_cast<audio_track_cblk_t*>(cblk->pointer()); - mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); - android_atomic_and(~CBLK_DIRECTION_MSK, &mCblk->flags); - mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - mCblk->waitTimeMs = 0; + mCblkMemory = iMem; + audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer()); + mCblk = cblk; + mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); + cblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + cblk->waitTimeMs = 0; + + // update proxy + delete mProxy; + mProxy = new AudioRecordClientProxy(cblk, mBuffers, frameCount, mFrameSize); + return NO_ERROR; } status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); bool active; status_t result = NO_ERROR; @@ -483,7 +501,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->frameCount = 0; audioBuffer->size = 0; - uint32_t framesReady = cblk->framesReady(); + size_t framesReady = mProxy->framesReady(); if (framesReady == 0) { cblk->lock.lock(); @@ -498,17 +516,22 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) cblk->lock.unlock(); return WOULD_BLOCK; } - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { mLock.unlock(); + // this condition is in shared memory, so if IAudioRecord and control block + // are replaced due to mediaserver death or IAudioRecord invalidation then + // cv won't be signalled, but fortunately the timeout will limit the wait result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); cblk->lock.unlock(); mLock.lock(); if (!mActive) { return status_t(STOPPED); } + // IAudioRecord may have been re-created while mLock was unlocked + cblk = mCblk; cblk->lock.lock(); } - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { goto create_new_record; } if (CC_UNLIKELY(result != NO_ERROR)) { @@ -521,9 +544,11 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); cblk->lock.lock(); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); 
create_new_record: - result = AudioRecord::restoreRecord_l(cblk); + audio_track_cblk_t* temp = cblk; + result = AudioRecord::restoreRecord_l(temp); + cblk = temp; } if (result != NO_ERROR) { ALOGW("obtainBuffer create Track error %d", result); @@ -539,7 +564,7 @@ create_new_record: } // read the server count again start_loop_here: - framesReady = cblk->framesReady(); + framesReady = mProxy->framesReady(); } cblk->lock.unlock(); } @@ -553,26 +578,25 @@ create_new_record: } uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + cblk->frameCount; + uint32_t bufferEnd = cblk->userBase + mFrameCount; if (framesReq > bufferEnd - u) { framesReq = bufferEnd - u; } - audioBuffer->flags = 0; - audioBuffer->channelCount= mChannelCount; - audioBuffer->format = mFormat; audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq*cblk->frameSize; - audioBuffer->raw = (int8_t*)cblk->buffer(u); + audioBuffer->size = framesReq * mFrameSize; + audioBuffer->raw = mProxy->buffer(u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); } void AudioRecord::releaseBuffer(Buffer* audioBuffer) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); - mCblk->stepUser(audioBuffer->frameCount); + (void) mProxy->stepUser(audioBuffer->frameCount); } audio_io_handle_t AudioRecord::getInput() const @@ -585,7 +609,7 @@ audio_io_handle_t AudioRecord::getInput() const audio_io_handle_t AudioRecord::getInput_l() { mInput = AudioSystem::getInput(mInputSource, - mCblk->sampleRate, + mSampleRate, mFormat, mChannelMask, mSessionId); @@ -631,10 +655,13 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize) status_t err = obtainBuffer(&audioBuffer, ((2 * MAX_RUN_TIMEOUT_MS) / WAIT_PERIOD_MS)); if (err < 0) { // out of buffers, return #bytes written - if (err == status_t(NO_MORE_BUFFERS)) + if (err == status_t(NO_MORE_BUFFERS)) { break; - if (err == status_t(TIMED_OUT)) - err = 0; + } + if (err == status_t(TIMED_OUT)) { + // return partial transfer count + return read; + } return ssize_t(err); } @@ -701,7 +728,8 @@ bool AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread) status_t err = obtainBuffer(&audioBuffer, 1); if (err < NO_ERROR) { if (err != TIMED_OUT) { - ALOGE_IF(err != status_t(NO_MORE_BUFFERS), "Error obtaining an audio buffer, giving up."); + ALOGE_IF(err != status_t(NO_MORE_BUFFERS), + "Error obtaining an audio buffer, giving up."); return false; } break; @@ -733,11 +761,11 @@ bool AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread) // Manage overrun callback - if (active && (cblk->framesAvailable() == 0)) { + if (active && (mProxy->framesAvailable() == 0)) { // The value of active is stale, but we are almost sure to be active here because // otherwise we would have exited when obtainBuffer returned STOPPED earlier. ALOGV("Overrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); - if (!(android_atomic_or(CBLK_UNDERRUN_ON, &cblk->flags) & CBLK_UNDERRUN_MSK)) { + if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { mCbf(EVENT_OVERRUN, mUserData, NULL); } } @@ -753,57 +781,40 @@ bool AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread) // must be called with mLock and cblk.lock held. Callers must also hold strong references on // the IAudioRecord and IMemory in case they are recreated here. 
// If the IAudioRecord is successfully restored, the cblk pointer is updated -status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& cblk) +status_t AudioRecord::restoreRecord_l(audio_track_cblk_t*& refCblk) { status_t result; - if (!(android_atomic_or(CBLK_RESTORING_ON, &cblk->flags) & CBLK_RESTORING_MSK)) { - ALOGW("dead IAudioRecord, creating a new one"); - // signal old cblk condition so that other threads waiting for available buffers stop - // waiting now - cblk->cv.broadcast(); - cblk->lock.unlock(); + audio_track_cblk_t* cblk = refCblk; + audio_track_cblk_t* newCblk = cblk; + ALOGW("dead IAudioRecord, creating a new one"); - // if the new IAudioRecord is created, openRecord_l() will modify the - // following member variables: mAudioRecord, mCblkMemory and mCblk. - // It will also delete the strong references on previous IAudioRecord and IMemory - result = openRecord_l(cblk->sampleRate, mFormat, mChannelMask, - mFrameCount, getInput_l()); - if (result == NO_ERROR) { - // callback thread or sync event hasn't changed - result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); - } - if (result != NO_ERROR) { - mActive = false; - } + // signal old cblk condition so that other threads waiting for available buffers stop + // waiting now + cblk->cv.broadcast(); + cblk->lock.unlock(); - // signal old cblk condition for other threads waiting for restore completion - android_atomic_or(CBLK_RESTORED_ON, &cblk->flags); - cblk->cv.broadcast(); - } else { - if (!(cblk->flags & CBLK_RESTORED_MSK)) { - ALOGW("dead IAudioRecord, waiting for a new one to be created"); - mLock.unlock(); - result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS)); - cblk->lock.unlock(); - mLock.lock(); - } else { - ALOGW("dead IAudioRecord, already restored"); - result = NO_ERROR; - cblk->lock.unlock(); - } - if (result != NO_ERROR || !mActive) { - result = status_t(STOPPED); - } + // if the new IAudioRecord is created, openRecord_l() will modify the + // following member variables: mAudioRecord, mCblkMemory and mCblk. + // It will also delete the strong references on previous IAudioRecord and IMemory + result = openRecord_l(mSampleRate, mFormat, mFrameCount, getInput_l()); + if (result == NO_ERROR) { + newCblk = mCblk; + // callback thread or sync event hasn't changed + result = mAudioRecord->start(AudioSystem::SYNC_EVENT_SAME, 0); } + if (result != NO_ERROR) { + mActive = false; + } + ALOGV("restoreRecord_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", - result, mActive, mCblk, cblk, mCblk->flags, cblk->flags); + result, mActive, newCblk, cblk, newCblk->flags, cblk->flags); if (result == NO_ERROR) { // from now on we switch to the newly created cblk - cblk = mCblk; + refCblk = newCblk; } - cblk->lock.lock(); + newCblk->lock.lock(); ALOGW_IF(result != NO_ERROR, "restoreRecord_l() error %d", result); diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 207f96f..693df60 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -205,12 +205,7 @@ int AudioSystem::logToLinear(float volume) return volume ? 
100 - int(dBConvertInverse * log(volume) + 0.5) : 0; } -// DEPRECATED -status_t AudioSystem::getOutputSamplingRate(int* samplingRate, int streamType) { - return getOutputSamplingRate(samplingRate, (audio_stream_type_t)streamType); -} - -status_t AudioSystem::getOutputSamplingRate(int* samplingRate, audio_stream_type_t streamType) +status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t streamType) { audio_io_handle_t output; @@ -228,7 +223,7 @@ status_t AudioSystem::getOutputSamplingRate(int* samplingRate, audio_stream_type status_t AudioSystem::getSamplingRate(audio_io_handle_t output, audio_stream_type_t streamType, - int* samplingRate) + uint32_t* samplingRate) { OutputDescriptor *outputDesc; @@ -246,17 +241,13 @@ status_t AudioSystem::getSamplingRate(audio_io_handle_t output, gLock.unlock(); } - ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %d", streamType, output, *samplingRate); + ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %u", streamType, output, + *samplingRate); return NO_ERROR; } -// DEPRECATED -status_t AudioSystem::getOutputFrameCount(int* frameCount, int streamType) { - return getOutputFrameCount(frameCount, (audio_stream_type_t)streamType); -} - -status_t AudioSystem::getOutputFrameCount(int* frameCount, audio_stream_type_t streamType) +status_t AudioSystem::getOutputFrameCount(size_t* frameCount, audio_stream_type_t streamType) { audio_io_handle_t output; @@ -274,7 +265,7 @@ status_t AudioSystem::getOutputFrameCount(int* frameCount, audio_stream_type_t s status_t AudioSystem::getFrameCount(audio_io_handle_t output, audio_stream_type_t streamType, - int* frameCount) + size_t* frameCount) { OutputDescriptor *outputDesc; @@ -290,7 +281,8 @@ status_t AudioSystem::getFrameCount(audio_io_handle_t output, gLock.unlock(); } - ALOGV("getFrameCount() streamType %d, output %d, frameCount %d", streamType, output, *frameCount); + ALOGV("getFrameCount() streamType %d, output %d, frameCount %d", streamType, output, + *frameCount); return NO_ERROR; } @@ -369,7 +361,8 @@ status_t AudioSystem::setVoiceVolume(float value) return af->setVoiceVolume(value); } -status_t AudioSystem::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, audio_stream_type_t stream) +status_t AudioSystem::getRenderPosition(size_t *halFrames, size_t *dspFrames, + audio_stream_type_t stream) { const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); if (af == 0) return PERMISSION_DENIED; @@ -381,7 +374,7 @@ status_t AudioSystem::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames return af->getRenderPosition(halFrames, dspFrames, getOutput(stream)); } -unsigned int AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) { +size_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) { const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); unsigned int result = 0; if (af == 0) return result; @@ -449,8 +442,10 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle OutputDescriptor *outputDesc = new OutputDescriptor(*desc); gOutputs.add(ioHandle, outputDesc); - ALOGV("ioConfigChanged() new output samplingRate %d, format %d channels %#x frameCount %d latency %d", - outputDesc->samplingRate, outputDesc->format, outputDesc->channels, outputDesc->frameCount, outputDesc->latency); + ALOGV("ioConfigChanged() new output samplingRate %u, format %d channels %#x frameCount %u " + "latency %d", + outputDesc->samplingRate, outputDesc->format, outputDesc->channels, + outputDesc->frameCount, 
outputDesc->latency); } break; case OUTPUT_CLOSED: { if (gOutputs.indexOfKey(ioHandle) < 0) { @@ -471,7 +466,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle if (param2 == NULL) break; desc = (const OutputDescriptor *)param2; - ALOGV("ioConfigChanged() new config for output %d samplingRate %d, format %d channels %#x frameCount %d latency %d", + ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %d channels %#x " + "frameCount %d latency %d", ioHandle, desc->samplingRate, desc->format, desc->channels, desc->frameCount, desc->latency); OutputDescriptor *outputDesc = gOutputs.valueAt(index); @@ -510,7 +506,7 @@ sp<IAudioPolicyService> AudioSystem::gAudioPolicyService; sp<AudioSystem::AudioPolicyServiceClient> AudioSystem::gAudioPolicyServiceClient; -// establish binder interface to AudioFlinger service +// establish binder interface to AudioPolicy service const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service() { gLock.lock(); @@ -735,6 +731,16 @@ status_t AudioSystem::isStreamActive(audio_stream_type_t stream, bool* state, ui return NO_ERROR; } +status_t AudioSystem::isStreamActiveRemotely(audio_stream_type_t stream, bool* state, + uint32_t inPastMs) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + if (state == NULL) return BAD_VALUE; + *state = aps->isStreamActiveRemotely(stream, inPastMs); + return NO_ERROR; +} + status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) { const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); @@ -744,14 +750,14 @@ status_t AudioSystem::isSourceActive(audio_source_t stream, bool* state) return NO_ERROR; } -int32_t AudioSystem::getPrimaryOutputSamplingRate() +uint32_t AudioSystem::getPrimaryOutputSamplingRate() { const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); if (af == 0) return 0; return af->getPrimaryOutputSamplingRate(); } -int32_t AudioSystem::getPrimaryOutputFrameCount() +size_t AudioSystem::getPrimaryOutputFrameCount() { const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); if (af == 0) return 0; diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index aec8c4a..7eeb4f8 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -50,11 +50,13 @@ namespace android { // static status_t AudioTrack::getMinFrameCount( - int* frameCount, + size_t* frameCount, audio_stream_type_t streamType, uint32_t sampleRate) { - if (frameCount == NULL) return BAD_VALUE; + if (frameCount == NULL) { + return BAD_VALUE; + } // default to 0 in case of error *frameCount = 0; @@ -65,11 +67,11 @@ status_t AudioTrack::getMinFrameCount( // audio_format_t format // audio_channel_mask_t channelMask // audio_output_flags_t flags - int afSampleRate; + uint32_t afSampleRate; if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) { return NO_INIT; } - int afFrameCount; + size_t afFrameCount; if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) { return NO_INIT; } @@ -95,7 +97,8 @@ AudioTrack::AudioTrack() : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { } @@ -113,35 +116,14 @@ AudioTrack::AudioTrack( : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousSchedulingGroup(SP_DEFAULT), 
+ mProxy(NULL) { mStatus = set(streamType, sampleRate, format, channelMask, frameCount, flags, cbf, user, notificationFrames, 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId); } -// DEPRECATED -AudioTrack::AudioTrack( - int streamType, - uint32_t sampleRate, - int format, - int channelMask, - int frameCount, - uint32_t flags, - callback_t cbf, - void* user, - int notificationFrames, - int sessionId) - : mStatus(NO_INIT), - mIsTimed(false), - mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) -{ - mStatus = set((audio_stream_type_t)streamType, sampleRate, (audio_format_t)format, - (audio_channel_mask_t) channelMask, - frameCount, (audio_output_flags_t)flags, cbf, user, notificationFrames, - 0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId); -} - AudioTrack::AudioTrack( audio_stream_type_t streamType, uint32_t sampleRate, @@ -156,8 +138,14 @@ AudioTrack::AudioTrack( : mStatus(NO_INIT), mIsTimed(false), mPreviousPriority(ANDROID_PRIORITY_NORMAL), - mPreviousSchedulingGroup(SP_DEFAULT) + mPreviousSchedulingGroup(SP_DEFAULT), + mProxy(NULL) { + if (sharedBuffer == 0) { + ALOGE("sharedBuffer must be non-0"); + mStatus = BAD_VALUE; + return; + } mStatus = set(streamType, sampleRate, format, channelMask, 0 /*frameCount*/, flags, cbf, user, notificationFrames, sharedBuffer, false /*threadCanCallJava*/, sessionId); @@ -181,6 +169,7 @@ AudioTrack::~AudioTrack() IPCThreadState::self()->flushCommands(); AudioSystem::releaseAudioSessionId(mSessionId); } + delete mProxy; } status_t AudioTrack::set( @@ -188,7 +177,7 @@ status_t AudioTrack::set( uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + int frameCountInt, audio_output_flags_t flags, callback_t cbf, void* user, @@ -197,10 +186,17 @@ status_t AudioTrack::set( bool threadCanCallJava, int sessionId) { + // FIXME "int" here is legacy and will be replaced by size_t later + if (frameCountInt < 0) { + ALOGE("Invalid frame count %d", frameCountInt); + return BAD_VALUE; + } + size_t frameCount = frameCountInt; - ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), sharedBuffer->size()); + ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), + sharedBuffer->size()); - ALOGV("set() streamType %d frameCount %d flags %04x", streamType, frameCount, flags); + ALOGV("set() streamType %d frameCount %u flags %04x", streamType, frameCount, flags); AutoMutex lock(mLock); if (mAudioTrack != 0) { @@ -214,12 +210,13 @@ status_t AudioTrack::set( } if (sampleRate == 0) { - int afSampleRate; + uint32_t afSampleRate; if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) { return NO_INIT; } sampleRate = afSampleRate; } + mSampleRate = sampleRate; // these below should probably come from the audioFlinger too... 
if (format == AUDIO_FORMAT_DEFAULT) { @@ -256,7 +253,17 @@ status_t AudioTrack::set( ALOGE("Invalid channel mask %#x", channelMask); return BAD_VALUE; } + mChannelMask = channelMask; uint32_t channelCount = popcount(channelMask); + mChannelCount = channelCount; + + if (audio_is_linear_pcm(format)) { + mFrameSize = channelCount * audio_bytes_per_sample(format); + mFrameSizeAF = channelCount * sizeof(int16_t); + } else { + mFrameSize = sizeof(uint8_t); + mFrameSizeAF = sizeof(uint8_t); + } audio_io_handle_t output = AudioSystem::getOutput( streamType, @@ -272,6 +279,7 @@ status_t AudioTrack::set( mVolume[RIGHT] = 1.0f; mSendLevel = 0.0f; mFrameCount = frameCount; + mReqFrameCount = frameCount; mNotificationFramesReq = notificationFrames; mSessionId = sessionId; mAuxEffectId = 0; @@ -287,7 +295,6 @@ status_t AudioTrack::set( status_t status = createTrack_l(streamType, sampleRate, format, - channelMask, frameCount, flags, sharedBuffer, @@ -305,10 +312,8 @@ status_t AudioTrack::set( mStreamType = streamType; mFormat = format; - mChannelMask = channelMask; - mChannelCount = channelCount; + mSharedBuffer = sharedBuffer; - mMuted = false; mActive = false; mUserData = user; mLoopCount = 0; @@ -318,56 +323,9 @@ status_t AudioTrack::set( mUpdatePeriod = 0; mFlushed = false; AudioSystem::acquireAudioSessionId(mSessionId); - mRestoreStatus = NO_ERROR; return NO_ERROR; } -status_t AudioTrack::initCheck() const -{ - return mStatus; -} - -// ------------------------------------------------------------------------- - -uint32_t AudioTrack::latency() const -{ - return mLatency; -} - -audio_stream_type_t AudioTrack::streamType() const -{ - return mStreamType; -} - -audio_format_t AudioTrack::format() const -{ - return mFormat; -} - -int AudioTrack::channelCount() const -{ - return mChannelCount; -} - -uint32_t AudioTrack::frameCount() const -{ - return mCblk->frameCount; -} - -size_t AudioTrack::frameSize() const -{ - if (audio_is_linear_pcm(mFormat)) { - return channelCount()*audio_bytes_per_sample(mFormat); - } else { - return sizeof(uint8_t); - } -} - -sp<IMemory>& AudioTrack::sharedBuffer() -{ - return mSharedBuffer; -} - // ------------------------------------------------------------------------- void AudioTrack::start() @@ -390,7 +348,7 @@ void AudioTrack::start() cblk->lock.lock(); cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; cblk->waitTimeMs = 0; - android_atomic_and(~CBLK_DISABLED_ON, &cblk->flags); + android_atomic_and(~CBLK_DISABLED, &cblk->flags); if (t != 0) { t->resume(); } else { @@ -399,19 +357,21 @@ void AudioTrack::start() androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); } - ALOGV("start %p before lock cblk %p", this, mCblk); + ALOGV("start %p before lock cblk %p", this, cblk); status_t status = NO_ERROR; - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { cblk->lock.unlock(); ALOGV("mAudioTrack->start()"); status = mAudioTrack->start(); cblk->lock.lock(); if (status == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); } } - if (cblk->flags & CBLK_INVALID_MSK) { - status = restoreTrack_l(cblk, true); + if (cblk->flags & CBLK_INVALID) { + audio_track_cblk_t* temp = cblk; + status = restoreTrack_l(temp, true /*fromStart*/); + cblk = temp; } cblk->lock.unlock(); if (status != NO_ERROR) { @@ -447,6 +407,7 @@ void AudioTrack::stop() mMarkerReached = false; // Force flush if a shared buffer is used otherwise audioflinger // will not stop before end of buffer is reached. 
+ // It may be needed to make sure that we stop playback, likely in case looping is on. if (mSharedBuffer != 0) { flush_l(); } @@ -469,26 +430,26 @@ bool AudioTrack::stopped() const void AudioTrack::flush() { AutoMutex lock(mLock); - flush_l(); + if (!mActive && mSharedBuffer == 0) { + flush_l(); + } } -// must be called with mLock held void AudioTrack::flush_l() { ALOGV("flush"); + ALOG_ASSERT(!mActive); // clear playback marker and periodic update counter mMarkerPosition = 0; mMarkerReached = false; mUpdatePeriod = 0; - if (!mActive) { - mFlushed = true; - mAudioTrack->flush(); - // Release AudioTrack callback thread in case it was waiting for new buffers - // in AudioTrack::obtainBuffer() - mCblk->cv.signal(); - } + mFlushed = true; + mAudioTrack->flush(); + // Release AudioTrack callback thread in case it was waiting for new buffers + // in AudioTrack::obtainBuffer() + mCblk->cv.signal(); } void AudioTrack::pause() @@ -502,19 +463,13 @@ void AudioTrack::pause() } } -void AudioTrack::mute(bool e) -{ - mAudioTrack->mute(e); - mMuted = e; -} - -bool AudioTrack::muted() const -{ - return mMuted; -} - status_t AudioTrack::setVolume(float left, float right) { + if (mStatus != NO_ERROR) { + return mStatus; + } + ALOG_ASSERT(mProxy != NULL); + if (left < 0.0f || left > 1.0f || right < 0.0f || right > 1.0f) { return BAD_VALUE; } @@ -523,32 +478,32 @@ status_t AudioTrack::setVolume(float left, float right) mVolume[LEFT] = left; mVolume[RIGHT] = right; - mCblk->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000)); + mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000)); return NO_ERROR; } -void AudioTrack::getVolume(float* left, float* right) const +status_t AudioTrack::setVolume(float volume) { - if (left != NULL) { - *left = mVolume[LEFT]; - } - if (right != NULL) { - *right = mVolume[RIGHT]; - } + return setVolume(volume, volume); } status_t AudioTrack::setAuxEffectSendLevel(float level) { ALOGV("setAuxEffectSendLevel(%f)", level); + + if (mStatus != NO_ERROR) { + return mStatus; + } + ALOG_ASSERT(mProxy != NULL); + if (level < 0.0f || level > 1.0f) { return BAD_VALUE; } AutoMutex lock(mLock); mSendLevel = level; - - mCblk->setSendLevel(level); + mProxy->setSendLevel(level); return NO_ERROR; } @@ -560,9 +515,9 @@ void AudioTrack::getAuxEffectSendLevel(float* level) const } } -status_t AudioTrack::setSampleRate(int rate) +status_t AudioTrack::setSampleRate(uint32_t rate) { - int afSamplingRate; + uint32_t afSamplingRate; if (mIsTimed) { return INVALID_OPERATION; @@ -572,21 +527,25 @@ status_t AudioTrack::setSampleRate(int rate) return NO_INIT; } // Resampler implementation limits input sampling rate to 2 x output sampling rate. 
- if (rate <= 0 || rate > afSamplingRate*2 ) return BAD_VALUE; + if (rate == 0 || rate > afSamplingRate*2 ) { + return BAD_VALUE; + } AutoMutex lock(mLock); - mCblk->sampleRate = rate; + mSampleRate = rate; + mProxy->setSampleRate(rate); + return NO_ERROR; } uint32_t AudioTrack::getSampleRate() const { if (mIsTimed) { - return INVALID_OPERATION; + return 0; } AutoMutex lock(mLock); - return mCblk->sampleRate; + return mSampleRate; } status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount) @@ -598,6 +557,10 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount // must be called with mLock held status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount) { + if (mSharedBuffer == 0 || mIsTimed) { + return INVALID_OPERATION; + } + audio_track_cblk_t* cblk = mCblk; Mutex::Autolock _l(cblk->lock); @@ -610,20 +573,18 @@ status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCou return NO_ERROR; } - if (mIsTimed) { - return INVALID_OPERATION; - } - if (loopStart >= loopEnd || - loopEnd - loopStart > cblk->frameCount || + loopEnd - loopStart > mFrameCount || cblk->server > loopStart) { - ALOGE("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, user %d", loopStart, loopEnd, loopCount, cblk->frameCount, cblk->user); + ALOGE("setLoop invalid value: loopStart %d, loopEnd %d, loopCount %d, framecount %d, " + "user %d", loopStart, loopEnd, loopCount, mFrameCount, cblk->user); return BAD_VALUE; } - if ((mSharedBuffer != 0) && (loopEnd > cblk->frameCount)) { - ALOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, framecount %d", - loopStart, loopEnd, cblk->frameCount); + if ((mSharedBuffer != 0) && (loopEnd > mFrameCount)) { + ALOGE("setLoop invalid value: loop markers beyond data: loopStart %d, loopEnd %d, " + "framecount %d", + loopStart, loopEnd, mFrameCount); return BAD_VALUE; } @@ -637,7 +598,9 @@ status_t AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCou status_t AudioTrack::setMarkerPosition(uint32_t marker) { - if (mCbf == NULL) return INVALID_OPERATION; + if (mCbf == NULL) { + return INVALID_OPERATION; + } mMarkerPosition = marker; mMarkerReached = false; @@ -647,7 +610,9 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker) status_t AudioTrack::getMarkerPosition(uint32_t *marker) const { - if (marker == NULL) return BAD_VALUE; + if (marker == NULL) { + return BAD_VALUE; + } *marker = mMarkerPosition; @@ -656,7 +621,9 @@ status_t AudioTrack::getMarkerPosition(uint32_t *marker) const status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) { - if (mCbf == NULL) return INVALID_OPERATION; + if (mCbf == NULL) { + return INVALID_OPERATION; + } uint32_t curPosition; getPosition(&curPosition); @@ -668,7 +635,9 @@ status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const { - if (updatePeriod == NULL) return BAD_VALUE; + if (updatePeriod == NULL) { + return BAD_VALUE; + } *updatePeriod = mUpdatePeriod; @@ -677,25 +646,34 @@ status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const status_t AudioTrack::setPosition(uint32_t position) { - if (mIsTimed) return INVALID_OPERATION; + if (mSharedBuffer == 0 || mIsTimed) { + return INVALID_OPERATION; + } AutoMutex lock(mLock); - if (!stopped_l()) return INVALID_OPERATION; + if (!stopped_l()) { + return INVALID_OPERATION; + } - Mutex::Autolock _l(mCblk->lock); + 
audio_track_cblk_t* cblk = mCblk; + Mutex::Autolock _l(cblk->lock); - if (position > mCblk->user) return BAD_VALUE; + if (position > cblk->user) { + return BAD_VALUE; + } - mCblk->server = position; - android_atomic_or(CBLK_FORCEREADY_ON, &mCblk->flags); + cblk->server = position; + android_atomic_or(CBLK_FORCEREADY, &cblk->flags); return NO_ERROR; } status_t AudioTrack::getPosition(uint32_t *position) { - if (position == NULL) return BAD_VALUE; + if (position == NULL) { + return BAD_VALUE; + } AutoMutex lock(mLock); *position = mFlushed ? 0 : mCblk->server; @@ -704,13 +682,24 @@ status_t AudioTrack::getPosition(uint32_t *position) status_t AudioTrack::reload() { + if (mStatus != NO_ERROR) { + return mStatus; + } + ALOG_ASSERT(mProxy != NULL); + + if (mSharedBuffer == 0 || mIsTimed) { + return INVALID_OPERATION; + } + AutoMutex lock(mLock); - if (!stopped_l()) return INVALID_OPERATION; + if (!stopped_l()) { + return INVALID_OPERATION; + } flush_l(); - mCblk->stepUser(mCblk->frameCount); + (void) mProxy->stepUser(mFrameCount); return NO_ERROR; } @@ -725,12 +714,7 @@ audio_io_handle_t AudioTrack::getOutput() audio_io_handle_t AudioTrack::getOutput_l() { return AudioSystem::getOutput(mStreamType, - mCblk->sampleRate, mFormat, mChannelMask, mFlags); -} - -int AudioTrack::getSessionId() const -{ - return mSessionId; + mSampleRate, mFormat, mChannelMask, mFlags); } status_t AudioTrack::attachAuxEffect(int effectId) @@ -750,8 +734,7 @@ status_t AudioTrack::createTrack_l( audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, - audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, audio_output_flags_t flags, const sp<IMemory>& sharedBuffer, audio_io_handle_t output) @@ -791,7 +774,7 @@ status_t AudioTrack::createTrack_l( // Same comment as below about ignoring frameCount parameter for set() frameCount = sharedBuffer->size(); } else if (frameCount == 0) { - int afFrameCount; + size_t afFrameCount; if (AudioSystem::getFrameCount(output, streamType, &afFrameCount) != NO_ERROR) { return NO_INIT; } @@ -800,17 +783,16 @@ status_t AudioTrack::createTrack_l( } else if (sharedBuffer != 0) { - // Ensure that buffer alignment matches channelCount - int channelCount = popcount(channelMask); + // Ensure that buffer alignment matches channel count // 8-bit data in shared memory is not currently supported by AudioFlinger size_t alignment = /* format == AUDIO_FORMAT_PCM_8_BIT ? 1 : */ 2; - if (channelCount > 1) { + if (mChannelCount > 1) { // More than 2 channels does not require stronger alignment than stereo alignment <<= 1; } - if (((uint32_t)sharedBuffer->pointer() & (alignment - 1)) != 0) { - ALOGE("Invalid buffer alignment: address %p, channelCount %d", - sharedBuffer->pointer(), channelCount); + if (((size_t)sharedBuffer->pointer() & (alignment - 1)) != 0) { + ALOGE("Invalid buffer alignment: address %p, channel count %u", + sharedBuffer->pointer(), mChannelCount); return BAD_VALUE; } @@ -818,16 +800,16 @@ status_t AudioTrack::createTrack_l( // there's no frameCount parameter. // But when initializing a shared buffer AudioTrack via set(), // there _is_ a frameCount parameter. We silently ignore it. 
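For the static-buffer path above, the frame count is derived from the shared memory size and the buffer must be aligned for 16-bit PCM frames. A minimal sketch of those two checks, assuming a plain pointer and size in place of the IMemory fields (sharedBufferFrames is an illustrative helper, not part of the class):

    #include <cstddef>
    #include <cstdint>

    // Returns false on misalignment (BAD_VALUE above); otherwise reports the frame
    // count implied by a 16-bit PCM buffer of the given size and channel count.
    static bool sharedBufferFrames(const void *buffer, size_t size,
                                   uint32_t channelCount, size_t &frameCount) {
        size_t alignment = 2;                  // sizeof(int16_t); 8-bit PCM unsupported
        if (channelCount > 1) {
            alignment <<= 1;                   // stereo or more: 4-byte alignment suffices
        }
        if ((reinterpret_cast<uintptr_t>(buffer) & (alignment - 1)) != 0) {
            return false;
        }
        frameCount = size / (channelCount * sizeof(int16_t));
        return true;
    }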
- frameCount = sharedBuffer->size()/channelCount/sizeof(int16_t); + frameCount = sharedBuffer->size()/mChannelCount/sizeof(int16_t); } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) { // FIXME move these calculations and associated checks to server - int afSampleRate; + uint32_t afSampleRate; if (AudioSystem::getSamplingRate(output, streamType, &afSampleRate) != NO_ERROR) { return NO_INIT; } - int afFrameCount; + size_t afFrameCount; if (AudioSystem::getFrameCount(output, streamType, &afFrameCount) != NO_ERROR) { return NO_INIT; } @@ -836,8 +818,8 @@ status_t AudioTrack::createTrack_l( uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); if (minBufCount < 2) minBufCount = 2; - int minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; - ALOGV("minFrameCount: %d, afFrameCount=%d, minBufCount=%d, sampleRate=%d, afSampleRate=%d" + size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate; + ALOGV("minFrameCount: %u, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u" ", afLatency=%d", minFrameCount, afFrameCount, minBufCount, sampleRate, afSampleRate, afLatency); @@ -849,7 +831,7 @@ status_t AudioTrack::createTrack_l( } // Make sure that application is notified with sufficient margin // before underrun - if (mNotificationFramesAct > (uint32_t)frameCount/2) { + if (mNotificationFramesAct > frameCount/2) { mNotificationFramesAct = frameCount/2; } if (frameCount < minFrameCount) { @@ -876,13 +858,14 @@ status_t AudioTrack::createTrack_l( } } - sp<IAudioTrack> track = audioFlinger->createTrack(getpid(), - streamType, + sp<IAudioTrack> track = audioFlinger->createTrack(streamType, sampleRate, - format, - channelMask, + // AudioFlinger only sees 16-bit PCM + format == AUDIO_FORMAT_PCM_8_BIT ? + AUDIO_FORMAT_PCM_16_BIT : format, + mChannelMask, frameCount, - trackFlags, + &trackFlags, sharedBuffer, output, tid, @@ -893,55 +876,76 @@ status_t AudioTrack::createTrack_l( ALOGE("AudioFlinger could not create track, status: %d", status); return status; } - sp<IMemory> cblk = track->getCblk(); - if (cblk == 0) { + sp<IMemory> iMem = track->getCblk(); + if (iMem == 0) { ALOGE("Could not get control block"); return NO_INIT; } mAudioTrack = track; - mCblkMemory = cblk; - mCblk = static_cast<audio_track_cblk_t*>(cblk->pointer()); - // old has the previous value of mCblk->flags before the "or" operation - int32_t old = android_atomic_or(CBLK_DIRECTION_OUT, &mCblk->flags); + mCblkMemory = iMem; + audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer()); + mCblk = cblk; + size_t temp = cblk->frameCount_; + if (temp < frameCount || (frameCount == 0 && temp == 0)) { + // In current design, AudioTrack client checks and ensures frame count validity before + // passing it to AudioFlinger so AudioFlinger should not return a different value except + // for fast track as it uses a special method of assigning frame count. 
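The ALOGV above logs the client-side minimum frame count computation: enough mixer buffers to span the reported output latency, rescaled from the hardware sample rate to the track's rate. A self-contained sketch of that arithmetic with illustrative hardware values (none of the numbers come from the diff):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static size_t minFrameCount(uint32_t sampleRate, uint32_t afSampleRate,
                                size_t afFrameCount, uint32_t afLatencyMs) {
        // number of hardware buffers needed to cover the latency, at least 2
        uint32_t minBufCount =
                static_cast<uint32_t>(afLatencyMs / ((1000 * afFrameCount) / afSampleRate));
        if (minBufCount < 2) {
            minBufCount = 2;
        }
        // rescale to the track's sample rate
        return (afFrameCount * sampleRate * minBufCount) / afSampleRate;
    }

    int main() {
        // e.g. 48 kHz mixer with a 960-frame (20 ms) period and 80 ms latency,
        // driving a 44.1 kHz track: 4 buffers -> 3528 frames
        printf("%zu\n", minFrameCount(44100, 48000, 960, 80));
        return 0;
    }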
+ ALOGW("Requested frameCount %u but received frameCount %u", frameCount, temp); + } + frameCount = temp; + mAwaitBoost = false; if (flags & AUDIO_OUTPUT_FLAG_FAST) { - if (old & CBLK_FAST) { - ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", mCblk->frameCount); + if (trackFlags & IAudioFlinger::TRACK_FAST) { + ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", frameCount); + mAwaitBoost = true; } else { - ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", mCblk->frameCount); + ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", frameCount); // once denied, do not request again if IAudioTrack is re-created flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST); mFlags = flags; } if (sharedBuffer == 0) { - mNotificationFramesAct = mCblk->frameCount/2; + mNotificationFramesAct = frameCount/2; } } if (sharedBuffer == 0) { - mCblk->buffers = (char*)mCblk + sizeof(audio_track_cblk_t); + mBuffers = (char*)cblk + sizeof(audio_track_cblk_t); } else { - mCblk->buffers = sharedBuffer->pointer(); - // Force buffer full condition as data is already present in shared memory - mCblk->stepUser(mCblk->frameCount); + mBuffers = sharedBuffer->pointer(); } - mCblk->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | uint16_t(mVolume[LEFT] * 0x1000)); - mCblk->setSendLevel(mSendLevel); mAudioTrack->attachAuxEffect(mAuxEffectId); - mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; - mCblk->waitTimeMs = 0; + cblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; + cblk->waitTimeMs = 0; mRemainingFrames = mNotificationFramesAct; // FIXME don't believe this lie - mLatency = afLatency + (1000*mCblk->frameCount) / sampleRate; + mLatency = afLatency + (1000*frameCount) / sampleRate; + mFrameCount = frameCount; // If IAudioTrack is re-created, don't let the requested frameCount // decrease. This can confuse clients that cache frameCount(). 
- if (mCblk->frameCount > mFrameCount) { - mFrameCount = mCblk->frameCount; + if (frameCount > mReqFrameCount) { + mReqFrameCount = frameCount; + } + + // update proxy + delete mProxy; + mProxy = new AudioTrackClientProxy(cblk, mBuffers, frameCount, mFrameSizeAF); + mProxy->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) | + uint16_t(mVolume[LEFT] * 0x1000)); + mProxy->setSendLevel(mSendLevel); + mProxy->setSampleRate(mSampleRate); + if (sharedBuffer != 0) { + // Force buffer full condition as data is already present in shared memory + mProxy->stepUser(frameCount); } + return NO_ERROR; } status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); bool active; status_t result = NO_ERROR; @@ -952,10 +956,10 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) audioBuffer->frameCount = 0; audioBuffer->size = 0; - uint32_t framesAvail = cblk->framesAvailable(); + size_t framesAvail = mProxy->framesAvailable(); cblk->lock.lock(); - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { goto create_new_track; } cblk->lock.unlock(); @@ -974,18 +978,23 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) cblk->lock.unlock(); return WOULD_BLOCK; } - if (!(cblk->flags & CBLK_INVALID_MSK)) { + if (!(cblk->flags & CBLK_INVALID)) { mLock.unlock(); + // this condition is in shared memory, so if IAudioTrack and control block + // are replaced due to mediaserver death or IAudioTrack invalidation then + // cv won't be signalled, but fortunately the timeout will limit the wait result = cblk->cv.waitRelative(cblk->lock, milliseconds(waitTimeMs)); cblk->lock.unlock(); mLock.lock(); if (!mActive) { return status_t(STOPPED); } + // IAudioTrack may have been re-created while mLock was unlocked + cblk = mCblk; cblk->lock.lock(); } - if (cblk->flags & CBLK_INVALID_MSK) { + if (cblk->flags & CBLK_INVALID) { goto create_new_track; } if (CC_UNLIKELY(result != NO_ERROR)) { @@ -994,16 +1003,18 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) // timing out when a loop has been set and we have already written upto loop end // is a normal condition: no need to wake AudioFlinger up. if (cblk->user < cblk->loopEnd) { - ALOGW( "obtainBuffer timed out (is the CPU pegged?) %p name=%#x" - "user=%08x, server=%08x", this, cblk->mName, cblk->user, cblk->server); + ALOGW("obtainBuffer timed out (is the CPU pegged?) 
%p name=%#x user=%08x, " + "server=%08x", this, cblk->mName, cblk->user, cblk->server); //unlock cblk mutex before calling mAudioTrack->start() (see issue #1617140) cblk->lock.unlock(); result = mAudioTrack->start(); cblk->lock.lock(); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &cblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); create_new_track: - result = restoreTrack_l(cblk, false); + audio_track_cblk_t* temp = cblk; + result = restoreTrack_l(temp, false /*fromStart*/); + cblk = temp; } if (result != NO_ERROR) { ALOGW("obtainBuffer create Track error %d", result); @@ -1021,7 +1032,7 @@ create_new_track: } // read the server count again start_loop_here: - framesAvail = cblk->framesAvailable_l(); + framesAvail = mProxy->framesAvailable_l(); } cblk->lock.unlock(); } @@ -1033,35 +1044,31 @@ create_new_track: } uint32_t u = cblk->user; - uint32_t bufferEnd = cblk->userBase + cblk->frameCount; + uint32_t bufferEnd = cblk->userBase + mFrameCount; if (framesReq > bufferEnd - u) { framesReq = bufferEnd - u; } - audioBuffer->flags = mMuted ? Buffer::MUTE : 0; - audioBuffer->channelCount = mChannelCount; audioBuffer->frameCount = framesReq; - audioBuffer->size = framesReq * cblk->frameSize; - if (audio_is_linear_pcm(mFormat)) { - audioBuffer->format = AUDIO_FORMAT_PCM_16_BIT; - } else { - audioBuffer->format = mFormat; - } - audioBuffer->raw = (int8_t *)cblk->buffer(u); + audioBuffer->size = framesReq * mFrameSizeAF; + audioBuffer->raw = mProxy->buffer(u); active = mActive; return active ? status_t(NO_ERROR) : status_t(STOPPED); } void AudioTrack::releaseBuffer(Buffer* audioBuffer) { + ALOG_ASSERT(mStatus == NO_ERROR && mProxy != NULL); + AutoMutex lock(mLock); - mCblk->stepUser(audioBuffer->frameCount); + audio_track_cblk_t* cblk = mCblk; + (void) mProxy->stepUser(audioBuffer->frameCount); if (audioBuffer->frameCount > 0) { // restart track if it was disabled by audioflinger due to previous underrun - if (mActive && (mCblk->flags & CBLK_DISABLED_MSK)) { - android_atomic_and(~CBLK_DISABLED_ON, &mCblk->flags); - ALOGW("releaseBuffer() track %p name=%#x disabled, restarting", this, mCblk->mName); + if (mActive && (cblk->flags & CBLK_DISABLED)) { + android_atomic_and(~CBLK_DISABLED, &cblk->flags); + ALOGW("releaseBuffer() track %p name=%#x disabled, restarting", this, cblk->mName); mAudioTrack->start(); } } @@ -1072,8 +1079,9 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer) ssize_t AudioTrack::write(const void* buffer, size_t userSize) { - if (mSharedBuffer != 0) return INVALID_OPERATION; - if (mIsTimed) return INVALID_OPERATION; + if (mSharedBuffer != 0 || mIsTimed) { + return INVALID_OPERATION; + } if (ssize_t(userSize) < 0) { // Sanity-check: user is most-likely passing an error code, and it would @@ -1096,6 +1104,9 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) sp<IMemory> iMem = mCblkMemory; mLock.unlock(); + // since mLock is unlocked the IAudioTrack and shared memory may be re-created, + // so all cblk references might still refer to old shared memory, but that should be benign + ssize_t written = 0; const int8_t *src = (const int8_t *)buffer; Buffer audioBuffer; @@ -1107,8 +1118,9 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize) status_t err = obtainBuffer(&audioBuffer, -1); if (err < 0) { // out of buffers, return #bytes written - if (err == status_t(NO_MORE_BUFFERS)) + if (err == status_t(NO_MORE_BUFFERS)) { break; + } return ssize_t(err); } @@ -1121,8 +1133,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t 
userSize) } else { toWrite = audioBuffer.size; memcpy(audioBuffer.i8, src, toWrite); - src += toWrite; } + src += toWrite; userSize -= toWrite; written += toWrite; @@ -1140,27 +1152,37 @@ TimedAudioTrack::TimedAudioTrack() { status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp<IMemory>* buffer) { + AutoMutex lock(mLock); status_t result = UNKNOWN_ERROR; + // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed + // while we are accessing the cblk + sp<IAudioTrack> audioTrack = mAudioTrack; + sp<IMemory> iMem = mCblkMemory; + // If the track is not invalid already, try to allocate a buffer. alloc // fails indicating that the server is dead, flag the track as invalid so // we can attempt to restore in just a bit. - if (!(mCblk->flags & CBLK_INVALID_MSK)) { + audio_track_cblk_t* cblk = mCblk; + if (!(cblk->flags & CBLK_INVALID)) { result = mAudioTrack->allocateTimedBuffer(size, buffer); if (result == DEAD_OBJECT) { - android_atomic_or(CBLK_INVALID_ON, &mCblk->flags); + android_atomic_or(CBLK_INVALID, &cblk->flags); } } // If the track is invalid at this point, attempt to restore it. and try the // allocation one more time. - if (mCblk->flags & CBLK_INVALID_MSK) { - mCblk->lock.lock(); - result = restoreTrack_l(mCblk, false); - mCblk->lock.unlock(); + if (cblk->flags & CBLK_INVALID) { + cblk->lock.lock(); + audio_track_cblk_t* temp = cblk; + result = restoreTrack_l(temp, false /*fromStart*/); + cblk = temp; + cblk->lock.unlock(); - if (result == OK) + if (result == OK) { result = mAudioTrack->allocateTimedBuffer(size, buffer); + } } return result; @@ -1172,10 +1194,11 @@ status_t TimedAudioTrack::queueTimedBuffer(const sp<IMemory>& buffer, status_t status = mAudioTrack->queueTimedBuffer(buffer, pts); { AutoMutex lock(mLock); + audio_track_cblk_t* cblk = mCblk; // restart track if it was disabled by audioflinger due to previous underrun if (buffer->size() != 0 && status == NO_ERROR && - mActive && (mCblk->flags & CBLK_DISABLED_MSK)) { - android_atomic_and(~CBLK_DISABLED_ON, &mCblk->flags); + mActive && (cblk->flags & CBLK_DISABLED)) { + android_atomic_and(~CBLK_DISABLED, &cblk->flags); ALOGW("queueTimedBuffer() track %p disabled, restarting", this); mAudioTrack->start(); } @@ -1198,6 +1221,25 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) size_t writtenSize; mLock.lock(); + if (mAwaitBoost) { + mAwaitBoost = false; + mLock.unlock(); + static const int32_t kMaxTries = 5; + int32_t tryCounter = kMaxTries; + uint32_t pollUs = 10000; + do { + int policy = sched_getscheduler(0); + if (policy == SCHED_FIFO || policy == SCHED_RR) { + break; + } + usleep(pollUs); + pollUs <<= 1; + } while (tryCounter-- > 0); + if (tryCounter < 0) { + ALOGE("did not receive expected priority boost on time"); + } + return true; + } // acquire a strong reference on the IMemory and IAudioTrack so that they cannot be destroyed // while we are accessing the cblk sp<IAudioTrack> audioTrack = mAudioTrack; @@ -1206,15 +1248,20 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) bool active = mActive; mLock.unlock(); + // since mLock is unlocked the IAudioTrack and shared memory may be re-created, + // so all cblk references might still refer to old shared memory, but that should be benign + // Manage underrun callback - if (active && (cblk->framesAvailable() == cblk->frameCount)) { + if (active && (mProxy->framesAvailable() == mFrameCount)) { ALOGV("Underrun user: %x, server: %x, flags %04x", cblk->user, cblk->server, cblk->flags); 
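The mAwaitBoost block above is new: after a fast track is granted, the callback thread polls its own scheduling policy with exponential backoff until AudioFlinger has promoted it to a real-time class, giving up after a bounded number of tries. A standalone sketch of that pattern using only POSIX calls (waitForRealtimePolicy is an illustrative name):

    #include <sched.h>
    #include <unistd.h>
    #include <cstdint>

    // Returns true once this thread runs under SCHED_FIFO or SCHED_RR, false if the
    // promotion never arrives within ~6 polls (10 ms sleeps doubling up to 320 ms).
    static bool waitForRealtimePolicy() {
        static const int32_t kMaxTries = 5;
        int32_t tryCounter = kMaxTries;
        uint32_t pollUs = 10000;
        do {
            const int policy = sched_getscheduler(0);   // 0 = calling thread
            if (policy == SCHED_FIFO || policy == SCHED_RR) {
                return true;
            }
            usleep(pollUs);
            pollUs <<= 1;                               // exponential backoff
        } while (tryCounter-- > 0);
        return false;                                   // caller logs an error, as above
    }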
- if (!(android_atomic_or(CBLK_UNDERRUN_ON, &cblk->flags) & CBLK_UNDERRUN_MSK)) { + if (!(android_atomic_or(CBLK_UNDERRUN, &cblk->flags) & CBLK_UNDERRUN)) { mCbf(EVENT_UNDERRUN, mUserData, 0); - if (cblk->server == cblk->frameCount) { + if (cblk->server == mFrameCount) { mCbf(EVENT_BUFFER_END, mUserData, 0); } - if (mSharedBuffer != 0) return false; + if (mSharedBuffer != 0) { + return false; + } } } @@ -1265,12 +1312,15 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) status_t err = obtainBuffer(&audioBuffer, waitCount); if (err < NO_ERROR) { if (err != TIMED_OUT) { - ALOGE_IF(err != status_t(NO_MORE_BUFFERS), "Error obtaining an audio buffer, giving up."); + ALOGE_IF(err != status_t(NO_MORE_BUFFERS), + "Error obtaining an audio buffer, giving up."); return false; } break; } - if (err == status_t(STOPPED)) return false; + if (err == status_t(STOPPED)) { + return false; + } // Divide buffer size by 2 to take into account the expansion // due to 8 to 16 bit conversion: the callback must fill only half @@ -1293,7 +1343,9 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) break; } - if (writtenSize > reqSize) writtenSize = reqSize; + if (writtenSize > reqSize) { + writtenSize = reqSize; + } if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { // 8 to 16 bit conversion, note that source and destination are the same address @@ -1302,10 +1354,10 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) } audioBuffer.size = writtenSize; - // NOTE: mCblk->frameSize is not equal to AudioTrack::frameSize() for - // 8 bit PCM data: in this case, mCblk->frameSize is based on a sample size of + // NOTE: cblk->frameSize is not equal to AudioTrack::frameSize() for + // 8 bit PCM data: in this case, cblk->frameSize is based on a sample size of // 16 bit. - audioBuffer.frameCount = writtenSize/mCblk->frameSize; + audioBuffer.frameCount = writtenSize / mFrameSizeAF; frames -= audioBuffer.frameCount; @@ -1321,112 +1373,93 @@ bool AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread) return true; } -// must be called with mLock and cblk.lock held. Callers must also hold strong references on +// must be called with mLock and refCblk.lock held. Callers must also hold strong references on // the IAudioTrack and IMemory in case they are recreated here. -// If the IAudioTrack is successfully restored, the cblk pointer is updated -status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& cblk, bool fromStart) +// If the IAudioTrack is successfully restored, the refCblk pointer is updated +// FIXME Don't depend on caller to hold strong references. +status_t AudioTrack::restoreTrack_l(audio_track_cblk_t*& refCblk, bool fromStart) { status_t result; - if (!(android_atomic_or(CBLK_RESTORING_ON, &cblk->flags) & CBLK_RESTORING_MSK)) { - ALOGW("dead IAudioTrack, creating a new one from %s TID %d", - fromStart ? "start()" : "obtainBuffer()", gettid()); + audio_track_cblk_t* cblk = refCblk; + audio_track_cblk_t* newCblk = cblk; + ALOGW("dead IAudioTrack, creating a new one from %s", + fromStart ? 
"start()" : "obtainBuffer()"); - // signal old cblk condition so that other threads waiting for available buffers stop - // waiting now - cblk->cv.broadcast(); - cblk->lock.unlock(); + // signal old cblk condition so that other threads waiting for available buffers stop + // waiting now + cblk->cv.broadcast(); + cblk->lock.unlock(); - // refresh the audio configuration cache in this process to make sure we get new - // output parameters in getOutput_l() and createTrack_l() - AudioSystem::clearAudioConfigCache(); - - // if the new IAudioTrack is created, createTrack_l() will modify the - // following member variables: mAudioTrack, mCblkMemory and mCblk. - // It will also delete the strong references on previous IAudioTrack and IMemory - result = createTrack_l(mStreamType, - cblk->sampleRate, - mFormat, - mChannelMask, - mFrameCount, - mFlags, - mSharedBuffer, - getOutput_l()); - - if (result == NO_ERROR) { - uint32_t user = cblk->user; - uint32_t server = cblk->server; - // restore write index and set other indexes to reflect empty buffer status - mCblk->user = user; - mCblk->server = user; - mCblk->userBase = user; - mCblk->serverBase = user; - // restore loop: this is not guaranteed to succeed if new frame count is not - // compatible with loop length - setLoop_l(cblk->loopStart, cblk->loopEnd, cblk->loopCount); - if (!fromStart) { - mCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - // Make sure that a client relying on callback events indicating underrun or - // the actual amount of audio frames played (e.g SoundPool) receives them. - if (mSharedBuffer == 0) { - uint32_t frames = 0; - if (user > server) { - frames = ((user - server) > mCblk->frameCount) ? - mCblk->frameCount : (user - server); - memset(mCblk->buffers, 0, frames * mCblk->frameSize); - } - // restart playback even if buffer is not completely filled. - android_atomic_or(CBLK_FORCEREADY_ON, &mCblk->flags); - // stepUser() clears CBLK_UNDERRUN_ON flag enabling underrun callbacks to - // the client - mCblk->stepUser(frames); + // refresh the audio configuration cache in this process to make sure we get new + // output parameters in getOutput_l() and createTrack_l() + AudioSystem::clearAudioConfigCache(); + + // if the new IAudioTrack is created, createTrack_l() will modify the + // following member variables: mAudioTrack, mCblkMemory and mCblk. + // It will also delete the strong references on previous IAudioTrack and IMemory + result = createTrack_l(mStreamType, + mSampleRate, + mFormat, + mReqFrameCount, // so that frame count never goes down + mFlags, + mSharedBuffer, + getOutput_l()); + + if (result == NO_ERROR) { + uint32_t user = cblk->user; + uint32_t server = cblk->server; + // restore write index and set other indexes to reflect empty buffer status + newCblk = mCblk; + newCblk->user = user; + newCblk->server = user; + newCblk->userBase = user; + newCblk->serverBase = user; + // restore loop: this is not guaranteed to succeed if new frame count is not + // compatible with loop length + setLoop_l(cblk->loopStart, cblk->loopEnd, cblk->loopCount); + size_t frames = 0; + if (!fromStart) { + newCblk->bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + // Make sure that a client relying on callback events indicating underrun or + // the actual amount of audio frames played (e.g SoundPool) receives them. + if (mSharedBuffer == 0) { + if (user > server) { + frames = ((user - server) > mFrameCount) ? 
+ mFrameCount : (user - server); + memset(mBuffers, 0, frames * mFrameSizeAF); } - } - if (mSharedBuffer != 0) { - mCblk->stepUser(mCblk->frameCount); - } - if (mActive) { - result = mAudioTrack->start(); - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() start() failed status %d", result); - } - if (fromStart && result == NO_ERROR) { - mNewPosition = mCblk->server + mUpdatePeriod; + // restart playback even if buffer is not completely filled. + android_atomic_or(CBLK_FORCEREADY, &newCblk->flags); } } - if (result != NO_ERROR) { - android_atomic_and(~CBLK_RESTORING_ON, &cblk->flags); - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() failed status %d", result); + if (mSharedBuffer != 0) { + frames = mFrameCount; } - mRestoreStatus = result; - // signal old cblk condition for other threads waiting for restore completion - android_atomic_or(CBLK_RESTORED_ON, &cblk->flags); - cblk->cv.broadcast(); - } else { - if (!(cblk->flags & CBLK_RESTORED_MSK)) { - ALOGW("dead IAudioTrack, waiting for a new one TID %d", gettid()); - mLock.unlock(); - result = cblk->cv.waitRelative(cblk->lock, milliseconds(RESTORE_TIMEOUT_MS)); - if (result == NO_ERROR) { - result = mRestoreStatus; - } - cblk->lock.unlock(); - mLock.lock(); - } else { - ALOGW("dead IAudioTrack, already restored TID %d", gettid()); - result = mRestoreStatus; - cblk->lock.unlock(); + if (frames > 0) { + // stepUser() clears CBLK_UNDERRUN flag enabling underrun callbacks to + // the client + mProxy->stepUser(frames); + } + if (mActive) { + result = mAudioTrack->start(); + ALOGW_IF(result != NO_ERROR, "restoreTrack_l() start() failed status %d", result); + } + if (fromStart && result == NO_ERROR) { + mNewPosition = newCblk->server + mUpdatePeriod; } } + ALOGW_IF(result != NO_ERROR, "restoreTrack_l() failed status %d", result); ALOGV("restoreTrack_l() status %d mActive %d cblk %p, old cblk %p flags %08x old flags %08x", - result, mActive, mCblk, cblk, mCblk->flags, cblk->flags); + result, mActive, newCblk, cblk, newCblk->flags, cblk->flags); if (result == NO_ERROR) { // from now on we switch to the newly created cblk - cblk = mCblk; + refCblk = newCblk; } - cblk->lock.lock(); + newCblk->lock.lock(); - ALOGW_IF(result != NO_ERROR, "restoreTrack_l() error %d TID %d", result, gettid()); + ALOGW_IF(result != NO_ERROR, "restoreTrack_l() error %d", result); return result; } @@ -1439,11 +1472,13 @@ status_t AudioTrack::dump(int fd, const Vector<String16>& args) const String8 result; result.append(" AudioTrack::dump\n"); - snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType, mVolume[0], mVolume[1]); + snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType, + mVolume[0], mVolume[1]); result.append(buffer); - snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, mChannelCount, (mCblk == 0) ? 0 : mCblk->frameCount); + snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%d)\n", mFormat, + mChannelCount, mFrameCount); result.append(buffer); - snprintf(buffer, 255, " sample rate(%d), status(%d), muted(%d)\n", (mCblk == 0) ? 
0 : mCblk->sampleRate, mStatus, mMuted); + snprintf(buffer, 255, " sample rate(%u), status(%d)\n", mSampleRate, mStatus); result.append(buffer); snprintf(buffer, 255, " active(%d), latency (%d)\n", mActive, mLatency); result.append(buffer); @@ -1500,182 +1535,4 @@ void AudioTrack::AudioTrackThread::resume() } } -// ========================================================================= - - -audio_track_cblk_t::audio_track_cblk_t() - : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), - userBase(0), serverBase(0), buffers(NULL), frameCount(0), - loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), - mSendLevel(0), flags(0) -{ -} - -uint32_t audio_track_cblk_t::stepUser(uint32_t frameCount) -{ - ALOGV("stepuser %08x %08x %d", user, server, frameCount); - - uint32_t u = user; - u += frameCount; - // Ensure that user is never ahead of server for AudioRecord - if (flags & CBLK_DIRECTION_MSK) { - // If stepServer() has been called once, switch to normal obtainBuffer() timeout period - if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) { - bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; - } - } else if (u > server) { - ALOGW("stepUser occurred after track reset"); - u = server; - } - - uint32_t fc = this->frameCount; - if (u >= fc) { - // common case, user didn't just wrap - if (u - fc >= userBase ) { - userBase += fc; - } - } else if (u >= userBase + fc) { - // user just wrapped - userBase += fc; - } - - user = u; - - // Clear flow control error condition as new data has been written/read to/from buffer. - if (flags & CBLK_UNDERRUN_MSK) { - android_atomic_and(~CBLK_UNDERRUN_MSK, &flags); - } - - return u; -} - -bool audio_track_cblk_t::stepServer(uint32_t frameCount) -{ - ALOGV("stepserver %08x %08x %d", user, server, frameCount); - - if (!tryLock()) { - ALOGW("stepServer() could not lock cblk"); - return false; - } - - uint32_t s = server; - bool flushed = (s == user); - - s += frameCount; - if (flags & CBLK_DIRECTION_MSK) { - // Mark that we have read the first buffer so that next time stepUser() is called - // we switch to normal obtainBuffer() timeout period - if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { - bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1; - } - // It is possible that we receive a flush() - // while the mixer is processing a block: in this case, - // stepServer() is called After the flush() has reset u & s and - // we have s > u - if (flushed) { - ALOGW("stepServer occurred after track reset"); - s = user; - } - } - - if (s >= loopEnd) { - ALOGW_IF(s > loopEnd, "stepServer: s %u > loopEnd %u", s, loopEnd); - s = loopStart; - if (--loopCount == 0) { - loopEnd = UINT_MAX; - loopStart = UINT_MAX; - } - } - - uint32_t fc = this->frameCount; - if (s >= fc) { - // common case, server didn't just wrap - if (s - fc >= serverBase ) { - serverBase += fc; - } - } else if (s >= serverBase + fc) { - // server just wrapped - serverBase += fc; - } - - server = s; - - if (!(flags & CBLK_INVALID_MSK)) { - cv.signal(); - } - lock.unlock(); - return true; -} - -void* audio_track_cblk_t::buffer(uint32_t offset) const -{ - return (int8_t *)buffers + (offset - userBase) * frameSize; -} - -uint32_t audio_track_cblk_t::framesAvailable() -{ - Mutex::Autolock _l(lock); - return framesAvailable_l(); -} - -uint32_t audio_track_cblk_t::framesAvailable_l() -{ - uint32_t u = user; - uint32_t s = server; - - if (flags & CBLK_DIRECTION_MSK) { - uint32_t limit = (s < loopStart) ? 
s : loopStart; - return limit + frameCount - u; - } else { - return frameCount + u - s; - } -} - -uint32_t audio_track_cblk_t::framesReady() -{ - uint32_t u = user; - uint32_t s = server; - - if (flags & CBLK_DIRECTION_MSK) { - if (u < loopEnd) { - return u - s; - } else { - // do not block on mutex shared with client on AudioFlinger side - if (!tryLock()) { - ALOGW("framesReady() could not lock cblk"); - return 0; - } - uint32_t frames = UINT_MAX; - if (loopCount >= 0) { - frames = (loopEnd - loopStart)*loopCount + u - s; - } - lock.unlock(); - return frames; - } - } else { - return s - u; - } -} - -bool audio_track_cblk_t::tryLock() -{ - // the code below simulates lock-with-timeout - // we MUST do this to protect the AudioFlinger server - // as this lock is shared with the client. - status_t err; - - err = lock.tryLock(); - if (err == -EBUSY) { // just wait a bit - usleep(1000); - err = lock.tryLock(); - } - if (err != NO_ERROR) { - // probably, the client just died. - return false; - } - return true; -} - -// ------------------------------------------------------------------------- - }; // namespace android diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp new file mode 100644 index 0000000..13d47c9 --- /dev/null +++ b/media/libmedia/AudioTrackShared.cpp @@ -0,0 +1,196 @@ +/* + * Copyright (C) 2007 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AudioTrackShared" +//#define LOG_NDEBUG 0 + +#include <private/media/AudioTrackShared.h> +#include <utils/Log.h> + +namespace android { + +audio_track_cblk_t::audio_track_cblk_t() + : lock(Mutex::SHARED), cv(Condition::SHARED), user(0), server(0), + userBase(0), serverBase(0), frameCount_(0), + loopStart(UINT_MAX), loopEnd(UINT_MAX), loopCount(0), mVolumeLR(0x10001000), + mSampleRate(0), mSendLevel(0), flags(0) +{ +} + +uint32_t audio_track_cblk_t::stepUser(size_t stepCount, size_t frameCount, bool isOut) +{ + ALOGV("stepuser %08x %08x %d", user, server, stepCount); + + uint32_t u = user; + u += stepCount; + // Ensure that user is never ahead of server for AudioRecord + if (isOut) { + // If stepServer() has been called once, switch to normal obtainBuffer() timeout period + if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS-1) { + bufferTimeoutMs = MAX_RUN_TIMEOUT_MS; + } + } else if (u > server) { + ALOGW("stepUser occurred after track reset"); + u = server; + } + + if (u >= frameCount) { + // common case, user didn't just wrap + if (u - frameCount >= userBase ) { + userBase += frameCount; + } + } else if (u >= userBase + frameCount) { + // user just wrapped + userBase += frameCount; + } + + user = u; + + // Clear flow control error condition as new data has been written/read to/from buffer. 
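stepUser() above keeps two counters: a free-running frame index ("user") and the base of the current buffer cycle ("userBase"), so that (user - userBase) is always an in-buffer offset. A reduced sketch of just that index arithmetic with the same wrap handling (names are illustrative; the output-direction timeout switch and underrun-flag clearing are omitted):

    #include <cstddef>
    #include <cstdint>

    struct StepState {
        uint32_t user = 0;       // total frames written; free-running, may wrap at 2^32
        uint32_t userBase = 0;   // start of the current buffer cycle
    };

    static uint32_t stepUserSketch(StepState &st, size_t stepCount, size_t frameCount) {
        uint32_t u = st.user + static_cast<uint32_t>(stepCount);
        if (u >= frameCount) {
            // common case: the index did not just wrap around 2^32
            if (u - frameCount >= st.userBase) {
                st.userBase += frameCount;   // index is a full buffer past the base
            }
        } else if (u >= st.userBase + frameCount) {
            // the 32-bit index just wrapped
            st.userBase += frameCount;
        }
        st.user = u;
        return u;
    }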
+ if (flags & CBLK_UNDERRUN) { + android_atomic_and(~CBLK_UNDERRUN, &flags); + } + + return u; +} + +bool audio_track_cblk_t::stepServer(size_t stepCount, size_t frameCount, bool isOut) +{ + ALOGV("stepserver %08x %08x %d", user, server, stepCount); + + if (!tryLock()) { + ALOGW("stepServer() could not lock cblk"); + return false; + } + + uint32_t s = server; + bool flushed = (s == user); + + s += stepCount; + if (isOut) { + // Mark that we have read the first buffer so that next time stepUser() is called + // we switch to normal obtainBuffer() timeout period + if (bufferTimeoutMs == MAX_STARTUP_TIMEOUT_MS) { + bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS - 1; + } + // It is possible that we receive a flush() + // while the mixer is processing a block: in this case, + // stepServer() is called After the flush() has reset u & s and + // we have s > u + if (flushed) { + ALOGW("stepServer occurred after track reset"); + s = user; + } + } + + if (s >= loopEnd) { + ALOGW_IF(s > loopEnd, "stepServer: s %u > loopEnd %u", s, loopEnd); + s = loopStart; + if (--loopCount == 0) { + loopEnd = UINT_MAX; + loopStart = UINT_MAX; + } + } + + if (s >= frameCount) { + // common case, server didn't just wrap + if (s - frameCount >= serverBase ) { + serverBase += frameCount; + } + } else if (s >= serverBase + frameCount) { + // server just wrapped + serverBase += frameCount; + } + + server = s; + + if (!(flags & CBLK_INVALID)) { + cv.signal(); + } + lock.unlock(); + return true; +} + +void* audio_track_cblk_t::buffer(void *buffers, size_t frameSize, uint32_t offset) const +{ + return (int8_t *)buffers + (offset - userBase) * frameSize; +} + +uint32_t audio_track_cblk_t::framesAvailable(size_t frameCount, bool isOut) +{ + Mutex::Autolock _l(lock); + return framesAvailable_l(frameCount, isOut); +} + +uint32_t audio_track_cblk_t::framesAvailable_l(size_t frameCount, bool isOut) +{ + uint32_t u = user; + uint32_t s = server; + + if (isOut) { + uint32_t limit = (s < loopStart) ? s : loopStart; + return limit + frameCount - u; + } else { + return frameCount + u - s; + } +} + +uint32_t audio_track_cblk_t::framesReady(bool isOut) +{ + uint32_t u = user; + uint32_t s = server; + + if (isOut) { + if (u < loopEnd) { + return u - s; + } else { + // do not block on mutex shared with client on AudioFlinger side + if (!tryLock()) { + ALOGW("framesReady() could not lock cblk"); + return 0; + } + uint32_t frames = UINT_MAX; + if (loopCount >= 0) { + frames = (loopEnd - loopStart)*loopCount + u - s; + } + lock.unlock(); + return frames; + } + } else { + return s - u; + } +} + +bool audio_track_cblk_t::tryLock() +{ + // the code below simulates lock-with-timeout + // we MUST do this to protect the AudioFlinger server + // as this lock is shared with the client. + status_t err; + + err = lock.tryLock(); + if (err == -EBUSY) { // just wait a bit + usleep(1000); + err = lock.tryLock(); + } + if (err != NO_ERROR) { + // probably, the client just died. 
+ return false; + } + return true; +} + +} // namespace android diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index ce8ffc4..2f18680 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -32,7 +32,7 @@ enum { CREATE_TRACK = IBinder::FIRST_CALL_TRANSACTION, OPEN_RECORD, SAMPLE_RATE, - CHANNEL_COUNT, // obsolete + RESERVED, // obsolete, was CHANNEL_COUNT FORMAT, FRAME_COUNT, LATENCY, @@ -84,13 +84,12 @@ public: } virtual sp<IAudioTrack> createTrack( - pid_t pid, audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, - track_flags_t flags, + size_t frameCount, + track_flags_t *flags, const sp<IMemory>& sharedBuffer, audio_io_handle_t output, pid_t tid, @@ -100,13 +99,13 @@ public: Parcel data, reply; sp<IAudioTrack> track; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(pid); data.writeInt32((int32_t) streamType); data.writeInt32(sampleRate); data.writeInt32(format); data.writeInt32(channelMask); data.writeInt32(frameCount); - data.writeInt32((int32_t) flags); + track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT; + data.writeInt32(lFlags); data.writeStrongBinder(sharedBuffer->asBinder()); data.writeInt32((int32_t) output); data.writeInt32((int32_t) tid); @@ -119,6 +118,10 @@ public: if (lStatus != NO_ERROR) { ALOGE("createTrack error: %s", strerror(-lStatus)); } else { + lFlags = reply.readInt32(); + if (flags != NULL) { + *flags = lFlags; + } lSessionId = reply.readInt32(); if (sessionId != NULL) { *sessionId = lSessionId; @@ -133,12 +136,11 @@ public: } virtual sp<IAudioRecord> openRecord( - pid_t pid, audio_io_handle_t input, uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, - int frameCount, + size_t frameCount, track_flags_t flags, pid_t tid, int *sessionId, @@ -147,7 +149,6 @@ public: Parcel data, reply; sp<IAudioRecord> record; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(pid); data.writeInt32((int32_t) input); data.writeInt32(sampleRate); data.writeInt32(format); @@ -186,17 +187,6 @@ public: return reply.readInt32(); } -#if 0 - virtual int channelCount(audio_io_handle_t output) const - { - Parcel data, reply; - data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32((int32_t) output); - remote()->transact(CHANNEL_COUNT, data, &reply); - return reply.readInt32(); - } -#endif - virtual audio_format_t format(audio_io_handle_t output) const { Parcel data, reply; @@ -501,7 +491,7 @@ public: return reply.readInt32(); } - virtual status_t getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames, + virtual status_t getRenderPosition(size_t *halFrames, size_t *dspFrames, audio_io_handle_t output) const { Parcel data, reply; @@ -522,7 +512,7 @@ public: return status; } - virtual unsigned int getInputFramesLost(audio_io_handle_t ioHandle) const + virtual size_t getInputFramesLost(audio_io_handle_t ioHandle) const { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); @@ -618,7 +608,7 @@ public: return NO_ERROR; } - virtual sp<IEffect> createEffect(pid_t pid, + virtual sp<IEffect> createEffect( effect_descriptor_t *pDesc, const sp<IEffectClient>& client, int32_t priority, @@ -639,7 +629,6 @@ public: } data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); - data.writeInt32(pid); data.write(pDesc, sizeof(effect_descriptor_t)); 
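The createTrack() change above makes the track flags an in/out parameter over Binder: the proxy writes the requested flags, the server may strip what it cannot honor (e.g. a fast track), and the granted value travels back in the reply. A standalone sketch of that negotiation; the flag names mirror IAudioFlinger::TRACK_* from the diff, but the numeric values and helper names here are illustrative:

    #include <cstdint>
    #include <cstdio>

    typedef uint32_t track_flags_t;
    enum { TRACK_DEFAULT = 0, TRACK_FAST = 1 << 0 };   // illustrative values

    // Server side: grant only what the output thread can actually support.
    static track_flags_t serverGrantFlags(track_flags_t requested, bool fastCapable) {
        if ((requested & TRACK_FAST) && !fastCapable) {
            requested &= ~TRACK_FAST;                  // denial is visible to the client
        }
        return requested;
    }

    // Client side: pass the request by pointer and read the granted value back,
    // standing in for the writeInt32/readInt32 round trip in BpAudioFlinger::createTrack().
    static void requestTrack(track_flags_t *flags, bool fastCapable) {
        track_flags_t lFlags = (flags != nullptr) ? *flags : (track_flags_t) TRACK_DEFAULT;
        lFlags = serverGrantFlags(lFlags, fastCapable);
        if (flags != nullptr) {
            *flags = lFlags;
        }
    }

    int main() {
        track_flags_t flags = TRACK_FAST;
        requestTrack(&flags, /*fastCapable=*/false);
        printf("granted flags: 0x%x\n", (unsigned) flags);   // 0x0: FAST was denied
        return 0;
    }

On denial, the AudioTrack hunks above clear AUDIO_OUTPUT_FLAG_FAST so the fast path is not requested again if the IAudioTrack is re-created.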
data.writeStrongBinder(client->asBinder()); data.writeInt32(priority); @@ -690,7 +679,7 @@ public: return (audio_module_handle_t) reply.readInt32(); } - virtual int32_t getPrimaryOutputSamplingRate() + virtual uint32_t getPrimaryOutputSamplingRate() { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); @@ -698,7 +687,7 @@ public: return reply.readInt32(); } - virtual int32_t getPrimaryOutputFrameCount() + virtual size_t getPrimaryOutputFrameCount() { Parcel data, reply; data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); @@ -718,21 +707,21 @@ status_t BnAudioFlinger::onTransact( switch (code) { case CREATE_TRACK: { CHECK_INTERFACE(IAudioFlinger, data, reply); - pid_t pid = data.readInt32(); int streamType = data.readInt32(); uint32_t sampleRate = data.readInt32(); audio_format_t format = (audio_format_t) data.readInt32(); audio_channel_mask_t channelMask = data.readInt32(); - size_t bufferCount = data.readInt32(); + size_t frameCount = data.readInt32(); track_flags_t flags = (track_flags_t) data.readInt32(); sp<IMemory> buffer = interface_cast<IMemory>(data.readStrongBinder()); audio_io_handle_t output = (audio_io_handle_t) data.readInt32(); pid_t tid = (pid_t) data.readInt32(); int sessionId = data.readInt32(); status_t status; - sp<IAudioTrack> track = createTrack(pid, + sp<IAudioTrack> track = createTrack( (audio_stream_type_t) streamType, sampleRate, format, - channelMask, bufferCount, flags, buffer, output, tid, &sessionId, &status); + channelMask, frameCount, &flags, buffer, output, tid, &sessionId, &status); + reply->writeInt32(flags); reply->writeInt32(sessionId); reply->writeInt32(status); reply->writeStrongBinder(track->asBinder()); @@ -740,18 +729,17 @@ status_t BnAudioFlinger::onTransact( } break; case OPEN_RECORD: { CHECK_INTERFACE(IAudioFlinger, data, reply); - pid_t pid = data.readInt32(); audio_io_handle_t input = (audio_io_handle_t) data.readInt32(); uint32_t sampleRate = data.readInt32(); audio_format_t format = (audio_format_t) data.readInt32(); audio_channel_mask_t channelMask = data.readInt32(); - size_t bufferCount = data.readInt32(); + size_t frameCount = data.readInt32(); track_flags_t flags = (track_flags_t) data.readInt32(); pid_t tid = (pid_t) data.readInt32(); int sessionId = data.readInt32(); status_t status; - sp<IAudioRecord> record = openRecord(pid, input, - sampleRate, format, channelMask, bufferCount, flags, tid, &sessionId, &status); + sp<IAudioRecord> record = openRecord(input, + sampleRate, format, channelMask, frameCount, flags, tid, &sessionId, &status); reply->writeInt32(sessionId); reply->writeInt32(status); reply->writeStrongBinder(record->asBinder()); @@ -762,13 +750,6 @@ status_t BnAudioFlinger::onTransact( reply->writeInt32( sampleRate((audio_io_handle_t) data.readInt32()) ); return NO_ERROR; } break; -#if 0 - case CHANNEL_COUNT: { - CHECK_INTERFACE(IAudioFlinger, data, reply); - reply->writeInt32( channelCount((audio_io_handle_t) data.readInt32()) ); - return NO_ERROR; - } break; -#endif case FORMAT: { CHECK_INTERFACE(IAudioFlinger, data, reply); reply->writeInt32( format((audio_io_handle_t) data.readInt32()) ); @@ -865,7 +846,8 @@ status_t BnAudioFlinger::onTransact( case REGISTER_CLIENT: { CHECK_INTERFACE(IAudioFlinger, data, reply); - sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>(data.readStrongBinder()); + sp<IAudioFlingerClient> client = interface_cast<IAudioFlingerClient>( + data.readStrongBinder()); registerClient(client); return NO_ERROR; } break; @@ -965,8 +947,8 
@@ status_t BnAudioFlinger::onTransact( case GET_RENDER_POSITION: { CHECK_INTERFACE(IAudioFlinger, data, reply); audio_io_handle_t output = (audio_io_handle_t) data.readInt32(); - uint32_t halFrames; - uint32_t dspFrames; + size_t halFrames; + size_t dspFrames; status_t status = getRenderPosition(&halFrames, &dspFrames, output); reply->writeInt32(status); if (status == NO_ERROR) { @@ -1032,7 +1014,6 @@ status_t BnAudioFlinger::onTransact( } case CREATE_EFFECT: { CHECK_INTERFACE(IAudioFlinger, data, reply); - pid_t pid = data.readInt32(); effect_descriptor_t desc; data.read(&desc, sizeof(effect_descriptor_t)); sp<IEffectClient> client = interface_cast<IEffectClient>(data.readStrongBinder()); @@ -1043,7 +1024,8 @@ status_t BnAudioFlinger::onTransact( int id; int enabled; - sp<IEffect> effect = createEffect(pid, &desc, client, priority, output, sessionId, &status, &id, &enabled); + sp<IEffect> effect = createEffect(&desc, client, priority, output, sessionId, + &status, &id, &enabled); reply->writeInt32(status); reply->writeInt32(id); reply->writeInt32(enabled); diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp index 4178b29..2d1e0f8 100644 --- a/media/libmedia/IAudioFlingerClient.cpp +++ b/media/libmedia/IAudioFlingerClient.cpp @@ -50,7 +50,8 @@ public: ALOGV("ioConfigChanged stream %d", stream); data.writeInt32(stream); } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { - const AudioSystem::OutputDescriptor *desc = (const AudioSystem::OutputDescriptor *)param2; + const AudioSystem::OutputDescriptor *desc = + (const AudioSystem::OutputDescriptor *)param2; data.writeInt32(desc->samplingRate); data.writeInt32(desc->format); data.writeInt32(desc->channels); diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp index 401437c..109044d 100644 --- a/media/libmedia/IAudioPolicyService.cpp +++ b/media/libmedia/IAudioPolicyService.cpp @@ -23,6 +23,7 @@ #include <binder/Parcel.h> +#include <media/AudioEffect.h> #include <media/IAudioPolicyService.h> #include <system/audio.h> @@ -55,7 +56,8 @@ enum { IS_SOURCE_ACTIVE, GET_DEVICES_FOR_STREAM, QUERY_DEFAULT_PRE_PROCESSING, - SET_EFFECT_ENABLED + SET_EFFECT_ENABLED, + IS_STREAM_ACTIVE_REMOTELY }; class BpAudioPolicyService : public BpInterface<IAudioPolicyService> @@ -330,6 +332,16 @@ public: return reply.readInt32(); } + virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs) const + { + Parcel data, reply; + data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); + data.writeInt32((int32_t) stream); + data.writeInt32(inPastMs); + remote()->transact(IS_STREAM_ACTIVE_REMOTELY, data, &reply); + return reply.readInt32(); + } + virtual bool isSourceActive(audio_source_t source) const { Parcel data, reply; @@ -399,13 +411,15 @@ status_t BnAudioPolicyService::onTransact( case SET_PHONE_STATE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); - reply->writeInt32(static_cast <uint32_t>(setPhoneState((audio_mode_t) data.readInt32()))); + reply->writeInt32(static_cast <uint32_t>(setPhoneState( + (audio_mode_t) data.readInt32()))); return NO_ERROR; } break; case SET_FORCE_USE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); - audio_policy_force_use_t usage = static_cast <audio_policy_force_use_t>(data.readInt32()); + audio_policy_force_use_t usage = static_cast <audio_policy_force_use_t>( + data.readInt32()); audio_policy_forced_cfg_t config = static_cast 
<audio_policy_forced_cfg_t>(data.readInt32()); reply->writeInt32(static_cast <uint32_t>(setForceUse(usage, config))); @@ -414,7 +428,8 @@ status_t BnAudioPolicyService::onTransact( case GET_FORCE_USE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); - audio_policy_force_use_t usage = static_cast <audio_policy_force_use_t>(data.readInt32()); + audio_policy_force_use_t usage = static_cast <audio_policy_force_use_t>( + data.readInt32()); reply->writeInt32(static_cast <uint32_t>(getForceUse(usage))); return NO_ERROR; } break; @@ -602,6 +617,14 @@ status_t BnAudioPolicyService::onTransact( return NO_ERROR; } break; + case IS_STREAM_ACTIVE_REMOTELY: { + CHECK_INTERFACE(IAudioPolicyService, data, reply); + audio_stream_type_t stream = (audio_stream_type_t) data.readInt32(); + uint32_t inPastMs = (uint32_t)data.readInt32(); + reply->writeInt32( isStreamActiveRemotely((audio_stream_type_t) stream, inPastMs) ); + return NO_ERROR; + } break; + case IS_SOURCE_ACTIVE: { CHECK_INTERFACE(IAudioPolicyService, data, reply); audio_source_t source = (audio_source_t) data.readInt32(); @@ -613,16 +636,18 @@ status_t BnAudioPolicyService::onTransact( CHECK_INTERFACE(IAudioPolicyService, data, reply); int audioSession = data.readInt32(); uint32_t count = data.readInt32(); + if (count > AudioEffect::kMaxPreProcessing) { + count = AudioEffect::kMaxPreProcessing; + } uint32_t retCount = count; - effect_descriptor_t *descriptors = - (effect_descriptor_t *)new char[count * sizeof(effect_descriptor_t)]; + effect_descriptor_t *descriptors = new effect_descriptor_t[count]; status_t status = queryDefaultPreProcessing(audioSession, descriptors, &retCount); reply->writeInt32(status); if (status != NO_ERROR && status != NO_MEMORY) { retCount = 0; } reply->writeInt32(retCount); - if (retCount) { + if (retCount != 0) { if (retCount < count) { count = retCount; } diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index 867d1a5..e92f8aa 100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -33,7 +33,7 @@ enum { START, STOP, FLUSH, - MUTE, + RESERVED, // was MUTE PAUSE, ATTACH_AUX_EFFECT, ALLOCATE_TIMED_BUFFER, @@ -88,14 +88,6 @@ public: remote()->transact(FLUSH, data, &reply); } - virtual void mute(bool e) - { - Parcel data, reply; - data.writeInterfaceToken(IAudioTrack::getInterfaceDescriptor()); - data.writeInt32(e); - remote()->transact(MUTE, data, &reply); - } - virtual void pause() { Parcel data, reply; @@ -192,11 +184,6 @@ status_t BnAudioTrack::onTransact( flush(); return NO_ERROR; } break; - case MUTE: { - CHECK_INTERFACE(IAudioTrack, data, reply); - mute( data.readInt32() ); - return NO_ERROR; - } break; case PAUSE: { CHECK_INTERFACE(IAudioTrack, data, reply); pause(); diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp index 2defc2d..98b183a 100644 --- a/media/libmedia/ICrypto.cpp +++ b/media/libmedia/ICrypto.cpp @@ -48,7 +48,7 @@ struct BpCrypto : public BpInterface<ICrypto> { return reply.readInt32(); } - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) const { + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) { Parcel data, reply; data.writeInterfaceToken(ICrypto::getInterfaceDescriptor()); data.write(uuid, 16); diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp new file mode 100644 index 0000000..902aeb2 --- /dev/null +++ b/media/libmedia/IDrm.cpp @@ -0,0 +1,739 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you 
may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IDrm" +#include <utils/Log.h> + +#include <binder/Parcel.h> +#include <media/IDrm.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AString.h> + +namespace android { + +enum { + INIT_CHECK = IBinder::FIRST_CALL_TRANSACTION, + IS_CRYPTO_SUPPORTED, + CREATE_PLUGIN, + DESTROY_PLUGIN, + OPEN_SESSION, + CLOSE_SESSION, + GET_KEY_REQUEST, + PROVIDE_KEY_RESPONSE, + REMOVE_KEYS, + RESTORE_KEYS, + QUERY_KEY_STATUS, + GET_PROVISION_REQUEST, + PROVIDE_PROVISION_RESPONSE, + GET_SECURE_STOPS, + RELEASE_SECURE_STOPS, + GET_PROPERTY_STRING, + GET_PROPERTY_BYTE_ARRAY, + SET_PROPERTY_STRING, + SET_PROPERTY_BYTE_ARRAY, + SET_CIPHER_ALGORITHM, + SET_MAC_ALGORITHM, + ENCRYPT, + DECRYPT, + SIGN, + VERIFY, + SET_LISTENER +}; + +struct BpDrm : public BpInterface<IDrm> { + BpDrm(const sp<IBinder> &impl) + : BpInterface<IDrm>(impl) { + } + + virtual status_t initCheck() const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + remote()->transact(INIT_CHECK, data, &reply); + + return reply.readInt32(); + } + + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + data.write(uuid, 16); + remote()->transact(IS_CRYPTO_SUPPORTED, data, &reply); + + return reply.readInt32() != 0; + } + + virtual status_t createPlugin(const uint8_t uuid[16]) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + data.write(uuid, 16); + + remote()->transact(CREATE_PLUGIN, data, &reply); + + return reply.readInt32(); + } + + virtual status_t destroyPlugin() { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + remote()->transact(DESTROY_PLUGIN, data, &reply); + + return reply.readInt32(); + } + + virtual status_t openSession(Vector<uint8_t> &sessionId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + remote()->transact(OPEN_SESSION, data, &reply); + readVector(reply, sessionId); + + return reply.readInt32(); + } + + virtual status_t closeSession(Vector<uint8_t> const &sessionId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + remote()->transact(CLOSE_SESSION, data, &reply); + + return reply.readInt32(); + } + + virtual status_t + getKeyRequest(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &initData, + String8 const &mimeType, DrmPlugin::KeyType keyType, + KeyedVector<String8, String8> const &optionalParameters, + Vector<uint8_t> &request, String8 &defaultUrl) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, initData); + data.writeString8(mimeType); + data.writeInt32((uint32_t)keyType); + + data.writeInt32(optionalParameters.size()); + for (size_t i = 0; i < optionalParameters.size(); ++i) { + 
data.writeString8(optionalParameters.keyAt(i)); + data.writeString8(optionalParameters.valueAt(i)); + } + remote()->transact(GET_KEY_REQUEST, data, &reply); + + readVector(reply, request); + defaultUrl = reply.readString8(); + + return reply.readInt32(); + } + + virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &response, + Vector<uint8_t> &keySetId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + writeVector(data, sessionId); + writeVector(data, response); + remote()->transact(PROVIDE_KEY_RESPONSE, data, &reply); + readVector(reply, keySetId); + + return reply.readInt32(); + } + + virtual status_t removeKeys(Vector<uint8_t> const &keySetId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, keySetId); + remote()->transact(REMOVE_KEYS, data, &reply); + + return reply.readInt32(); + } + + virtual status_t restoreKeys(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keySetId) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keySetId); + remote()->transact(RESTORE_KEYS, data, &reply); + + return reply.readInt32(); + } + + virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId, + KeyedVector<String8, String8> &infoMap) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + remote()->transact(QUERY_KEY_STATUS, data, &reply); + + infoMap.clear(); + size_t count = reply.readInt32(); + for (size_t i = 0; i < count; i++) { + String8 key = reply.readString8(); + String8 value = reply.readString8(); + infoMap.add(key, value); + } + return reply.readInt32(); + } + + virtual status_t getProvisionRequest(Vector<uint8_t> &request, + String8 &defaultUrl) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + remote()->transact(GET_PROVISION_REQUEST, data, &reply); + + readVector(reply, request); + defaultUrl = reply.readString8(); + + return reply.readInt32(); + } + + virtual status_t provideProvisionResponse(Vector<uint8_t> const &response) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, response); + remote()->transact(PROVIDE_PROVISION_RESPONSE, data, &reply); + + return reply.readInt32(); + } + + virtual status_t getSecureStops(List<Vector<uint8_t> > &secureStops) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + remote()->transact(GET_SECURE_STOPS, data, &reply); + + secureStops.clear(); + uint32_t count = reply.readInt32(); + for (size_t i = 0; i < count; i++) { + Vector<uint8_t> secureStop; + readVector(reply, secureStop); + secureStops.push_back(secureStop); + } + return reply.readInt32(); + } + + virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, ssRelease); + remote()->transact(RELEASE_SECURE_STOPS, data, &reply); + + return reply.readInt32(); + } + + virtual status_t getPropertyString(String8 const &name, String8 &value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + remote()->transact(GET_PROPERTY_STRING, data, &reply); + + value = reply.readString8(); + return reply.readInt32(); + } + + virtual status_t getPropertyByteArray(String8 const &name, Vector<uint8_t> 
&value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + remote()->transact(GET_PROPERTY_BYTE_ARRAY, data, &reply); + + readVector(reply, value); + return reply.readInt32(); + } + + virtual status_t setPropertyString(String8 const &name, String8 const &value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + data.writeString8(value); + remote()->transact(SET_PROPERTY_STRING, data, &reply); + + return reply.readInt32(); + } + + virtual status_t setPropertyByteArray(String8 const &name, + Vector<uint8_t> const &value) const { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + data.writeString8(name); + writeVector(data, value); + remote()->transact(SET_PROPERTY_BYTE_ARRAY, data, &reply); + + return reply.readInt32(); + } + + + virtual status_t setCipherAlgorithm(Vector<uint8_t> const &sessionId, + String8 const &algorithm) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + data.writeString8(algorithm); + remote()->transact(SET_CIPHER_ALGORITHM, data, &reply); + return reply.readInt32(); + } + + virtual status_t setMacAlgorithm(Vector<uint8_t> const &sessionId, + String8 const &algorithm) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + data.writeString8(algorithm); + remote()->transact(SET_MAC_ALGORITHM, data, &reply); + return reply.readInt32(); + } + + virtual status_t encrypt(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &input, + Vector<uint8_t> const &iv, + Vector<uint8_t> &output) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, input); + writeVector(data, iv); + + remote()->transact(ENCRYPT, data, &reply); + readVector(reply, output); + + return reply.readInt32(); + } + + virtual status_t decrypt(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &input, + Vector<uint8_t> const &iv, + Vector<uint8_t> &output) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, input); + writeVector(data, iv); + + remote()->transact(DECRYPT, data, &reply); + readVector(reply, output); + + return reply.readInt32(); + } + + virtual status_t sign(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &message, + Vector<uint8_t> &signature) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, message); + + remote()->transact(SIGN, data, &reply); + readVector(reply, signature); + + return reply.readInt32(); + } + + virtual status_t verify(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &message, + Vector<uint8_t> const &signature, + bool &match) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + + writeVector(data, sessionId); + writeVector(data, keyId); + writeVector(data, message); + writeVector(data, signature); + + remote()->transact(VERIFY, data, &reply); + match = (bool)reply.readInt32(); + return reply.readInt32(); + } + + virtual status_t setListener(const 
sp<IDrmClient>& listener) { + Parcel data, reply; + data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); + data.writeStrongBinder(listener->asBinder()); + remote()->transact(SET_LISTENER, data, &reply); + return reply.readInt32(); + } + +private: + void readVector(Parcel &reply, Vector<uint8_t> &vector) const { + uint32_t size = reply.readInt32(); + vector.insertAt((size_t)0, size); + reply.read(vector.editArray(), size); + } + + void writeVector(Parcel &data, Vector<uint8_t> const &vector) const { + data.writeInt32(vector.size()); + data.write(vector.array(), vector.size()); + } + + DISALLOW_EVIL_CONSTRUCTORS(BpDrm); +}; + +IMPLEMENT_META_INTERFACE(Drm, "android.drm.IDrm"); + +//////////////////////////////////////////////////////////////////////////////// + +void BnDrm::readVector(const Parcel &data, Vector<uint8_t> &vector) const { + uint32_t size = data.readInt32(); + vector.insertAt((size_t)0, size); + data.read(vector.editArray(), size); +} + +void BnDrm::writeVector(Parcel *reply, Vector<uint8_t> const &vector) const { + reply->writeInt32(vector.size()); + reply->write(vector.array(), vector.size()); +} + +status_t BnDrm::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { + switch (code) { + case INIT_CHECK: + { + CHECK_INTERFACE(IDrm, data, reply); + reply->writeInt32(initCheck()); + return OK; + } + + case IS_CRYPTO_SUPPORTED: + { + CHECK_INTERFACE(IDrm, data, reply); + uint8_t uuid[16]; + data.read(uuid, sizeof(uuid)); + reply->writeInt32(isCryptoSchemeSupported(uuid)); + return OK; + } + + case CREATE_PLUGIN: + { + CHECK_INTERFACE(IDrm, data, reply); + uint8_t uuid[16]; + data.read(uuid, sizeof(uuid)); + reply->writeInt32(createPlugin(uuid)); + return OK; + } + + case DESTROY_PLUGIN: + { + CHECK_INTERFACE(IDrm, data, reply); + reply->writeInt32(destroyPlugin()); + return OK; + } + + case OPEN_SESSION: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId; + status_t result = openSession(sessionId); + writeVector(reply, sessionId); + reply->writeInt32(result); + return OK; + } + + case CLOSE_SESSION: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId; + readVector(data, sessionId); + reply->writeInt32(closeSession(sessionId)); + return OK; + } + + case GET_KEY_REQUEST: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId, initData; + + readVector(data, sessionId); + readVector(data, initData); + String8 mimeType = data.readString8(); + DrmPlugin::KeyType keyType = (DrmPlugin::KeyType)data.readInt32(); + + KeyedVector<String8, String8> optionalParameters; + uint32_t count = data.readInt32(); + for (size_t i = 0; i < count; ++i) { + String8 key, value; + key = data.readString8(); + value = data.readString8(); + optionalParameters.add(key, value); + } + + Vector<uint8_t> request; + String8 defaultUrl; + + status_t result = getKeyRequest(sessionId, initData, + mimeType, keyType, + optionalParameters, + request, defaultUrl); + writeVector(reply, request); + reply->writeString8(defaultUrl); + reply->writeInt32(result); + return OK; + } + + case PROVIDE_KEY_RESPONSE: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId, response, keySetId; + readVector(data, sessionId); + readVector(data, response); + uint32_t result = provideKeyResponse(sessionId, response, keySetId); + writeVector(reply, keySetId); + reply->writeInt32(result); + return OK; + } + + case REMOVE_KEYS: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> keySetId; + readVector(data, keySetId); + 
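// --- Illustrative sketch, not part of the patch: the BpDrm/BnDrm helpers
// above marshal Vector<uint8_t> as a length-prefixed byte array. A standalone
// round-trip, using only Parcel/Vector calls that already appear in this file
// (function names here are hypothetical):
static void marshalBytes(Parcel &p, const Vector<uint8_t> &v) {
    p.writeInt32(v.size());          // length prefix
    p.write(v.array(), v.size());    // raw payload
}
static void unmarshalBytes(const Parcel &p, Vector<uint8_t> &v) {
    const uint32_t size = p.readInt32();   // length prefix
    v.insertAt((size_t)0, size);           // pre-size the vector
    p.read(v.editArray(), size);           // copy payload out of the parcel
}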
reply->writeInt32(removeKeys(keySetId)); + return OK; + } + + case RESTORE_KEYS: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId, keySetId; + readVector(data, sessionId); + readVector(data, keySetId); + reply->writeInt32(restoreKeys(sessionId, keySetId)); + return OK; + } + + case QUERY_KEY_STATUS: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId; + readVector(data, sessionId); + KeyedVector<String8, String8> infoMap; + status_t result = queryKeyStatus(sessionId, infoMap); + size_t count = infoMap.size(); + reply->writeInt32(count); + for (size_t i = 0; i < count; ++i) { + reply->writeString8(infoMap.keyAt(i)); + reply->writeString8(infoMap.valueAt(i)); + } + reply->writeInt32(result); + return OK; + } + + case GET_PROVISION_REQUEST: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> request; + String8 defaultUrl; + status_t result = getProvisionRequest(request, defaultUrl); + writeVector(reply, request); + reply->writeString8(defaultUrl); + reply->writeInt32(result); + return OK; + } + + case PROVIDE_PROVISION_RESPONSE: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> response; + readVector(data, response); + reply->writeInt32(provideProvisionResponse(response)); + return OK; + } + + case GET_SECURE_STOPS: + { + CHECK_INTERFACE(IDrm, data, reply); + List<Vector<uint8_t> > secureStops; + status_t result = getSecureStops(secureStops); + size_t count = secureStops.size(); + reply->writeInt32(count); + List<Vector<uint8_t> >::iterator iter = secureStops.begin(); + while(iter != secureStops.end()) { + size_t size = iter->size(); + reply->writeInt32(size); + reply->write(iter->array(), iter->size()); + iter++; + } + reply->writeInt32(result); + return OK; + } + + case RELEASE_SECURE_STOPS: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> ssRelease; + readVector(data, ssRelease); + reply->writeInt32(releaseSecureStops(ssRelease)); + return OK; + } + + case GET_PROPERTY_STRING: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = data.readString8(); + String8 value; + status_t result = getPropertyString(name, value); + reply->writeString8(value); + reply->writeInt32(result); + return OK; + } + + case GET_PROPERTY_BYTE_ARRAY: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = data.readString8(); + Vector<uint8_t> value; + status_t result = getPropertyByteArray(name, value); + writeVector(reply, value); + reply->writeInt32(result); + return OK; + } + + case SET_PROPERTY_STRING: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = data.readString8(); + String8 value = data.readString8(); + reply->writeInt32(setPropertyString(name, value)); + return OK; + } + + case SET_PROPERTY_BYTE_ARRAY: + { + CHECK_INTERFACE(IDrm, data, reply); + String8 name = data.readString8(); + Vector<uint8_t> value; + readVector(data, value); + reply->writeInt32(setPropertyByteArray(name, value)); + return OK; + } + + case SET_CIPHER_ALGORITHM: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId; + readVector(data, sessionId); + String8 algorithm = data.readString8(); + reply->writeInt32(setCipherAlgorithm(sessionId, algorithm)); + return OK; + } + + case SET_MAC_ALGORITHM: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId; + readVector(data, sessionId); + String8 algorithm = data.readString8(); + reply->writeInt32(setMacAlgorithm(sessionId, algorithm)); + return OK; + } + + case ENCRYPT: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId, keyId, input, iv, output; + 
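// Note on the reply layout used by the handlers above (and mirrored by the
// BpDrm methods earlier in this file): out-parameters are written to the
// reply first and the status_t last, so the proxy reads them back in the
// same order, e.g.
//   service:  writeVector(reply, output); reply->writeInt32(result);
//   proxy:    readVector(reply, output);  return reply.readInt32();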
readVector(data, sessionId); + readVector(data, keyId); + readVector(data, input); + readVector(data, iv); + uint32_t result = encrypt(sessionId, keyId, input, iv, output); + writeVector(reply, output); + reply->writeInt32(result); + return OK; + } + + case DECRYPT: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId, keyId, input, iv, output; + readVector(data, sessionId); + readVector(data, keyId); + readVector(data, input); + readVector(data, iv); + uint32_t result = decrypt(sessionId, keyId, input, iv, output); + writeVector(reply, output); + reply->writeInt32(result); + return OK; + } + + case SIGN: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId, keyId, message, signature; + readVector(data, sessionId); + readVector(data, keyId); + readVector(data, message); + uint32_t result = sign(sessionId, keyId, message, signature); + writeVector(reply, signature); + reply->writeInt32(result); + return OK; + } + + case VERIFY: + { + CHECK_INTERFACE(IDrm, data, reply); + Vector<uint8_t> sessionId, keyId, message, signature; + readVector(data, sessionId); + readVector(data, keyId); + readVector(data, message); + readVector(data, signature); + bool match; + uint32_t result = verify(sessionId, keyId, message, signature, match); + reply->writeInt32(match); + reply->writeInt32(result); + return OK; + } + + case SET_LISTENER: { + CHECK_INTERFACE(IDrm, data, reply); + sp<IDrmClient> listener = + interface_cast<IDrmClient>(data.readStrongBinder()); + reply->writeInt32(setListener(listener)); + return NO_ERROR; + } break; + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +} // namespace android + diff --git a/media/libmedia/IDrmClient.cpp b/media/libmedia/IDrmClient.cpp new file mode 100644 index 0000000..f50715e --- /dev/null +++ b/media/libmedia/IDrmClient.cpp @@ -0,0 +1,81 @@ +/* +** +** Copyright 2013, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IDrmClient" +#include <utils/Log.h> + +#include <utils/RefBase.h> +#include <binder/IInterface.h> +#include <binder/Parcel.h> + +#include <media/IMediaPlayerClient.h> +#include <media/IDrmClient.h> + +namespace android { + +enum { + NOTIFY = IBinder::FIRST_CALL_TRANSACTION, +}; + +class BpDrmClient: public BpInterface<IDrmClient> +{ +public: + BpDrmClient(const sp<IBinder>& impl) + : BpInterface<IDrmClient>(impl) + { + } + + virtual void notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj) + { + Parcel data, reply; + data.writeInterfaceToken(IDrmClient::getInterfaceDescriptor()); + data.writeInt32((int)eventType); + data.writeInt32(extra); + if (obj && obj->dataSize() > 0) { + data.appendFrom(const_cast<Parcel *>(obj), 0, obj->dataSize()); + } + remote()->transact(NOTIFY, data, &reply, IBinder::FLAG_ONEWAY); + } +}; + +IMPLEMENT_META_INTERFACE(DrmClient, "android.media.IDrmClient"); + +// ---------------------------------------------------------------------- + +status_t BnDrmClient::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch (code) { + case NOTIFY: { + CHECK_INTERFACE(IDrmClient, data, reply); + int eventType = data.readInt32(); + int extra = data.readInt32(); + Parcel obj; + if (data.dataAvail() > 0) { + obj.appendFrom(const_cast<Parcel *>(&data), data.dataPosition(), data.dataAvail()); + } + + notify((DrmPlugin::EventType)eventType, extra, &obj); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +}; // namespace android diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp index 493f5a4..f13addc 100644 --- a/media/libmedia/IHDCP.cpp +++ b/media/libmedia/IHDCP.cpp @@ -31,6 +31,7 @@ enum { HDCP_INIT_ASYNC, HDCP_SHUTDOWN_ASYNC, HDCP_ENCRYPT, + HDCP_DECRYPT, }; struct BpHDCPObserver : public BpInterface<IHDCPObserver> { @@ -106,6 +107,29 @@ struct BpHDCP : public BpInterface<IHDCP> { return err; } + + virtual status_t decrypt( + const void *inData, size_t size, + uint32_t streamCTR, uint64_t inputCTR, + void *outData) { + Parcel data, reply; + data.writeInterfaceToken(IHDCP::getInterfaceDescriptor()); + data.writeInt32(size); + data.write(inData, size); + data.writeInt32(streamCTR); + data.writeInt64(inputCTR); + remote()->transact(HDCP_DECRYPT, data, &reply); + + status_t err = reply.readInt32(); + + if (err != OK) { + return err; + } + + reply.read(outData, size); + + return err; + } }; IMPLEMENT_META_INTERFACE(HDCP, "android.hardware.IHDCP"); @@ -198,6 +222,31 @@ status_t BnHDCP::onTransact( return OK; } + case HDCP_DECRYPT: + { + size_t size = data.readInt32(); + + void *inData = malloc(2 * size); + void *outData = (uint8_t *)inData + size; + + data.read(inData, size); + + uint32_t streamCTR = data.readInt32(); + uint64_t inputCTR = data.readInt64(); + status_t err = decrypt(inData, size, streamCTR, inputCTR, outData); + + reply->writeInt32(err); + + if (err == OK) { + reply->write(outData, size); + } + + free(inData); + inData = outData = NULL; + + return OK; + } + default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/media/libmedia/IMediaLogService.cpp b/media/libmedia/IMediaLogService.cpp new file mode 100644 index 0000000..33239a7 --- /dev/null +++ b/media/libmedia/IMediaLogService.cpp @@ -0,0 +1,94 @@ +/* +** +** Copyright 2007, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance 
with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#define LOG_TAG "IMediaLogService" +//#define LOG_NDEBUG 0 + +#include <utils/Log.h> +#include <stdint.h> +#include <sys/types.h> +#include <binder/Parcel.h> +#include <media/IMediaLogService.h> + +namespace android { + +enum { + REGISTER_WRITER = IBinder::FIRST_CALL_TRANSACTION, + UNREGISTER_WRITER, +}; + +class BpMediaLogService : public BpInterface<IMediaLogService> +{ +public: + BpMediaLogService(const sp<IBinder>& impl) + : BpInterface<IMediaLogService>(impl) + { + } + + virtual void registerWriter(const sp<IMemory>& shared, size_t size, const char *name) { + Parcel data, reply; + data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor()); + data.writeStrongBinder(shared->asBinder()); + data.writeInt32((int32_t) size); + data.writeCString(name); + status_t status = remote()->transact(REGISTER_WRITER, data, &reply); + // FIXME ignores status + } + + virtual void unregisterWriter(const sp<IMemory>& shared) { + Parcel data, reply; + data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor()); + data.writeStrongBinder(shared->asBinder()); + status_t status = remote()->transact(UNREGISTER_WRITER, data, &reply); + // FIXME ignores status + } + +}; + +IMPLEMENT_META_INTERFACE(MediaLogService, "android.media.IMediaLogService"); + +// ---------------------------------------------------------------------- + +status_t BnMediaLogService::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch (code) { + + case REGISTER_WRITER: { + CHECK_INTERFACE(IMediaLogService, data, reply); + sp<IMemory> shared = interface_cast<IMemory>(data.readStrongBinder()); + size_t size = (size_t) data.readInt32(); + const char *name = data.readCString(); + registerWriter(shared, size, name); + return NO_ERROR; + } + + case UNREGISTER_WRITER: { + CHECK_INTERFACE(IMediaLogService, data, reply); + sp<IMemory> shared = interface_cast<IMemory>(data.readStrongBinder()); + unregisterWriter(shared); + return NO_ERROR; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp index cb07766..e79bcd2 100644 --- a/media/libmedia/IMediaPlayer.cpp +++ b/media/libmedia/IMediaPlayer.cpp @@ -24,7 +24,7 @@ #include <media/IMediaPlayer.h> #include <media/IStreamSource.h> -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> #include <utils/String8.h> namespace android { @@ -113,12 +113,12 @@ public: return reply.readInt32(); } - // pass the buffered ISurfaceTexture to the media player service - status_t setVideoSurfaceTexture(const sp<ISurfaceTexture>& surfaceTexture) + // pass the buffered IGraphicBufferProducer to the media player service + status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer>& bufferProducer) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); - sp<IBinder> b(surfaceTexture->asBinder()); + sp<IBinder> b(bufferProducer->asBinder()); 
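// The ISurfaceTexture -> IGraphicBufferProducer rename keeps the wire format
// unchanged: the proxy writes the producer's binder and the stub recovers the
// interface with interface_cast (sketch of the two ends, as shown in this
// hunk and the one below):
//   data.writeStrongBinder(bufferProducer->asBinder());
//   sp<IGraphicBufferProducer> bp =
//       interface_cast<IGraphicBufferProducer>(data.readStrongBinder());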
data.writeStrongBinder(b); remote()->transact(SET_VIDEO_SURFACETEXTURE, data, &reply); return reply.readInt32(); @@ -383,9 +383,9 @@ status_t BnMediaPlayer::onTransact( } case SET_VIDEO_SURFACETEXTURE: { CHECK_INTERFACE(IMediaPlayer, data, reply); - sp<ISurfaceTexture> surfaceTexture = - interface_cast<ISurfaceTexture>(data.readStrongBinder()); - reply->writeInt32(setVideoSurfaceTexture(surfaceTexture)); + sp<IGraphicBufferProducer> bufferProducer = + interface_cast<IGraphicBufferProducer>(data.readStrongBinder()); + reply->writeInt32(setVideoSurfaceTexture(bufferProducer)); return NO_ERROR; } break; case PREPARE_ASYNC: { diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index c0a0260..74f574d 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -21,6 +21,7 @@ #include <binder/Parcel.h> #include <binder/IMemory.h> #include <media/ICrypto.h> +#include <media/IDrm.h> #include <media/IHDCP.h> #include <media/IMediaPlayerService.h> #include <media/IMediaRecorder.h> @@ -42,10 +43,12 @@ enum { CREATE_METADATA_RETRIEVER, GET_OMX, MAKE_CRYPTO, + MAKE_DRM, MAKE_HDCP, ADD_BATTERY_DATA, PULL_BATTERY_DATA, LISTEN_FOR_REMOTE_DISPLAY, + UPDATE_PROXY_CONFIG, }; class BpMediaPlayerService: public BpInterface<IMediaPlayerService> @@ -56,20 +59,18 @@ public: { } - virtual sp<IMediaMetadataRetriever> createMetadataRetriever(pid_t pid) + virtual sp<IMediaMetadataRetriever> createMetadataRetriever() { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); - data.writeInt32(pid); remote()->transact(CREATE_METADATA_RETRIEVER, data, &reply); return interface_cast<IMediaMetadataRetriever>(reply.readStrongBinder()); } virtual sp<IMediaPlayer> create( - pid_t pid, const sp<IMediaPlayerClient>& client, int audioSessionId) { + const sp<IMediaPlayerClient>& client, int audioSessionId) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); - data.writeInt32(pid); data.writeStrongBinder(client->asBinder()); data.writeInt32(audioSessionId); @@ -77,11 +78,10 @@ public: return interface_cast<IMediaPlayer>(reply.readStrongBinder()); } - virtual sp<IMediaRecorder> createMediaRecorder(pid_t pid) + virtual sp<IMediaRecorder> createMediaRecorder() { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); - data.writeInt32(pid); remote()->transact(CREATE_MEDIA_RECORDER, data, &reply); return interface_cast<IMediaRecorder>(reply.readStrongBinder()); } @@ -126,9 +126,17 @@ public: return interface_cast<ICrypto>(reply.readStrongBinder()); } - virtual sp<IHDCP> makeHDCP() { + virtual sp<IDrm> makeDrm() { Parcel data, reply; data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + remote()->transact(MAKE_DRM, data, &reply); + return interface_cast<IDrm>(reply.readStrongBinder()); + } + + virtual sp<IHDCP> makeHDCP(bool createEncryptionModule) { + Parcel data, reply; + data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + data.writeInt32(createEncryptionModule); remote()->transact(MAKE_HDCP, data, &reply); return interface_cast<IHDCP>(reply.readStrongBinder()); } @@ -156,6 +164,25 @@ public: remote()->transact(LISTEN_FOR_REMOTE_DISPLAY, data, &reply); return interface_cast<IRemoteDisplay>(reply.readStrongBinder()); } + + virtual status_t updateProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + Parcel data, reply; + + 
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + if (host == NULL) { + data.writeInt32(0); + } else { + data.writeInt32(1); + data.writeCString(host); + data.writeInt32(port); + data.writeCString(exclusionList); + } + + remote()->transact(UPDATE_PROXY_CONFIG, data, &reply); + + return reply.readInt32(); + } }; IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.media.IMediaPlayerService"); @@ -168,11 +195,10 @@ status_t BnMediaPlayerService::onTransact( switch (code) { case CREATE: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - pid_t pid = data.readInt32(); sp<IMediaPlayerClient> client = interface_cast<IMediaPlayerClient>(data.readStrongBinder()); int audioSessionId = data.readInt32(); - sp<IMediaPlayer> player = create(pid, client, audioSessionId); + sp<IMediaPlayer> player = create(client, audioSessionId); reply->writeStrongBinder(player->asBinder()); return NO_ERROR; } break; @@ -206,15 +232,13 @@ status_t BnMediaPlayerService::onTransact( } break; case CREATE_MEDIA_RECORDER: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - pid_t pid = data.readInt32(); - sp<IMediaRecorder> recorder = createMediaRecorder(pid); + sp<IMediaRecorder> recorder = createMediaRecorder(); reply->writeStrongBinder(recorder->asBinder()); return NO_ERROR; } break; case CREATE_METADATA_RETRIEVER: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - pid_t pid = data.readInt32(); - sp<IMediaMetadataRetriever> retriever = createMetadataRetriever(pid); + sp<IMediaMetadataRetriever> retriever = createMetadataRetriever(); reply->writeStrongBinder(retriever->asBinder()); return NO_ERROR; } break; @@ -230,9 +254,16 @@ status_t BnMediaPlayerService::onTransact( reply->writeStrongBinder(crypto->asBinder()); return NO_ERROR; } break; + case MAKE_DRM: { + CHECK_INTERFACE(IMediaPlayerService, data, reply); + sp<IDrm> drm = makeDrm(); + reply->writeStrongBinder(drm->asBinder()); + return NO_ERROR; + } break; case MAKE_HDCP: { CHECK_INTERFACE(IMediaPlayerService, data, reply); - sp<IHDCP> hdcp = makeHDCP(); + bool createEncryptionModule = data.readInt32(); + sp<IHDCP> hdcp = makeHDCP(createEncryptionModule); reply->writeStrongBinder(hdcp->asBinder()); return NO_ERROR; } break; @@ -256,6 +287,24 @@ status_t BnMediaPlayerService::onTransact( reply->writeStrongBinder(display->asBinder()); return NO_ERROR; } break; + case UPDATE_PROXY_CONFIG: + { + CHECK_INTERFACE(IMediaPlayerService, data, reply); + + const char *host = NULL; + int32_t port = 0; + const char *exclusionList = NULL; + + if (data.readInt32()) { + host = data.readCString(); + port = data.readInt32(); + exclusionList = data.readCString(); + } + + reply->writeInt32(updateProxyConfig(host, port, exclusionList)); + + return OK; + } default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index a710fd7..8e58162 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -23,7 +23,7 @@ #include <media/IMediaRecorderClient.h> #include <media/IMediaRecorder.h> #include <gui/Surface.h> -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> #include <unistd.h> @@ -51,7 +51,8 @@ enum { SET_PARAMETERS, SET_PREVIEW_SURFACE, SET_CAMERA, - SET_LISTENER + SET_LISTENER, + SET_CLIENT_NAME }; class BpMediaRecorder: public BpInterface<IMediaRecorder> @@ -73,7 +74,7 @@ public: return reply.readInt32(); } - sp<ISurfaceTexture> querySurfaceMediaSource() + sp<IGraphicBufferProducer> querySurfaceMediaSource() { 
ALOGV("Query SurfaceMediaSource"); Parcel data, reply; @@ -83,15 +84,15 @@ public: if (returnedNull) { return NULL; } - return interface_cast<ISurfaceTexture>(reply.readStrongBinder()); + return interface_cast<IGraphicBufferProducer>(reply.readStrongBinder()); } - status_t setPreviewSurface(const sp<Surface>& surface) + status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface) { ALOGV("setPreviewSurface(%p)", surface.get()); Parcel data, reply; data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); - Surface::writeToParcel(surface, &data); + data.writeStrongBinder(surface->asBinder()); remote()->transact(SET_PREVIEW_SURFACE, data, &reply); return reply.readInt32(); } @@ -217,6 +218,16 @@ public: return reply.readInt32(); } + status_t setClientName(const String16& clientName) + { + ALOGV("setClientName(%s)", String8(clientName).string()); + Parcel data, reply; + data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); + data.writeString16(clientName); + remote()->transact(SET_CLIENT_NAME, data, &reply); + return reply.readInt32(); + } + status_t prepare() { ALOGV("prepare"); @@ -423,10 +434,16 @@ status_t BnMediaRecorder::onTransact( reply->writeInt32(setListener(listener)); return NO_ERROR; } break; + case SET_CLIENT_NAME: { + ALOGV("SET_CLIENT_NAME"); + CHECK_INTERFACE(IMediaRecorder, data, reply); + reply->writeInt32(setClientName(data.readString16())); + return NO_ERROR; + } case SET_PREVIEW_SURFACE: { ALOGV("SET_PREVIEW_SURFACE"); CHECK_INTERFACE(IMediaRecorder, data, reply); - sp<Surface> surface = Surface::readFromParcel(data); + sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>(data.readStrongBinder()); reply->writeInt32(setPreviewSurface(surface)); return NO_ERROR; } break; @@ -444,7 +461,7 @@ status_t BnMediaRecorder::onTransact( CHECK_INTERFACE(IMediaRecorder, data, reply); // call the mediaserver side to create // a surfacemediasource - sp<ISurfaceTexture> surfaceMediaSource = querySurfaceMediaSource(); + sp<IGraphicBufferProducer> surfaceMediaSource = querySurfaceMediaSource(); // The mediaserver might have failed to create a source int returnedNull= (surfaceMediaSource == NULL) ? 
1 : 0 ; reply->writeInt32(returnedNull); diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp index 48e427a..d6cd43a 100644 --- a/media/libmedia/IOMX.cpp +++ b/media/libmedia/IOMX.cpp @@ -40,6 +40,8 @@ enum { ENABLE_GRAPHIC_BUFFERS, USE_BUFFER, USE_GRAPHIC_BUFFER, + CREATE_INPUT_SURFACE, + SIGNAL_END_OF_INPUT_STREAM, STORE_META_DATA_IN_BUFFERS, ALLOC_BUFFER, ALLOC_BUFFER_WITH_BACKUP, @@ -280,6 +282,45 @@ public: return err; } + virtual status_t createInputSurface( + node_id node, OMX_U32 port_index, + sp<IGraphicBufferProducer> *bufferProducer) { + Parcel data, reply; + status_t err; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + err = remote()->transact(CREATE_INPUT_SURFACE, data, &reply); + if (err != OK) { + ALOGW("binder transaction failed: %d", err); + return err; + } + + err = reply.readInt32(); + if (err != OK) { + return err; + } + + *bufferProducer = IGraphicBufferProducer::asInterface( + reply.readStrongBinder()); + + return err; + } + + virtual status_t signalEndOfInputStream(node_id node) { + Parcel data, reply; + status_t err; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + err = remote()->transact(SIGNAL_END_OF_INPUT_STREAM, data, &reply); + if (err != OK) { + ALOGW("binder transaction failed: %d", err); + return err; + } + + return reply.readInt32(); + } + virtual status_t storeMetaDataInBuffers( node_id node, OMX_U32 port_index, OMX_BOOL enable) { Parcel data, reply; @@ -404,7 +445,7 @@ IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX"); //////////////////////////////////////////////////////////////////////////////// -#define CHECK_INTERFACE(interface, data, reply) \ +#define CHECK_OMX_INTERFACE(interface, data, reply) \ do { if (!data.enforceInterface(interface::getInterfaceDescriptor())) { \ ALOGW("Call incorrectly routed to " #interface); \ return PERMISSION_DENIED; \ @@ -415,7 +456,7 @@ status_t BnOMX::onTransact( switch (code) { case LIVES_LOCALLY: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void *)data.readIntPtr(); pid_t pid = (pid_t)data.readInt32(); reply->writeInt32(livesLocally(node, pid)); @@ -425,7 +466,7 @@ status_t BnOMX::onTransact( case LIST_NODES: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); List<ComponentInfo> list; listNodes(&list); @@ -448,7 +489,7 @@ status_t BnOMX::onTransact( case ALLOCATE_NODE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); const char *name = data.readCString(); @@ -468,7 +509,7 @@ status_t BnOMX::onTransact( case FREE_NODE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); @@ -479,7 +520,7 @@ status_t BnOMX::onTransact( case SEND_COMMAND: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); @@ -497,7 +538,7 @@ status_t BnOMX::onTransact( case GET_CONFIG: case SET_CONFIG: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); @@ -539,7 +580,7 @@ status_t BnOMX::onTransact( case GET_STATE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_STATETYPE state = OMX_StateInvalid; @@ -553,7 +594,7 @@ status_t 
BnOMX::onTransact( case ENABLE_GRAPHIC_BUFFERS: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -567,7 +608,7 @@ status_t BnOMX::onTransact( case GET_GRAPHIC_BUFFER_USAGE: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -582,7 +623,7 @@ status_t BnOMX::onTransact( case USE_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -602,7 +643,7 @@ status_t BnOMX::onTransact( case USE_GRAPHIC_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -621,9 +662,41 @@ status_t BnOMX::onTransact( return NO_ERROR; } + case CREATE_INPUT_SURFACE: + { + CHECK_OMX_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + OMX_U32 port_index = data.readInt32(); + + sp<IGraphicBufferProducer> bufferProducer; + status_t err = createInputSurface(node, port_index, + &bufferProducer); + + reply->writeInt32(err); + + if (err == OK) { + reply->writeStrongBinder(bufferProducer->asBinder()); + } + + return NO_ERROR; + } + + case SIGNAL_END_OF_INPUT_STREAM: + { + CHECK_OMX_INTERFACE(IOMX, data, reply); + + node_id node = (void*)data.readIntPtr(); + + status_t err = signalEndOfInputStream(node); + reply->writeInt32(err); + + return NO_ERROR; + } + case STORE_META_DATA_IN_BUFFERS: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -637,7 +710,7 @@ status_t BnOMX::onTransact( case ALLOC_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -659,7 +732,7 @@ status_t BnOMX::onTransact( case ALLOC_BUFFER_WITH_BACKUP: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -681,7 +754,7 @@ status_t BnOMX::onTransact( case FREE_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); @@ -693,7 +766,7 @@ status_t BnOMX::onTransact( case FILL_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); buffer_id buffer = (void*)data.readIntPtr(); @@ -704,7 +777,7 @@ status_t BnOMX::onTransact( case EMPTY_BUFFER: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); buffer_id buffer = (void*)data.readIntPtr(); @@ -723,7 +796,7 @@ status_t BnOMX::onTransact( case GET_EXTENSION_INDEX: { - CHECK_INTERFACE(IOMX, data, reply); + CHECK_OMX_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); const char *parameter_name = data.readCString(); @@ -769,7 +842,7 @@ status_t BnOMXObserver::onTransact( switch (code) { case OBSERVER_ON_MSG: { - CHECK_INTERFACE(IOMXObserver, data, reply); + CHECK_OMX_INTERFACE(IOMXObserver, data, reply); omx_message msg; data.read(&msg, sizeof(msg)); diff --git a/media/libmedia/IRemoteDisplayClient.cpp 
b/media/libmedia/IRemoteDisplayClient.cpp index 4a1b570..5c494b3 100644 --- a/media/libmedia/IRemoteDisplayClient.cpp +++ b/media/libmedia/IRemoteDisplayClient.cpp @@ -18,7 +18,7 @@ #include <sys/types.h> #include <media/IRemoteDisplayClient.h> -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> #include <utils/String8.h> namespace android { @@ -37,12 +37,12 @@ public: { } - void onDisplayConnected(const sp<ISurfaceTexture>& surfaceTexture, + void onDisplayConnected(const sp<IGraphicBufferProducer>& bufferProducer, uint32_t width, uint32_t height, uint32_t flags) { Parcel data, reply; data.writeInterfaceToken(IRemoteDisplayClient::getInterfaceDescriptor()); - data.writeStrongBinder(surfaceTexture->asBinder()); + data.writeStrongBinder(bufferProducer->asBinder()); data.writeInt32(width); data.writeInt32(height); data.writeInt32(flags); @@ -75,8 +75,8 @@ status_t BnRemoteDisplayClient::onTransact( switch (code) { case ON_DISPLAY_CONNECTED: { CHECK_INTERFACE(IRemoteDisplayClient, data, reply); - sp<ISurfaceTexture> surfaceTexture( - interface_cast<ISurfaceTexture>(data.readStrongBinder())); + sp<IGraphicBufferProducer> surfaceTexture( + interface_cast<IGraphicBufferProducer>(data.readStrongBinder())); uint32_t width = data.readInt32(); uint32_t height = data.readInt32(); uint32_t flags = data.readInt32(); diff --git a/media/libmedia/SingleStateQueue.cpp b/media/libmedia/SingleStateQueue.cpp new file mode 100644 index 0000000..3503baa --- /dev/null +++ b/media/libmedia/SingleStateQueue.cpp @@ -0,0 +1,107 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <new> +#include <cutils/atomic.h> +#include <cutils/atomic-inline.h> // for android_memory_barrier() +#include <media/SingleStateQueue.h> + +namespace android { + +template<typename T> SingleStateQueue<T>::Mutator::Mutator(Shared *shared) + : mSequence(0), mShared((Shared *) shared) +{ + // exactly one of Mutator and Observer must initialize, currently it is Observer + //shared->init(); +} + +template<typename T> int32_t SingleStateQueue<T>::Mutator::push(const T& value) +{ + Shared *shared = mShared; + int32_t sequence = mSequence; + sequence++; + android_atomic_acquire_store(sequence, &shared->mSequence); + shared->mValue = value; + sequence++; + android_atomic_release_store(sequence, &shared->mSequence); + mSequence = sequence; + // consider signalling a futex here, if we know that observer is waiting + return sequence; +} + +template<typename T> bool SingleStateQueue<T>::Mutator::ack() +{ + return mShared->mAck - mSequence == 0; +} + +template<typename T> bool SingleStateQueue<T>::Mutator::ack(int32_t sequence) +{ + // this relies on 2's complement rollover to detect an ancient sequence number + return mShared->mAck - sequence >= 0; +} + +template<typename T> SingleStateQueue<T>::Observer::Observer(Shared *shared) + : mSequence(0), mSeed(1), mShared((Shared *) shared) +{ + // exactly one of Mutator and Observer must initialize, currently it is Observer + shared->init(); +} + +template<typename T> bool SingleStateQueue<T>::Observer::poll(T& value) +{ + Shared *shared = mShared; + int32_t before = shared->mSequence; + if (before == mSequence) { + return false; + } + for (int tries = 0; ; ) { + const int MAX_TRIES = 5; + if (before & 1) { + if (++tries >= MAX_TRIES) { + return false; + } + before = shared->mSequence; + } else { + android_memory_barrier(); + T temp = shared->mValue; + int32_t after = android_atomic_release_load(&shared->mSequence); + if (after == before) { + value = temp; + shared->mAck = before; + mSequence = before; + return true; + } + if (++tries >= MAX_TRIES) { + return false; + } + before = after; + } + } +} + +#if 0 +template<typename T> SingleStateQueue<T>::SingleStateQueue(void /*Shared*/ *shared) +{ + ((Shared *) shared)->init(); +} +#endif + +} // namespace android + +// hack for gcc +#ifdef SINGLE_STATE_QUEUE_INSTANTIATIONS +#include SINGLE_STATE_QUEUE_INSTANTIATIONS +#endif diff --git a/media/libstagefright/wifi-display/TimeSeries.h b/media/libmedia/SingleStateQueueInstantiations.cpp index c818d51..2afebe9 100644 --- a/media/libstagefright/wifi-display/TimeSeries.h +++ b/media/libmedia/SingleStateQueueInstantiations.cpp @@ -1,11 +1,11 @@ /* - * Copyright 2012, The Android Open Source Project + * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -14,33 +14,13 @@ * limitations under the License. 
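// SingleStateQueue.cpp above is a single-writer/single-reader "seqlock":
// Mutator::push() makes the shared sequence odd before storing the value and
// even afterwards (acquire/release stores), so Observer::poll() treats an odd
// or changed sequence as a torn read and retries. Reader side, sketched from
// the code above:
//   int32_t before = shared->mSequence;
//   if (before & 1) { /* writer in progress, retry */ }
//   T copy = shared->mValue;
//   if (android_atomic_release_load(&shared->mSequence) == before) {
//       /* 'copy' is a consistent snapshot */
//   }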
*/ -#ifndef TIME_SERIES_H_ +#include <media/SingleStateQueue.h> +#include <private/media/StaticAudioTrackState.h> -#define TIME_SERIES_H_ - -#include <sys/types.h> +// FIXME hack for gcc namespace android { -struct TimeSeries { - TimeSeries(); - - void add(double val); - - double mean() const; - double sdev() const; - -private: - enum { - kHistorySize = 20 - }; - double mValues[kHistorySize]; - - size_t mCount; - double mSum; -}; - -} // namespace android - -#endif // TIME_SERIES_H_ +template class SingleStateQueue<StaticAudioTrackState>; // typedef StaticAudioTrackSingleStateQueue +} diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp index abc8899..ee70ef7 100644 --- a/media/libmedia/SoundPool.cpp +++ b/media/libmedia/SoundPool.cpp @@ -489,7 +489,7 @@ Sample::~Sample() ::close(mFd); } mData.clear(); - delete mUrl; + free(mUrl); } status_t Sample::doLoad() @@ -568,8 +568,8 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV } // initialize track - int afFrameCount; - int afSampleRate; + size_t afFrameCount; + uint32_t afSampleRate; audio_stream_type_t streamType = mSoundPool->streamType(); if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) { afFrameCount = kDefaultFrameCount; diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 253602d..f55b697 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -803,6 +803,7 @@ ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool ALOGV("ToneGenerator constructor: streamType=%d, volume=%f", streamType, volume); mState = TONE_IDLE; + mpAudioTrack = NULL; if (AudioSystem::getOutputSamplingRate(&mSamplingRate, streamType) != NO_ERROR) { ALOGE("Unable to marshal AudioFlinger"); @@ -811,7 +812,6 @@ ToneGenerator::ToneGenerator(audio_stream_type_t streamType, float volume, bool mThreadCanCallJava = threadCanCallJava; mStreamType = streamType; mVolume = volume; - mpAudioTrack = NULL; mpToneDesc = NULL; mpNewToneDesc = NULL; // Generate tone by chunks of 20 ms to keep cadencing precision @@ -885,6 +885,11 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { if ((toneType < 0) || (toneType >= NUM_TONES)) return lResult; + toneType = getToneForRegion(toneType); + if (toneType == TONE_CDMA_SIGNAL_OFF) { + return true; + } + if (mState == TONE_IDLE) { ALOGV("startTone: try to re-init AudioTrack"); if (!initAudioTrack()) { @@ -897,7 +902,6 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { mLock.lock(); // Get descriptor for requested tone - toneType = getToneForRegion(toneType); mpNewToneDesc = &sToneDescriptors[toneType]; mDurationMs = durationMs; @@ -918,6 +922,9 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { ALOGV("Immediate start, time %d", (unsigned int)(systemTime()/1000000)); lResult = true; mState = TONE_STARTING; + if (clock_gettime(CLOCK_MONOTONIC, &mStartTime) != 0) { + mStartTime.tv_sec = 0; + } mLock.unlock(); mpAudioTrack->start(); mLock.lock(); @@ -936,6 +943,7 @@ bool ToneGenerator::startTone(tone_type toneType, int durationMs) { } else { ALOGV("Delayed start"); mState = TONE_RESTARTING; + mStartTime.tv_sec = 0; lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); if (lStatus == NO_ERROR) { if (mState != TONE_IDLE) { @@ -972,21 +980,50 @@ void ToneGenerator::stopTone() { ALOGV("stopTone"); mLock.lock(); - if (mState == TONE_PLAYING || mState == TONE_STARTING || mState == TONE_RESTARTING) { - mState = TONE_STOPPING; 
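// The replacement block below avoids cutting a tone short on a cold output
// start: when the start timestamp is valid, the CLOCK_MONOTONIC delta between
// startTone() and stopTone() is converted into a minimum sample count
// (mMaxSmp) instead of switching straight to TONE_STOPPING. Ignoring the
// nsec borrow and the overflow guard in the actual patch, the conversion is
// roughly:
//   int64_t elapsedMs = (stop.tv_sec - start.tv_sec) * 1000
//                     + (stop.tv_nsec - start.tv_nsec) / 1000000;
//   mMaxSmp = (unsigned int)((elapsedMs * mSamplingRate) / 1000);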
+ if (mState != TONE_IDLE && mState != TONE_INIT) { + if (mState == TONE_PLAYING || mState == TONE_STARTING || mState == TONE_RESTARTING) { + struct timespec stopTime; + // If the start time is valid, make sure that the number of audio samples produced + // corresponds at least to the time between the start and stop commands. + // This is needed in case of cold start of the output stream. + if ((mStartTime.tv_sec != 0) && (clock_gettime(CLOCK_MONOTONIC, &stopTime) == 0)) { + time_t sec = stopTime.tv_sec - mStartTime.tv_sec; + long nsec = stopTime.tv_nsec - mStartTime.tv_nsec; + long durationMs; + if (nsec < 0) { + --sec; + nsec += 1000000000; + } + + if ((sec + 1) > ((long)(INT_MAX / mSamplingRate))) { + mMaxSmp = sec * mSamplingRate; + } else { + // mSamplingRate is always > 1000 + sec = sec * 1000 + nsec / 1000000; // duration in milliseconds + mMaxSmp = (unsigned int)(((int64_t)sec * mSamplingRate) / 1000); + } + ALOGV("stopTone() forcing mMaxSmp to %d, total for far %d", mMaxSmp, mTotalSmp); + } else { + mState = TONE_STOPPING; + } + } ALOGV("waiting cond"); status_t lStatus = mWaitCbkCond.waitRelative(mLock, seconds(3)); if (lStatus == NO_ERROR) { + // If the tone was restarted exit now before calling clearWaveGens(); + if (mState != TONE_INIT) { + mLock.unlock(); + return; + } ALOGV("track stop complete, time %d", (unsigned int)(systemTime()/1000000)); } else { ALOGE("--- Stop timed out"); mState = TONE_IDLE; mpAudioTrack->stop(); } + clearWaveGens(); } - clearWaveGens(); - mLock.unlock(); } @@ -1036,7 +1073,7 @@ bool ToneGenerator::initAudioTrack() { goto initAudioTrack_exit; } - mpAudioTrack->setVolume(mVolume, mVolume); + mpAudioTrack->setVolume(mVolume); mState = TONE_INIT; @@ -1254,6 +1291,9 @@ audioCallback_EndLoop: ALOGV("Cbk restarting track"); if (lpToneGen->prepareWave()) { lpToneGen->mState = TONE_STARTING; + if (clock_gettime(CLOCK_MONOTONIC, &lpToneGen->mStartTime) != 0) { + lpToneGen->mStartTime.tv_sec = 0; + } // must reload lpToneDesc as prepareWave() may change mpToneDesc lpToneDesc = lpToneGen->mpToneDesc; } else { @@ -1295,7 +1335,7 @@ audioCallback_EndLoop: } if (lSignal) - lpToneGen->mWaitCbkCond.signal(); + lpToneGen->mWaitCbkCond.broadcast(); lpToneGen->mLock.unlock(); } } diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp index 8196e10..5b4071b 100644 --- a/media/libmedia/Visualizer.cpp +++ b/media/libmedia/Visualizer.cpp @@ -88,7 +88,8 @@ status_t Visualizer::setEnabled(bool enabled) return status; } -status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, uint32_t rate) +status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, + uint32_t rate) { if (rate > CAPTURE_RATE_MAX) { return BAD_VALUE; @@ -334,7 +335,8 @@ void Visualizer::controlStatusChanged(bool controlGranted) { //------------------------------------------------------------------------- -Visualizer::CaptureThread::CaptureThread(Visualizer& receiver, uint32_t captureRate, bool bCanCallJava) +Visualizer::CaptureThread::CaptureThread(Visualizer& receiver, uint32_t captureRate, + bool bCanCallJava) : Thread(bCanCallJava), mReceiver(receiver) { mSleepTimeUs = 1000000000 / captureRate; diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp index b0241aa..110b94c 100644 --- a/media/libmedia/mediametadataretriever.cpp +++ b/media/libmedia/mediametadataretriever.cpp @@ -64,7 +64,7 @@ MediaMetadataRetriever::MediaMetadataRetriever() ALOGE("failed to obtain 
MediaMetadataRetrieverService"); return; } - sp<IMediaMetadataRetriever> retriever(service->createMetadataRetriever(getpid())); + sp<IMediaMetadataRetriever> retriever(service->createMetadataRetriever()); if (retriever == 0) { ALOGE("failed to create IMediaMetadataRetriever object from server"); } diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index bbbf4b6..68cbdf5 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -27,7 +27,7 @@ #include <binder/IServiceManager.h> #include <binder/IPCThreadState.h> -#include <gui/SurfaceTextureClient.h> +#include <gui/Surface.h> #include <media/mediaplayer.h> #include <media/AudioSystem.h> @@ -143,7 +143,7 @@ status_t MediaPlayer::setDataSource( if (url != NULL) { const sp<IMediaPlayerService>& service(getMediaPlayerService()); if (service != 0) { - sp<IMediaPlayer> player(service->create(getpid(), this, mAudioSessionId)); + sp<IMediaPlayer> player(service->create(this, mAudioSessionId)); if ((NO_ERROR != doSetRetransmitEndpoint(player)) || (NO_ERROR != player->setDataSource(url, headers))) { player.clear(); @@ -160,7 +160,7 @@ status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length) status_t err = UNKNOWN_ERROR; const sp<IMediaPlayerService>& service(getMediaPlayerService()); if (service != 0) { - sp<IMediaPlayer> player(service->create(getpid(), this, mAudioSessionId)); + sp<IMediaPlayer> player(service->create(this, mAudioSessionId)); if ((NO_ERROR != doSetRetransmitEndpoint(player)) || (NO_ERROR != player->setDataSource(fd, offset, length))) { player.clear(); @@ -176,7 +176,7 @@ status_t MediaPlayer::setDataSource(const sp<IStreamSource> &source) status_t err = UNKNOWN_ERROR; const sp<IMediaPlayerService>& service(getMediaPlayerService()); if (service != 0) { - sp<IMediaPlayer> player(service->create(getpid(), this, mAudioSessionId)); + sp<IMediaPlayer> player(service->create(this, mAudioSessionId)); if ((NO_ERROR != doSetRetransmitEndpoint(player)) || (NO_ERROR != player->setDataSource(source))) { player.clear(); @@ -221,12 +221,12 @@ status_t MediaPlayer::getMetadata(bool update_only, bool apply_filter, Parcel *m } status_t MediaPlayer::setVideoSurfaceTexture( - const sp<ISurfaceTexture>& surfaceTexture) + const sp<IGraphicBufferProducer>& bufferProducer) { ALOGV("setVideoSurfaceTexture"); Mutex::Autolock _l(mLock); if (mPlayer == 0) return NO_INIT; - return mPlayer->setVideoSurfaceTexture(surfaceTexture); + return mPlayer->setVideoSurfaceTexture(bufferProducer); } // must call with lock held @@ -398,6 +398,13 @@ status_t MediaPlayer::getDuration_l(int *msec) if (mPlayer != 0 && isValidState) { int durationMs; status_t ret = mPlayer->getDuration(&durationMs); + + if (ret != OK) { + // Do not enter error state just because no duration was available. + durationMs = -1; + ret = OK; + } + if (msec) { *msec = durationMs; } @@ -568,8 +575,8 @@ status_t MediaPlayer::setAudioSessionId(int sessionId) return BAD_VALUE; } if (sessionId != mAudioSessionId) { - AudioSystem::releaseAudioSessionId(mAudioSessionId); AudioSystem::acquireAudioSessionId(sessionId); + AudioSystem::releaseAudioSessionId(mAudioSessionId); mAudioSessionId = sessionId; } return NO_ERROR; @@ -807,4 +814,15 @@ status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) { return mPlayer->setNextPlayer(next == NULL ? 
NULL : next->mPlayer); } +status_t MediaPlayer::updateProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + const sp<IMediaPlayerService>& service = getMediaPlayerService(); + + if (service != NULL) { + return service->updateProxyConfig(host, port, exclusionList); + } + + return INVALID_OPERATION; +} + }; // namespace android diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index 9541015..3710e46 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -24,7 +24,7 @@ #include <media/IMediaPlayerService.h> #include <media/IMediaRecorder.h> #include <media/mediaplayer.h> // for MEDIA_ERROR_SERVER_DIED -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> namespace android { @@ -49,7 +49,7 @@ status_t MediaRecorder::setCamera(const sp<ICamera>& camera, const sp<ICameraRec return ret; } -status_t MediaRecorder::setPreviewSurface(const sp<Surface>& surface) +status_t MediaRecorder::setPreviewSurface(const sp<IGraphicBufferProducer>& surface) { ALOGV("setPreviewSurface(%p)", surface.get()); if (mMediaRecorder == NULL) { @@ -348,9 +348,9 @@ status_t MediaRecorder::setVideoSize(int width, int height) } // Query a SurfaceMediaSurface through the Mediaserver, over the -// binder interface. This is used by the Filter Framework (MeidaEncoder) -// to get an <ISurfaceTexture> object to hook up to ANativeWindow. -sp<ISurfaceTexture> MediaRecorder:: +// binder interface. This is used by the Filter Framework (MediaEncoder) +// to get an <IGraphicBufferProducer> object to hook up to ANativeWindow. +sp<IGraphicBufferProducer> MediaRecorder:: querySurfaceMediaSourceFromMediaServer() { Mutex::Autolock _l(mLock); @@ -620,7 +620,7 @@ MediaRecorder::MediaRecorder() : mSurfaceMediaSource(NULL) const sp<IMediaPlayerService>& service(getMediaPlayerService()); if (service != NULL) { - mMediaRecorder = service->createMediaRecorder(getpid()); + mMediaRecorder = service->createMediaRecorder(); } if (mMediaRecorder != NULL) { mCurrentState = MEDIA_RECORDER_IDLE; @@ -656,6 +656,27 @@ status_t MediaRecorder::setListener(const sp<MediaRecorderListener>& listener) return NO_ERROR; } +status_t MediaRecorder::setClientName(const String16& clientName) +{ + ALOGV("setClientName"); + if (mMediaRecorder == NULL) { + ALOGE("media recorder is not initialized yet"); + return INVALID_OPERATION; + } + bool isInvalidState = (mCurrentState & + (MEDIA_RECORDER_PREPARED | + MEDIA_RECORDER_RECORDING | + MEDIA_RECORDER_ERROR)); + if (isInvalidState) { + ALOGE("setClientName is called in an invalid state: %d", mCurrentState); + return INVALID_OPERATION; + } + + mMediaRecorder->setClientName(clientName); + + return NO_ERROR; +} + void MediaRecorder::notify(int msg, int ext1, int ext2) { ALOGV("message received msg=%d, ext1=%d, ext2=%d", msg, ext1, ext2); diff --git a/media/libmedia_native/Android.mk b/media/libmedia_native/Android.mk deleted file mode 100644 index 065a90f..0000000 --- a/media/libmedia_native/Android.mk +++ /dev/null @@ -1,11 +0,0 @@ -LOCAL_PATH := $(call my-dir) - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES := - -LOCAL_MODULE:= libmedia_native - -LOCAL_MODULE_TAGS := optional - -include $(BUILD_SHARED_LIBRARY) diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 5b5ed71..d87bc7f 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -9,6 +9,7 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ ActivityManager.cpp \ Crypto.cpp \ + Drm.cpp \ 
HDCP.cpp \ MediaPlayerFactory.cpp \ MediaPlayerService.cpp \ @@ -17,6 +18,7 @@ LOCAL_SRC_FILES:= \ MidiFile.cpp \ MidiMetadataRetriever.cpp \ RemoteDisplay.cpp \ + SharedLibrary.cpp \ StagefrightPlayer.cpp \ StagefrightRecorder.cpp \ TestPlayerStub.cpp \ @@ -25,10 +27,10 @@ LOCAL_SHARED_LIBRARIES := \ libbinder \ libcamera_client \ libcutils \ + liblog \ libdl \ libgui \ libmedia \ - libmedia_native \ libsonivox \ libstagefright \ libstagefright_foundation \ diff --git a/media/libmediaplayerservice/Crypto.cpp b/media/libmediaplayerservice/Crypto.cpp index 0e8f913..ae4d845 100644 --- a/media/libmediaplayerservice/Crypto.cpp +++ b/media/libmediaplayerservice/Crypto.cpp @@ -17,6 +17,8 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "Crypto" #include <utils/Log.h> +#include <dirent.h> +#include <dlfcn.h> #include "Crypto.h" @@ -26,87 +28,176 @@ #include <media/stagefright/foundation/hexdump.h> #include <media/stagefright/MediaErrors.h> -#include <dlfcn.h> - namespace android { +KeyedVector<Vector<uint8_t>, String8> Crypto::mUUIDToLibraryPathMap; +KeyedVector<String8, wp<SharedLibrary> > Crypto::mLibraryPathToOpenLibraryMap; +Mutex Crypto::mMapLock; + +static bool operator<(const Vector<uint8_t> &lhs, const Vector<uint8_t> &rhs) { + if (lhs.size() < rhs.size()) { + return true; + } else if (lhs.size() > rhs.size()) { + return false; + } + + return memcmp((void *)lhs.array(), (void *)rhs.array(), rhs.size()) < 0; +} + Crypto::Crypto() : mInitCheck(NO_INIT), - mLibHandle(NULL), mFactory(NULL), mPlugin(NULL) { - mInitCheck = init(); } Crypto::~Crypto() { delete mPlugin; mPlugin = NULL; + closeFactory(); +} +void Crypto::closeFactory() { delete mFactory; mFactory = NULL; - - if (mLibHandle != NULL) { - dlclose(mLibHandle); - mLibHandle = NULL; - } + mLibrary.clear(); } status_t Crypto::initCheck() const { return mInitCheck; } -status_t Crypto::init() { - mLibHandle = dlopen("libdrmdecrypt.so", RTLD_NOW); +/* + * Search the plugins directory for a plugin that supports the scheme + * specified by uuid + * + * If found: + * mLibrary holds a strong pointer to the dlopen'd library + * mFactory is set to the library's factory method + * mInitCheck is set to OK + * + * If not found: + * mLibrary is cleared and mFactory are set to NULL + * mInitCheck is set to an error (!OK) + */ +void Crypto::findFactoryForScheme(const uint8_t uuid[16]) { - if (mLibHandle == NULL) { - ALOGE("Unable to locate libdrmdecrypt.so"); + closeFactory(); - return ERROR_UNSUPPORTED; + // lock static maps + Mutex::Autolock autoLock(mMapLock); + + // first check cache + Vector<uint8_t> uuidVector; + uuidVector.appendArray(uuid, sizeof(uuid)); + ssize_t index = mUUIDToLibraryPathMap.indexOfKey(uuidVector); + if (index >= 0) { + if (loadLibraryForScheme(mUUIDToLibraryPathMap[index], uuid)) { + mInitCheck = OK; + return; + } else { + ALOGE("Failed to load from cached library path!"); + mInitCheck = ERROR_UNSUPPORTED; + return; + } } - typedef CryptoFactory *(*CreateCryptoFactoryFunc)(); - CreateCryptoFactoryFunc createCryptoFactory = - (CreateCryptoFactoryFunc)dlsym(mLibHandle, "createCryptoFactory"); + // no luck, have to search + String8 dirPath("/vendor/lib/mediadrm"); + String8 pluginPath; - if (createCryptoFactory == NULL - || ((mFactory = createCryptoFactory()) == NULL)) { - if (createCryptoFactory == NULL) { - ALOGE("Unable to find symbol 'createCryptoFactory'."); - } else { - ALOGE("createCryptoFactory() failed."); + DIR* pDir = opendir(dirPath.string()); + if (pDir) { + struct dirent* pEntry; + while ((pEntry = readdir(pDir))) { + + 
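// Crypto::findFactoryForScheme() resolves a plugin in three steps (see the
// surrounding code): 1) the static mUUIDToLibraryPathMap cache, 2) a scan of
// /vendor/lib/mediadrm for *.so libraries whose createCryptoFactory()
// reports support for the UUID, 3) the legacy libdrmdecrypt.so fallback.
// Successful lookups are cached so later instances skip the directory scan;
// Drm::findFactoryForScheme() further down reuses the same scheme.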
pluginPath = dirPath + "/" + pEntry->d_name; + + if (pluginPath.getPathExtension() == ".so") { + + if (loadLibraryForScheme(pluginPath, uuid)) { + mUUIDToLibraryPathMap.add(uuidVector, pluginPath); + mInitCheck = OK; + closedir(pDir); + return; + } + } } - dlclose(mLibHandle); - mLibHandle = NULL; + closedir(pDir); + } - return ERROR_UNSUPPORTED; + // try the legacy libdrmdecrypt.so + pluginPath = "libdrmdecrypt.so"; + if (loadLibraryForScheme(pluginPath, uuid)) { + mUUIDToLibraryPathMap.add(uuidVector, pluginPath); + mInitCheck = OK; + return; } - return OK; + ALOGE("Failed to find crypto plugin"); + mInitCheck = ERROR_UNSUPPORTED; } -bool Crypto::isCryptoSchemeSupported(const uint8_t uuid[16]) const { - Mutex::Autolock autoLock(mLock); +bool Crypto::loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]) { - if (mInitCheck != OK) { + // get strong pointer to open shared library + ssize_t index = mLibraryPathToOpenLibraryMap.indexOfKey(path); + if (index >= 0) { + mLibrary = mLibraryPathToOpenLibraryMap[index].promote(); + } else { + index = mLibraryPathToOpenLibraryMap.add(path, NULL); + } + + if (!mLibrary.get()) { + mLibrary = new SharedLibrary(path); + if (!*mLibrary) { + return false; + } + + mLibraryPathToOpenLibraryMap.replaceValueAt(index, mLibrary); + } + + typedef CryptoFactory *(*CreateCryptoFactoryFunc)(); + + CreateCryptoFactoryFunc createCryptoFactory = + (CreateCryptoFactoryFunc)mLibrary->lookup("createCryptoFactory"); + + if (createCryptoFactory == NULL || + (mFactory = createCryptoFactory()) == NULL || + !mFactory->isCryptoSchemeSupported(uuid)) { + closeFactory(); return false; } + return true; +} - return mFactory->isCryptoSchemeSupported(uuid); +bool Crypto::isCryptoSchemeSupported(const uint8_t uuid[16]) { + Mutex::Autolock autoLock(mLock); + + if (mFactory && mFactory->isCryptoSchemeSupported(uuid)) { + return true; + } + + findFactoryForScheme(uuid); + return (mInitCheck == OK); } status_t Crypto::createPlugin( const uint8_t uuid[16], const void *data, size_t size) { Mutex::Autolock autoLock(mLock); - if (mInitCheck != OK) { - return mInitCheck; - } - if (mPlugin != NULL) { return -EINVAL; } + if (!mFactory || !mFactory->isCryptoSchemeSupported(uuid)) { + findFactoryForScheme(uuid); + } + + if (mInitCheck != OK) { + return mInitCheck; + } + return mFactory->createPlugin(uuid, data, size, &mPlugin); } diff --git a/media/libmediaplayerservice/Crypto.h b/media/libmediaplayerservice/Crypto.h index d066774..c44ae34 100644 --- a/media/libmediaplayerservice/Crypto.h +++ b/media/libmediaplayerservice/Crypto.h @@ -20,6 +20,9 @@ #include <media/ICrypto.h> #include <utils/threads.h> +#include <utils/KeyedVector.h> + +#include "SharedLibrary.h" namespace android { @@ -32,7 +35,7 @@ struct Crypto : public BnCrypto { virtual status_t initCheck() const; - virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]) const; + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]); virtual status_t createPlugin( const uint8_t uuid[16], const void *data, size_t size); @@ -56,11 +59,17 @@ private: mutable Mutex mLock; status_t mInitCheck; - void *mLibHandle; + sp<SharedLibrary> mLibrary; CryptoFactory *mFactory; CryptoPlugin *mPlugin; - status_t init(); + static KeyedVector<Vector<uint8_t>, String8> mUUIDToLibraryPathMap; + static KeyedVector<String8, wp<SharedLibrary> > mLibraryPathToOpenLibraryMap; + static Mutex mMapLock; + + void findFactoryForScheme(const uint8_t uuid[16]); + bool loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]); + void 
closeFactory(); DISALLOW_EVIL_CONSTRUCTORS(Crypto); }; diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp new file mode 100644 index 0000000..f00f488 --- /dev/null +++ b/media/libmediaplayerservice/Drm.cpp @@ -0,0 +1,593 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "Drm" +#include <utils/Log.h> + +#include <dirent.h> +#include <dlfcn.h> + +#include "Drm.h" + +#include <media/drm/DrmAPI.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AString.h> +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/MediaErrors.h> + +namespace android { + +KeyedVector<Vector<uint8_t>, String8> Drm::mUUIDToLibraryPathMap; +KeyedVector<String8, wp<SharedLibrary> > Drm::mLibraryPathToOpenLibraryMap; +Mutex Drm::mMapLock; + +static bool operator<(const Vector<uint8_t> &lhs, const Vector<uint8_t> &rhs) { + if (lhs.size() < rhs.size()) { + return true; + } else if (lhs.size() > rhs.size()) { + return false; + } + + return memcmp((void *)lhs.array(), (void *)rhs.array(), rhs.size()) < 0; +} + +Drm::Drm() + : mInitCheck(NO_INIT), + mListener(NULL), + mFactory(NULL), + mPlugin(NULL) { +} + +Drm::~Drm() { + delete mPlugin; + mPlugin = NULL; + closeFactory(); +} + +void Drm::closeFactory() { + delete mFactory; + mFactory = NULL; + mLibrary.clear(); +} + +status_t Drm::initCheck() const { + return mInitCheck; +} + +status_t Drm::setListener(const sp<IDrmClient>& listener) +{ + Mutex::Autolock lock(mEventLock); + if (mListener != NULL){ + mListener->asBinder()->unlinkToDeath(this); + } + if (listener != NULL) { + listener->asBinder()->linkToDeath(this); + } + mListener = listener; + return NO_ERROR; +} + +void Drm::sendEvent(DrmPlugin::EventType eventType, int extra, + Vector<uint8_t> const *sessionId, + Vector<uint8_t> const *data) +{ + mEventLock.lock(); + sp<IDrmClient> listener = mListener; + mEventLock.unlock(); + + if (listener != NULL) { + Parcel obj; + if (sessionId && sessionId->size()) { + obj.writeInt32(sessionId->size()); + obj.write(sessionId->array(), sessionId->size()); + } else { + obj.writeInt32(0); + } + + if (data && data->size()) { + obj.writeInt32(data->size()); + obj.write(data->array(), data->size()); + } else { + obj.writeInt32(0); + } + + Mutex::Autolock lock(mNotifyLock); + listener->notify(eventType, extra, &obj); + } +} + +/* + * Search the plugins directory for a plugin that supports the scheme + * specified by uuid + * + * If found: + * mLibrary holds a strong pointer to the dlopen'd library + * mFactory is set to the library's factory method + * mInitCheck is set to OK + * + * If not found: + * mLibrary is cleared and mFactory are set to NULL + * mInitCheck is set to an error (!OK) + */ +void Drm::findFactoryForScheme(const uint8_t uuid[16]) { + + closeFactory(); + + // lock static maps + Mutex::Autolock autoLock(mMapLock); + + // first check cache + Vector<uint8_t> 
uuidVector; + uuidVector.appendArray(uuid, sizeof(uuid)); + ssize_t index = mUUIDToLibraryPathMap.indexOfKey(uuidVector); + if (index >= 0) { + if (loadLibraryForScheme(mUUIDToLibraryPathMap[index], uuid)) { + mInitCheck = OK; + return; + } else { + ALOGE("Failed to load from cached library path!"); + mInitCheck = ERROR_UNSUPPORTED; + return; + } + } + + // no luck, have to search + String8 dirPath("/vendor/lib/mediadrm"); + DIR* pDir = opendir(dirPath.string()); + + if (pDir == NULL) { + mInitCheck = ERROR_UNSUPPORTED; + ALOGE("Failed to open plugin directory %s", dirPath.string()); + return; + } + + + struct dirent* pEntry; + while ((pEntry = readdir(pDir))) { + + String8 pluginPath = dirPath + "/" + pEntry->d_name; + + if (pluginPath.getPathExtension() == ".so") { + + if (loadLibraryForScheme(pluginPath, uuid)) { + mUUIDToLibraryPathMap.add(uuidVector, pluginPath); + mInitCheck = OK; + closedir(pDir); + return; + } + } + } + + closedir(pDir); + + ALOGE("Failed to find drm plugin"); + mInitCheck = ERROR_UNSUPPORTED; +} + +bool Drm::loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]) { + + // get strong pointer to open shared library + ssize_t index = mLibraryPathToOpenLibraryMap.indexOfKey(path); + if (index >= 0) { + mLibrary = mLibraryPathToOpenLibraryMap[index].promote(); + } else { + index = mLibraryPathToOpenLibraryMap.add(path, NULL); + } + + if (!mLibrary.get()) { + mLibrary = new SharedLibrary(path); + if (!*mLibrary) { + return false; + } + + mLibraryPathToOpenLibraryMap.replaceValueAt(index, mLibrary); + } + + typedef DrmFactory *(*CreateDrmFactoryFunc)(); + + CreateDrmFactoryFunc createDrmFactory = + (CreateDrmFactoryFunc)mLibrary->lookup("createDrmFactory"); + + if (createDrmFactory == NULL || + (mFactory = createDrmFactory()) == NULL || + !mFactory->isCryptoSchemeSupported(uuid)) { + closeFactory(); + return false; + } + return true; +} + +bool Drm::isCryptoSchemeSupported(const uint8_t uuid[16]) { + Mutex::Autolock autoLock(mLock); + + if (mFactory && mFactory->isCryptoSchemeSupported(uuid)) { + return true; + } + + findFactoryForScheme(uuid); + return (mInitCheck == OK); +} + +status_t Drm::createPlugin(const uint8_t uuid[16]) { + Mutex::Autolock autoLock(mLock); + + if (mPlugin != NULL) { + return -EINVAL; + } + + if (!mFactory || !mFactory->isCryptoSchemeSupported(uuid)) { + findFactoryForScheme(uuid); + } + + if (mInitCheck != OK) { + return mInitCheck; + } + + status_t result = mFactory->createDrmPlugin(uuid, &mPlugin); + mPlugin->setListener(this); + return result; +} + +status_t Drm::destroyPlugin() { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + delete mPlugin; + mPlugin = NULL; + + return OK; +} + +status_t Drm::openSession(Vector<uint8_t> &sessionId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->openSession(sessionId); +} + +status_t Drm::closeSession(Vector<uint8_t> const &sessionId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->closeSession(sessionId); +} + +status_t Drm::getKeyRequest(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &initData, + String8 const &mimeType, DrmPlugin::KeyType keyType, + KeyedVector<String8, String8> const &optionalParameters, + Vector<uint8_t> &request, String8 &defaultUrl) { + Mutex::Autolock 
autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getKeyRequest(sessionId, initData, mimeType, keyType, + optionalParameters, request, defaultUrl); +} + +status_t Drm::provideKeyResponse(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &response, + Vector<uint8_t> &keySetId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->provideKeyResponse(sessionId, response, keySetId); +} + +status_t Drm::removeKeys(Vector<uint8_t> const &keySetId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->removeKeys(keySetId); +} + +status_t Drm::restoreKeys(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keySetId) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->restoreKeys(sessionId, keySetId); +} + +status_t Drm::queryKeyStatus(Vector<uint8_t> const &sessionId, + KeyedVector<String8, String8> &infoMap) const { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->queryKeyStatus(sessionId, infoMap); +} + +status_t Drm::getProvisionRequest(Vector<uint8_t> &request, String8 &defaultUrl) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getProvisionRequest(request, defaultUrl); +} + +status_t Drm::provideProvisionResponse(Vector<uint8_t> const &response) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->provideProvisionResponse(response); +} + + +status_t Drm::getSecureStops(List<Vector<uint8_t> > &secureStops) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getSecureStops(secureStops); +} + +status_t Drm::releaseSecureStops(Vector<uint8_t> const &ssRelease) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->releaseSecureStops(ssRelease); +} + +status_t Drm::getPropertyString(String8 const &name, String8 &value ) const { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getPropertyString(name, value); +} + +status_t Drm::getPropertyByteArray(String8 const &name, Vector<uint8_t> &value ) const { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->getPropertyByteArray(name, value); +} + +status_t Drm::setPropertyString(String8 const &name, String8 const &value ) const { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->setPropertyString(name, value); +} + +status_t Drm::setPropertyByteArray(String8 const &name, + Vector<uint8_t> const &value ) const { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + 
if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->setPropertyByteArray(name, value); +} + + +status_t Drm::setCipherAlgorithm(Vector<uint8_t> const &sessionId, + String8 const &algorithm) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->setCipherAlgorithm(sessionId, algorithm); +} + +status_t Drm::setMacAlgorithm(Vector<uint8_t> const &sessionId, + String8 const &algorithm) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->setMacAlgorithm(sessionId, algorithm); +} + +status_t Drm::encrypt(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &input, + Vector<uint8_t> const &iv, + Vector<uint8_t> &output) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->encrypt(sessionId, keyId, input, iv, output); +} + +status_t Drm::decrypt(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &input, + Vector<uint8_t> const &iv, + Vector<uint8_t> &output) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->decrypt(sessionId, keyId, input, iv, output); +} + +status_t Drm::sign(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &message, + Vector<uint8_t> &signature) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->sign(sessionId, keyId, message, signature); +} + +status_t Drm::verify(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &message, + Vector<uint8_t> const &signature, + bool &match) { + Mutex::Autolock autoLock(mLock); + + if (mInitCheck != OK) { + return mInitCheck; + } + + if (mPlugin == NULL) { + return -EINVAL; + } + + return mPlugin->verify(sessionId, keyId, message, signature, match); +} + +void Drm::binderDied(const wp<IBinder> &the_late_who) +{ + delete mPlugin; + mPlugin = NULL; + closeFactory(); + mListener.clear(); +} + +} // namespace android diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h new file mode 100644 index 0000000..3f460f1 --- /dev/null +++ b/media/libmediaplayerservice/Drm.h @@ -0,0 +1,149 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef DRM_H_ + +#define DRM_H_ + +#include "SharedLibrary.h" + +#include <media/IDrm.h> +#include <media/IDrmClient.h> +#include <utils/threads.h> + +namespace android { + +struct DrmFactory; +struct DrmPlugin; + +struct Drm : public BnDrm, + public IBinder::DeathRecipient, + public DrmPluginListener { + Drm(); + virtual ~Drm(); + + virtual status_t initCheck() const; + + virtual bool isCryptoSchemeSupported(const uint8_t uuid[16]); + + virtual status_t createPlugin(const uint8_t uuid[16]); + + virtual status_t destroyPlugin(); + + virtual status_t openSession(Vector<uint8_t> &sessionId); + + virtual status_t closeSession(Vector<uint8_t> const &sessionId); + + virtual status_t + getKeyRequest(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &initData, + String8 const &mimeType, DrmPlugin::KeyType keyType, + KeyedVector<String8, String8> const &optionalParameters, + Vector<uint8_t> &request, String8 &defaultUrl); + + virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &response, + Vector<uint8_t> &keySetId); + + virtual status_t removeKeys(Vector<uint8_t> const &keySetId); + + virtual status_t restoreKeys(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keySetId); + + virtual status_t queryKeyStatus(Vector<uint8_t> const &sessionId, + KeyedVector<String8, String8> &infoMap) const; + + virtual status_t getProvisionRequest(Vector<uint8_t> &request, + String8 &defaulUrl); + + virtual status_t provideProvisionResponse(Vector<uint8_t> const &response); + + virtual status_t getSecureStops(List<Vector<uint8_t> > &secureStops); + + virtual status_t releaseSecureStops(Vector<uint8_t> const &ssRelease); + + virtual status_t getPropertyString(String8 const &name, String8 &value ) const; + virtual status_t getPropertyByteArray(String8 const &name, + Vector<uint8_t> &value ) const; + virtual status_t setPropertyString(String8 const &name, String8 const &value ) const; + virtual status_t setPropertyByteArray(String8 const &name, + Vector<uint8_t> const &value ) const; + + virtual status_t setCipherAlgorithm(Vector<uint8_t> const &sessionId, + String8 const &algorithm); + + virtual status_t setMacAlgorithm(Vector<uint8_t> const &sessionId, + String8 const &algorithm); + + virtual status_t encrypt(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &input, + Vector<uint8_t> const &iv, + Vector<uint8_t> &output); + + virtual status_t decrypt(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &input, + Vector<uint8_t> const &iv, + Vector<uint8_t> &output); + + virtual status_t sign(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &message, + Vector<uint8_t> &signature); + + virtual status_t verify(Vector<uint8_t> const &sessionId, + Vector<uint8_t> const &keyId, + Vector<uint8_t> const &message, + Vector<uint8_t> const &signature, + bool &match); + + virtual status_t setListener(const sp<IDrmClient>& listener); + + virtual void sendEvent(DrmPlugin::EventType eventType, int extra, + Vector<uint8_t> const *sessionId, + Vector<uint8_t> const *data); + + virtual void binderDied(const wp<IBinder> &the_late_who); + +private: + mutable Mutex mLock; + + status_t mInitCheck; + + sp<IDrmClient> mListener; + mutable Mutex mEventLock; + mutable Mutex mNotifyLock; + + sp<SharedLibrary> mLibrary; + DrmFactory *mFactory; + DrmPlugin *mPlugin; + + static KeyedVector<Vector<uint8_t>, String8> mUUIDToLibraryPathMap; + static 
KeyedVector<String8, wp<SharedLibrary> > mLibraryPathToOpenLibraryMap; + static Mutex mMapLock; + + void findFactoryForScheme(const uint8_t uuid[16]); + bool loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]); + void closeFactory(); + + + DISALLOW_EVIL_CONSTRUCTORS(Drm); +}; + +} // namespace android + +#endif // CRYPTO_H_ diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp index 09b9719..469a02e 100644 --- a/media/libmediaplayerservice/HDCP.cpp +++ b/media/libmediaplayerservice/HDCP.cpp @@ -26,8 +26,9 @@ namespace android { -HDCP::HDCP() - : mLibHandle(NULL), +HDCP::HDCP(bool createEncryptionModule) + : mIsEncryptionModule(createEncryptionModule), + mLibHandle(NULL), mHDCPModule(NULL) { mLibHandle = dlopen("libstagefright_hdcp.so", RTLD_NOW); @@ -40,7 +41,10 @@ HDCP::HDCP() void *, HDCPModule::ObserverFunc); CreateHDCPModuleFunc createHDCPModule = - (CreateHDCPModuleFunc)dlsym(mLibHandle, "createHDCPModule"); + mIsEncryptionModule + ? (CreateHDCPModuleFunc)dlsym(mLibHandle, "createHDCPModule") + : (CreateHDCPModuleFunc)dlsym( + mLibHandle, "createHDCPModuleForDecryption"); if (createHDCPModule == NULL) { ALOGE("Unable to find symbol 'createHDCPModule'."); @@ -101,6 +105,8 @@ status_t HDCP::encrypt( uint64_t *outInputCTR, void *outData) { Mutex::Autolock autoLock(mLock); + CHECK(mIsEncryptionModule); + if (mHDCPModule == NULL) { *outInputCTR = 0; @@ -110,6 +116,20 @@ status_t HDCP::encrypt( return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData); } +status_t HDCP::decrypt( + const void *inData, size_t size, + uint32_t streamCTR, uint64_t outInputCTR, void *outData) { + Mutex::Autolock autoLock(mLock); + + CHECK(!mIsEncryptionModule); + + if (mHDCPModule == NULL) { + return NO_INIT; + } + + return mHDCPModule->decrypt(inData, size, streamCTR, outInputCTR, outData); +} + // static void HDCP::ObserveWrapper(void *me, int msg, int ext1, int ext2) { static_cast<HDCP *>(me)->observe(msg, ext1, ext2); diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h index b2fc457..42e6467 100644 --- a/media/libmediaplayerservice/HDCP.h +++ b/media/libmediaplayerservice/HDCP.h @@ -24,7 +24,7 @@ namespace android { struct HDCP : public BnHDCP { - HDCP(); + HDCP(bool createEncryptionModule); virtual ~HDCP(); virtual status_t setObserver(const sp<IHDCPObserver> &observer); @@ -35,9 +35,15 @@ struct HDCP : public BnHDCP { const void *inData, size_t size, uint32_t streamCTR, uint64_t *outInputCTR, void *outData); + virtual status_t decrypt( + const void *inData, size_t size, + uint32_t streamCTR, uint64_t outInputCTR, void *outData); + private: Mutex mLock; + bool mIsEncryptionModule; + void *mLibHandle; HDCPModule *mHDCPModule; sp<IHDCPObserver> mObserver; diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp index 3f69c11..90aed39 100644 --- a/media/libmediaplayerservice/MediaPlayerFactory.cpp +++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp @@ -100,7 +100,7 @@ void MediaPlayerFactory::unregisterFactory(player_type type) { } \ \ if (0.0 == bestScore) { \ - bestScore = getDefaultPlayerType(); \ + ret = getDefaultPlayerType(); \ } \ \ return ret; @@ -206,7 +206,8 @@ class NuPlayerFactory : public MediaPlayerFactory::IFactory { return 0.0; if (!strncasecmp("http://", url, 7) - || !strncasecmp("https://", url, 8)) { + || !strncasecmp("https://", url, 8) + || !strncasecmp("file://", url, 7)) { size_t len = strlen(url); if (len >= 5 && 
!strcasecmp(".m3u8", &url[len - 5])) { return kOurScore; @@ -215,6 +216,10 @@ class NuPlayerFactory : public MediaPlayerFactory::IFactory { if (strstr(url,"m3u8")) { return kOurScore; } + + if ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) || strstr(url, ".sdp?")) { + return kOurScore; + } } if (!strncasecmp("rtsp://", url, 7)) { diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 9bedff1..acff958 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -38,7 +38,7 @@ #include <binder/IServiceManager.h> #include <binder/MemoryHeapBase.h> #include <binder/MemoryBase.h> -#include <gui/SurfaceTextureClient.h> +#include <gui/Surface.h> #include <utils/Errors.h> // for status_t #include <utils/String8.h> #include <utils/SystemClock.h> @@ -72,7 +72,9 @@ #include <OMX.h> #include "Crypto.h" +#include "Drm.h" #include "HDCP.h" +#include "HTTPBase.h" #include "RemoteDisplay.h" namespace { @@ -224,8 +226,9 @@ MediaPlayerService::~MediaPlayerService() ALOGV("MediaPlayerService destroyed"); } -sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(pid_t pid) +sp<IMediaRecorder> MediaPlayerService::createMediaRecorder() { + pid_t pid = IPCThreadState::self()->getCallingPid(); sp<MediaRecorderClient> recorder = new MediaRecorderClient(this, pid); wp<MediaRecorderClient> w = recorder; Mutex::Autolock lock(mLock); @@ -241,16 +244,18 @@ void MediaPlayerService::removeMediaRecorderClient(wp<MediaRecorderClient> clien ALOGV("Delete media recorder client"); } -sp<IMediaMetadataRetriever> MediaPlayerService::createMetadataRetriever(pid_t pid) +sp<IMediaMetadataRetriever> MediaPlayerService::createMetadataRetriever() { + pid_t pid = IPCThreadState::self()->getCallingPid(); sp<MetadataRetrieverClient> retriever = new MetadataRetrieverClient(pid); ALOGV("Create new media retriever from pid %d", pid); return retriever; } -sp<IMediaPlayer> MediaPlayerService::create(pid_t pid, const sp<IMediaPlayerClient>& client, +sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client, int audioSessionId) { + pid_t pid = IPCThreadState::self()->getCallingPid(); int32_t connId = android_atomic_inc(&mNextConnId); sp<Client> c = new Client( @@ -282,8 +287,12 @@ sp<ICrypto> MediaPlayerService::makeCrypto() { return new Crypto; } -sp<IHDCP> MediaPlayerService::makeHDCP() { - return new HDCP; +sp<IDrm> MediaPlayerService::makeDrm() { + return new Drm; +} + +sp<IHDCP> MediaPlayerService::makeHDCP(bool createEncryptionModule) { + return new HDCP(createEncryptionModule); } sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay( @@ -295,6 +304,11 @@ sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay( return new RemoteDisplay(client, iface.string()); } +status_t MediaPlayerService::updateProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + return HTTPBase::UpdateProxyConfig(host, port, exclusionList); +} + status_t MediaPlayerService::AudioCache::dump(int fd, const Vector<String16>& args) const { const size_t SIZE = 256; @@ -714,21 +728,21 @@ void MediaPlayerService::Client::disconnectNativeWindow() { } status_t MediaPlayerService::Client::setVideoSurfaceTexture( - const sp<ISurfaceTexture>& surfaceTexture) + const sp<IGraphicBufferProducer>& bufferProducer) { - ALOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, surfaceTexture.get()); + ALOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, bufferProducer.get()); sp<MediaPlayerBase> p = 
getPlayer(); if (p == 0) return UNKNOWN_ERROR; - sp<IBinder> binder(surfaceTexture == NULL ? NULL : - surfaceTexture->asBinder()); + sp<IBinder> binder(bufferProducer == NULL ? NULL : + bufferProducer->asBinder()); if (mConnectedWindowBinder == binder) { return OK; } sp<ANativeWindow> anw; - if (surfaceTexture != NULL) { - anw = new SurfaceTextureClient(surfaceTexture); + if (bufferProducer != NULL) { + anw = new Surface(bufferProducer); status_t err = native_window_api_connect(anw.get(), NATIVE_WINDOW_API_MEDIA); @@ -745,10 +759,10 @@ status_t MediaPlayerService::Client::setVideoSurfaceTexture( } } - // Note that we must set the player's new SurfaceTexture before + // Note that we must set the player's new GraphicBufferProducer before // disconnecting the old one. Otherwise queue/dequeue calls could be made // on the disconnected ANW, which may result in errors. - status_t err = p->setVideoSurfaceTexture(surfaceTexture); + status_t err = p->setVideoSurfaceTexture(bufferProducer); disconnectNativeWindow(); @@ -1387,8 +1401,8 @@ status_t MediaPlayerService::AudioOutput::open( } ALOGV("open(%u, %d, 0x%x, %d, %d, %d)", sampleRate, channelCount, channelMask, format, bufferCount, mSessionId); - int afSampleRate; - int afFrameCount; + uint32_t afSampleRate; + size_t afFrameCount; uint32_t frameCount; if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) { diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index fd648df..b33805d 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -239,22 +239,26 @@ public: static void instantiate(); // IMediaPlayerService interface - virtual sp<IMediaRecorder> createMediaRecorder(pid_t pid); + virtual sp<IMediaRecorder> createMediaRecorder(); void removeMediaRecorderClient(wp<MediaRecorderClient> client); - virtual sp<IMediaMetadataRetriever> createMetadataRetriever(pid_t pid); + virtual sp<IMediaMetadataRetriever> createMetadataRetriever(); - virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int audioSessionId); + virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId); virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, audio_format_t* pFormat); virtual sp<IOMX> getOMX(); virtual sp<ICrypto> makeCrypto(); - virtual sp<IHDCP> makeHDCP(); + virtual sp<IDrm> makeDrm(); + virtual sp<IHDCP> makeHDCP(bool createEncryptionModule); virtual sp<IRemoteDisplay> listenForRemoteDisplay(const sp<IRemoteDisplayClient>& client, const String8& iface); virtual status_t dump(int fd, const Vector<String16>& args); + virtual status_t updateProxyConfig( + const char *host, int32_t port, const char *exclusionList); + void removeClient(wp<Client> client); // For battery usage tracking purpose @@ -307,7 +311,7 @@ private: // IMediaPlayer interface virtual void disconnect(); virtual status_t setVideoSurfaceTexture( - const sp<ISurfaceTexture>& surfaceTexture); + const sp<IGraphicBufferProducer>& bufferProducer); virtual status_t prepareAsync(); virtual status_t start(); virtual status_t stop(); diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index eadc8ee..a9820e0 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ 
b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -38,7 +38,7 @@ #include "MediaPlayerService.h" #include "StagefrightRecorder.h" -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> namespace android { @@ -56,7 +56,7 @@ static bool checkPermission(const char* permissionString) { } -sp<ISurfaceTexture> MediaRecorderClient::querySurfaceMediaSource() +sp<IGraphicBufferProducer> MediaRecorderClient::querySurfaceMediaSource() { ALOGV("Query SurfaceMediaSource"); Mutex::Autolock lock(mLock); @@ -81,7 +81,7 @@ status_t MediaRecorderClient::setCamera(const sp<ICamera>& camera, return mRecorder->setCamera(camera, proxy); } -status_t MediaRecorderClient::setPreviewSurface(const sp<Surface>& surface) +status_t MediaRecorderClient::setPreviewSurface(const sp<IGraphicBufferProducer>& surface) { ALOGV("setPreviewSurface"); Mutex::Autolock lock(mLock); @@ -99,7 +99,7 @@ status_t MediaRecorderClient::setVideoSource(int vs) return PERMISSION_DENIED; } Mutex::Autolock lock(mLock); - if (mRecorder == NULL) { + if (mRecorder == NULL) { ALOGE("recorder is not initialized"); return NO_INIT; } @@ -325,6 +325,16 @@ status_t MediaRecorderClient::setListener(const sp<IMediaRecorderClient>& listen return mRecorder->setListener(listener); } +status_t MediaRecorderClient::setClientName(const String16& clientName) { + ALOGV("setClientName(%s)", String8(clientName).string()); + Mutex::Autolock lock(mLock); + if (mRecorder == NULL) { + ALOGE("recorder is not initialized"); + return NO_INIT; + } + return mRecorder->setClientName(clientName); +} + status_t MediaRecorderClient::dump(int fd, const Vector<String16>& args) const { if (mRecorder != NULL) { return mRecorder->dump(fd, args); diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index c9ccf22..a65ec9f 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -25,14 +25,14 @@ namespace android { class MediaRecorderBase; class MediaPlayerService; class ICameraRecordingProxy; -class ISurfaceTexture; +class IGraphicBufferProducer; class MediaRecorderClient : public BnMediaRecorder { public: virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy); - virtual status_t setPreviewSurface(const sp<Surface>& surface); + virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface); virtual status_t setVideoSource(int vs); virtual status_t setAudioSource(int as); virtual status_t setOutputFormat(int of); @@ -46,6 +46,7 @@ public: virtual status_t setParameters(const String8& params); virtual status_t setListener( const sp<IMediaRecorderClient>& listener); + virtual status_t setClientName(const String16& clientName); virtual status_t prepare(); virtual status_t getMaxAmplitude(int* max); virtual status_t start(); @@ -55,7 +56,7 @@ public: virtual status_t close(); virtual status_t release(); virtual status_t dump(int fd, const Vector<String16>& args) const; - virtual sp<ISurfaceTexture> querySurfaceMediaSource(); + virtual sp<IGraphicBufferProducer> querySurfaceMediaSource(); private: friend class MediaPlayerService; // for accessing private constructor diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h index f6f8f7b..24d59b4 100644 --- a/media/libmediaplayerservice/MidiFile.h +++ b/media/libmediaplayerservice/MidiFile.h @@ -36,7 +36,7 @@ public: virtual status_t setDataSource(int fd, int64_t offset, int64_t length); virtual 
status_t setVideoSurfaceTexture( - const sp<ISurfaceTexture>& surfaceTexture) + const sp<IGraphicBufferProducer>& bufferProducer) { return UNKNOWN_ERROR; } virtual status_t prepare(); virtual status_t prepareAsync(); diff --git a/media/libmediaplayerservice/SharedLibrary.cpp b/media/libmediaplayerservice/SharedLibrary.cpp new file mode 100644 index 0000000..178e15d --- /dev/null +++ b/media/libmediaplayerservice/SharedLibrary.cpp @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "Drm" +#include <utils/Log.h> +#include <media/stagefright/foundation/ADebug.h> + +#include <dlfcn.h> + +#include "SharedLibrary.h" + +namespace android { + + SharedLibrary::SharedLibrary(const String8 &path) { + mLibHandle = dlopen(path.string(), RTLD_NOW); + } + + SharedLibrary::~SharedLibrary() { + if (mLibHandle != NULL) { + dlclose(mLibHandle); + mLibHandle = NULL; + } + } + + bool SharedLibrary::operator!() const { + return mLibHandle == NULL; + } + + void *SharedLibrary::lookup(const char *symbol) const { + if (!mLibHandle) { + return NULL; + } + return dlsym(mLibHandle, symbol); + } +}; diff --git a/media/libmediaplayerservice/SharedLibrary.h b/media/libmediaplayerservice/SharedLibrary.h new file mode 100644 index 0000000..5353642 --- /dev/null +++ b/media/libmediaplayerservice/SharedLibrary.h @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SHARED_LIBRARY_H_ +#define SHARED_LIBRARY_H_ + +#include <utils/RefBase.h> +#include <utils/String8.h> +#include <media/stagefright/foundation/ABase.h> + +namespace android { + class SharedLibrary : public RefBase { + public: + SharedLibrary(const String8 &path); + ~SharedLibrary(); + + bool operator!() const; + void *lookup(const char *symbol) const; + + private: + void *mLibHandle; + DISALLOW_EVIL_CONSTRUCTORS(SharedLibrary); + }; +}; + +#endif // SHARED_LIBRARY_H_ diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp index 619c149..de61d9b 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.cpp +++ b/media/libmediaplayerservice/StagefrightPlayer.cpp @@ -70,10 +70,10 @@ status_t StagefrightPlayer::setDataSource(const sp<IStreamSource> &source) { } status_t StagefrightPlayer::setVideoSurfaceTexture( - const sp<ISurfaceTexture> &surfaceTexture) { + const sp<IGraphicBufferProducer> &bufferProducer) { ALOGV("setVideoSurfaceTexture"); - return mPlayer->setSurfaceTexture(surfaceTexture); + return mPlayer->setSurfaceTexture(bufferProducer); } status_t StagefrightPlayer::prepare() { diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h index e89e18a..600945e 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.h +++ b/media/libmediaplayerservice/StagefrightPlayer.h @@ -41,7 +41,7 @@ public: virtual status_t setDataSource(const sp<IStreamSource> &source); virtual status_t setVideoSurfaceTexture( - const sp<ISurfaceTexture> &surfaceTexture); + const sp<IGraphicBufferProducer> &bufferProducer); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index 57b0ec2..095d5ca 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -70,7 +70,8 @@ StagefrightRecorder::StagefrightRecorder() mOutputFd(-1), mAudioSource(AUDIO_SOURCE_CNT), mVideoSource(VIDEO_SOURCE_LIST_END), - mStarted(false), mSurfaceMediaSource(NULL) { + mStarted(false), mSurfaceMediaSource(NULL), + mCaptureTimeLapse(false) { ALOGV("Constructor"); reset(); @@ -89,7 +90,7 @@ status_t StagefrightRecorder::init() { // The client side of mediaserver asks it to creat a SurfaceMediaSource // and return a interface reference. 
The client side will use that // while encoding GL Frames -sp<ISurfaceTexture> StagefrightRecorder::querySurfaceMediaSource() const { +sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const { ALOGV("Get SurfaceMediaSource"); return mSurfaceMediaSource->getBufferQueue(); } @@ -224,7 +225,7 @@ status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera, return OK; } -status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) { +status_t StagefrightRecorder::setPreviewSurface(const sp<IGraphicBufferProducer> &surface) { ALOGV("setPreviewSurface: %p", surface.get()); mPreviewSurface = surface; @@ -730,6 +731,12 @@ status_t StagefrightRecorder::setListener(const sp<IMediaRecorderClient> &listen return OK; } +status_t StagefrightRecorder::setClientName(const String16& clientName) { + mClientName = clientName; + + return OK; +} + status_t StagefrightRecorder::prepare() { return OK; } @@ -737,6 +744,8 @@ status_t StagefrightRecorder::prepare() { status_t StagefrightRecorder::start() { CHECK_GE(mOutputFd, 0); + // Get UID here for permission checking + mClientUid = IPCThreadState::self()->getCallingUid(); if (mWriter != NULL) { ALOGE("File writer is not avaialble"); return UNKNOWN_ERROR; @@ -1312,13 +1321,14 @@ status_t StagefrightRecorder::setupCameraSource( } mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera( - mCamera, mCameraProxy, mCameraId, + mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, videoSize, mFrameRate, mPreviewSurface, mTimeBetweenTimeLapseFrameCaptureUs); *cameraSource = mCameraSourceTimeLapse; } else { *cameraSource = CameraSource::CreateFromCamera( - mCamera, mCameraProxy, mCameraId, videoSize, mFrameRate, + mCamera, mCameraProxy, mCameraId, mClientName, mClientUid, + videoSize, mFrameRate, mPreviewSurface, true /*storeMetaDataInVideoBuffers*/); } mCamera.clear(); diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index ec5ce7e..c864207 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -35,7 +35,7 @@ struct MediaWriter; class MetaData; struct AudioSource; class MediaProfiles; -class ISurfaceTexture; +class IGraphicBufferProducer; class SurfaceMediaSource; struct StagefrightRecorder : public MediaRecorderBase { @@ -51,11 +51,12 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t setVideoSize(int width, int height); virtual status_t setVideoFrameRate(int frames_per_second); virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy); - virtual status_t setPreviewSurface(const sp<Surface>& surface); + virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface); virtual status_t setOutputFile(const char *path); virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); virtual status_t setParameters(const String8& params); virtual status_t setListener(const sp<IMediaRecorderClient>& listener); + virtual status_t setClientName(const String16& clientName); virtual status_t prepare(); virtual status_t start(); virtual status_t pause(); @@ -65,13 +66,15 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t getMaxAmplitude(int *max); virtual status_t dump(int fd, const Vector<String16>& args) const; // Querying a SurfaceMediaSourcer - virtual sp<ISurfaceTexture> querySurfaceMediaSource() const; + virtual sp<IGraphicBufferProducer> querySurfaceMediaSource() const; private: 
sp<ICamera> mCamera; sp<ICameraRecordingProxy> mCameraProxy; - sp<Surface> mPreviewSurface; + sp<IGraphicBufferProducer> mPreviewSurface; sp<IMediaRecorderClient> mListener; + String16 mClientName; + uid_t mClientUid; sp<MediaWriter> mWriter; int mOutputFd; sp<AudioSource> mAudioSourceNode; @@ -116,7 +119,7 @@ private: bool mStarted; // Needed when GLFrames are encoded. - // An <ISurfaceTexture> pointer + // An <IGraphicBufferProducer> pointer // will be sent to the client side using which the // frame buffers will be queued and dequeued sp<SurfaceMediaSource> mSurfaceMediaSource; diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h index 91ffa7d..a3802eb 100644 --- a/media/libmediaplayerservice/TestPlayerStub.h +++ b/media/libmediaplayerservice/TestPlayerStub.h @@ -76,7 +76,7 @@ class TestPlayerStub : public MediaPlayerInterface { // All the methods below wrap the mPlayer instance. virtual status_t setVideoSurfaceTexture( - const android::sp<android::ISurfaceTexture>& st) { + const android::sp<android::IGraphicBufferProducer>& st) { return mPlayer->setVideoSurfaceTexture(st); } virtual status_t prepare() {return mPlayer->prepare();} diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp index f281879..b04e7a6 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp +++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp @@ -32,11 +32,13 @@ namespace android { NuPlayer::GenericSource::GenericSource( + const sp<AMessage> ¬ify, const char *url, const KeyedVector<String8, String8> *headers, bool uidValid, uid_t uid) - : mDurationUs(0ll), + : Source(notify), + mDurationUs(0ll), mAudioIsVorbis(false) { DataSource::RegisterDefaultSniffers(); @@ -48,8 +50,10 @@ NuPlayer::GenericSource::GenericSource( } NuPlayer::GenericSource::GenericSource( + const sp<AMessage> ¬ify, int fd, int64_t offset, int64_t length) - : mDurationUs(0ll), + : Source(notify), + mDurationUs(0ll), mAudioIsVorbis(false) { DataSource::RegisterDefaultSniffers(); @@ -102,6 +106,26 @@ void NuPlayer::GenericSource::initFromDataSource( NuPlayer::GenericSource::~GenericSource() { } +void NuPlayer::GenericSource::prepareAsync() { + if (mVideoTrack.mSource != NULL) { + sp<MetaData> meta = mVideoTrack.mSource->getFormat(); + + int32_t width, height; + CHECK(meta->findInt32(kKeyWidth, &width)); + CHECK(meta->findInt32(kKeyHeight, &height)); + + notifyVideoSizeChanged(width, height); + } + + notifyFlagsChanged( + FLAG_CAN_PAUSE + | FLAG_CAN_SEEK_BACKWARD + | FLAG_CAN_SEEK_FORWARD + | FLAG_CAN_SEEK); + + notifyPrepared(); +} + void NuPlayer::GenericSource::start() { ALOGI("start"); @@ -258,8 +282,4 @@ void NuPlayer::GenericSource::readBuffer( } } -uint32_t NuPlayer::GenericSource::flags() const { - return FLAG_SEEKABLE; -} - } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h index e1ce2c1..2da680c 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.h +++ b/media/libmediaplayerservice/nuplayer/GenericSource.h @@ -32,12 +32,17 @@ struct MediaSource; struct NuPlayer::GenericSource : public NuPlayer::Source { GenericSource( + const sp<AMessage> ¬ify, const char *url, const KeyedVector<String8, String8> *headers, bool uidValid = false, uid_t uid = 0); - GenericSource(int fd, int64_t offset, int64_t length); + GenericSource( + const sp<AMessage> ¬ify, + int fd, int64_t offset, int64_t length); + + virtual 
void prepareAsync(); virtual void start(); @@ -48,8 +53,6 @@ struct NuPlayer::GenericSource : public NuPlayer::Source { virtual status_t getDuration(int64_t *durationUs); virtual status_t seekTo(int64_t seekTimeUs); - virtual uint32_t flags() const; - protected: virtual ~GenericSource(); diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index 5dcca12..655ee55 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -34,10 +34,12 @@ namespace android { NuPlayer::HTTPLiveSource::HTTPLiveSource( + const sp<AMessage> ¬ify, const char *url, const KeyedVector<String8, String8> *headers, bool uidValid, uid_t uid) - : mURL(url), + : Source(notify), + mURL(url), mUIDValid(uidValid), mUID(uid), mFlags(0), @@ -64,12 +66,15 @@ NuPlayer::HTTPLiveSource::~HTTPLiveSource() { } } -void NuPlayer::HTTPLiveSource::start() { +void NuPlayer::HTTPLiveSource::prepareAsync() { mLiveLooper = new ALooper; mLiveLooper->setName("http live"); mLiveLooper->start(); + sp<AMessage> notify = new AMessage(kWhatSessionNotify, id()); + mLiveSession = new LiveSession( + notify, (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0, mUIDValid, mUID); @@ -81,6 +86,9 @@ void NuPlayer::HTTPLiveSource::start() { mTSParser = new ATSParser; } +void NuPlayer::HTTPLiveSource::start() { +} + sp<MetaData> NuPlayer::HTTPLiveSource::getFormatMeta(bool audio) { ATSParser::SourceType type = audio ? ATSParser::AUDIO : ATSParser::VIDEO; @@ -192,17 +200,58 @@ status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) { return OK; } -uint32_t NuPlayer::HTTPLiveSource::flags() const { - uint32_t flags = 0; - if (mLiveSession->isSeekable()) { - flags |= FLAG_SEEKABLE; - } +void NuPlayer::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatSessionNotify: + { + onSessionNotify(msg); + break; + } - if (mLiveSession->hasDynamicDuration()) { - flags |= FLAG_DYNAMIC_DURATION; + default: + Source::onMessageReceived(msg); + break; } +} - return flags; +void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case LiveSession::kWhatPrepared: + { + notifyVideoSizeChanged(0, 0); + + uint32_t flags = FLAG_CAN_PAUSE; + if (mLiveSession->isSeekable()) { + flags |= FLAG_CAN_SEEK; + flags |= FLAG_CAN_SEEK_BACKWARD; + flags |= FLAG_CAN_SEEK_FORWARD; + } + + if (mLiveSession->hasDynamicDuration()) { + flags |= FLAG_DYNAMIC_DURATION; + } + + notifyFlagsChanged(flags); + + notifyPrepared(); + break; + } + + case LiveSession::kWhatPreparationFailed: + { + status_t err; + CHECK(msg->findInt32("err", &err)); + + notifyPrepared(err); + break; + } + + default: + TRESPASS(); + } } } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h index 79f4ab8..067d1da 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -28,11 +28,13 @@ struct LiveSession; struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { HTTPLiveSource( + const sp<AMessage> ¬ify, const char *url, const KeyedVector<String8, String8> *headers, bool uidValid = false, uid_t uid = 0); + virtual void prepareAsync(); virtual void start(); virtual status_t feedMoreTSData(); @@ -42,19 +44,23 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source 
{ virtual status_t getDuration(int64_t *durationUs); virtual status_t seekTo(int64_t seekTimeUs); - virtual uint32_t flags() const; - protected: virtual ~HTTPLiveSource(); virtual sp<MetaData> getFormatMeta(bool audio); + virtual void onMessageReceived(const sp<AMessage> &msg); + private: enum Flags { // Don't log any URLs. kFlagIncognito = 1, }; + enum { + kWhatSessionNotify, + }; + AString mURL; KeyedVector<String8, String8> mExtraHeaders; bool mUIDValid; @@ -66,6 +72,8 @@ private: sp<LiveSession> mLiveSession; sp<ATSParser> mTSParser; + void onSessionNotify(const sp<AMessage> &msg); + DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index ff27873..b89b1c8 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -41,7 +41,7 @@ #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaErrors.h> #include <media/stagefright/MetaData.h> -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> #include "avc_utils.h" @@ -50,10 +50,69 @@ namespace android { +struct NuPlayer::Action : public RefBase { + Action() {} + + virtual void execute(NuPlayer *player) = 0; + +private: + DISALLOW_EVIL_CONSTRUCTORS(Action); +}; + +struct NuPlayer::SeekAction : public Action { + SeekAction(int64_t seekTimeUs) + : mSeekTimeUs(seekTimeUs) { + } + + virtual void execute(NuPlayer *player) { + player->performSeek(mSeekTimeUs); + } + +private: + int64_t mSeekTimeUs; + + DISALLOW_EVIL_CONSTRUCTORS(SeekAction); +}; + +struct NuPlayer::SetSurfaceAction : public Action { + SetSurfaceAction(const sp<NativeWindowWrapper> &wrapper) + : mWrapper(wrapper) { + } + + virtual void execute(NuPlayer *player) { + player->performSetSurface(mWrapper); + } + +private: + sp<NativeWindowWrapper> mWrapper; + + DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction); +}; + +// Use this if there's no state necessary to save in order to execute +// the action. 
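The Action objects introduced above let NuPlayer defer work such as seeks, surface changes and resets: each piece of work is wrapped in a small object, queued in mDeferredActions, and executed later from processDeferredActions() once it is safe to run (for example after a pending decoder flush or shutdown has completed). The self-contained sketch below shows the same deferred-action shape outside of NuPlayer; Player, mFlushing and seekTo are illustrative stand-ins, not code from this patch. The patch's own SimpleAction, which wraps a bare member-function pointer for stateless work, follows the sketch.

    // Illustrative sketch only (C++11); none of these names come from the patch.
    #include <cstdio>
    #include <memory>
    #include <utility>
    #include <vector>

    struct Player;

    struct Action {
        virtual ~Action() {}
        virtual void execute(Player *player) = 0;   // runs the deferred work
    };

    struct Player {
        typedef void (Player::*ActionFunc)();

        // Stateless action: just remembers which member function to call.
        struct SimpleAction : public Action {
            explicit SimpleAction(ActionFunc func) : mFunc(func) {}
            virtual void execute(Player *player) { (player->*mFunc)(); }
        private:
            ActionFunc mFunc;
        };

        void seekTo() {
            // Pretend a decoder flush must finish before the seek may run.
            mFlushing = true;
            mDeferredActions.push_back(
                    std::unique_ptr<Action>(new SimpleAction(&Player::performSeek)));
            processDeferredActions();   // still flushing, so nothing runs yet
        }

        void onFlushComplete() {
            mFlushing = false;
            processDeferredActions();   // now the queued seek executes
        }

    private:
        void performSeek() { std::printf("performSeek() executed\n"); }

        // Drain the queue in FIFO order, but only while no flush is pending.
        void processDeferredActions() {
            while (!mFlushing && !mDeferredActions.empty()) {
                std::unique_ptr<Action> action = std::move(mDeferredActions.front());
                mDeferredActions.erase(mDeferredActions.begin());
                action->execute(this);
            }
        }

        bool mFlushing = false;
        std::vector<std::unique_ptr<Action> > mDeferredActions;
    };

    int main() {
        Player player;
        player.seekTo();           // queued, deferred
        player.onFlushComplete();  // prints "performSeek() executed"
        return 0;
    }

Queueing the work instead of performing it inline is what lets the patch drop the old mResetInProgress/mResetPostponed bookkeeping further down: a reset or seek simply becomes queued actions (flush or shut down the decoders, then perform the reset or seek).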
+struct NuPlayer::SimpleAction : public Action { + typedef void (NuPlayer::*ActionFunc)(); + + SimpleAction(ActionFunc func) + : mFunc(func) { + } + + virtual void execute(NuPlayer *player) { + (player->*mFunc)(); + } + +private: + ActionFunc mFunc; + + DISALLOW_EVIL_CONSTRUCTORS(SimpleAction); +}; + //////////////////////////////////////////////////////////////////////////////// NuPlayer::NuPlayer() : mUIDValid(false), + mSourceFlags(0), mVideoIsAVC(false), mAudioEOS(false), mVideoEOS(false), @@ -63,14 +122,13 @@ NuPlayer::NuPlayer() mTimeDiscontinuityPending(false), mFlushingAudio(NONE), mFlushingVideo(NONE), - mResetInProgress(false), - mResetPostponed(false), mSkipRenderingAudioUntilMediaTimeUs(-1ll), mSkipRenderingVideoUntilMediaTimeUs(-1ll), mVideoLateByUs(0ll), mNumFramesTotal(0ll), mNumFramesDropped(0ll), - mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW) { + mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW), + mStarted(false) { } NuPlayer::~NuPlayer() { @@ -85,15 +143,17 @@ void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) { mDriver = driver; } -void NuPlayer::setDataSource(const sp<IStreamSource> &source) { +void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) { sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); + char prop[PROPERTY_VALUE_MAX]; if (property_get("media.stagefright.use-mp4source", prop, NULL) && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) { - msg->setObject("source", new MP4Source(source)); + msg->setObject("source", new MP4Source(notify, source)); } else { - msg->setObject("source", new StreamingSource(source)); + msg->setObject("source", new StreamingSource(notify, source)); } msg->post(); @@ -101,7 +161,8 @@ void NuPlayer::setDataSource(const sp<IStreamSource> &source) { static bool IsHTTPLiveURL(const char *url) { if (!strncasecmp("http://", url, 7) - || !strncasecmp("https://", url, 8)) { + || !strncasecmp("https://", url, 8) + || !strncasecmp("file://", url, 7)) { size_t len = strlen(url); if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) { return true; @@ -115,36 +176,58 @@ static bool IsHTTPLiveURL(const char *url) { return false; } -void NuPlayer::setDataSource( +void NuPlayer::setDataSourceAsync( const char *url, const KeyedVector<String8, String8> *headers) { sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + size_t len = strlen(url); + + sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); sp<Source> source; if (IsHTTPLiveURL(url)) { - source = new HTTPLiveSource(url, headers, mUIDValid, mUID); + source = new HTTPLiveSource(notify, url, headers, mUIDValid, mUID); } else if (!strncasecmp(url, "rtsp://", 7)) { - source = new RTSPSource(url, headers, mUIDValid, mUID); + source = new RTSPSource(notify, url, headers, mUIDValid, mUID); + } else if ((!strncasecmp(url, "http://", 7) + || !strncasecmp(url, "https://", 8)) + && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4])) + || strstr(url, ".sdp?"))) { + source = new RTSPSource(notify, url, headers, mUIDValid, mUID, true); } else { - source = new GenericSource(url, headers, mUIDValid, mUID); + source = new GenericSource(notify, url, headers, mUIDValid, mUID); } msg->setObject("source", source); msg->post(); } -void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) { +void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) { sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); - sp<Source> source = new GenericSource(fd, offset, 
length); + sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); + + sp<Source> source = new GenericSource(notify, fd, offset, length); msg->setObject("source", source); msg->post(); } -void NuPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) { +void NuPlayer::prepareAsync() { + (new AMessage(kWhatPrepare, id()))->post(); +} + +void NuPlayer::setVideoSurfaceTextureAsync( + const sp<IGraphicBufferProducer> &bufferProducer) { sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id()); - sp<SurfaceTextureClient> surfaceTextureClient(surfaceTexture != NULL ? - new SurfaceTextureClient(surfaceTexture) : NULL); - msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient)); + + if (bufferProducer == NULL) { + msg->setObject("native-window", NULL); + } else { + msg->setObject( + "native-window", + new NativeWindowWrapper( + new Surface(bufferProducer))); + } + msg->post(); } @@ -208,6 +291,20 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { CHECK(msg->findObject("source", &obj)); mSource = static_cast<Source *>(obj.get()); + + looper()->registerHandler(mSource); + + CHECK(mDriver != NULL); + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifySetDataSourceCompleted(OK); + } + break; + } + + case kWhatPrepare: + { + mSource->prepareAsync(); break; } @@ -237,13 +334,24 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { { ALOGV("kWhatSetVideoNativeWindow"); + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performDecoderShutdown)); + sp<RefBase> obj; CHECK(msg->findObject("native-window", &obj)); - mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get()); + mDeferredActions.push_back( + new SetSurfaceAction( + static_cast<NativeWindowWrapper *>(obj.get()))); + + if (obj != NULL) { + // If there is a new surface texture, instantiate decoders + // again if possible. + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performScanSources)); + } - // XXX - ignore error from setVideoScalingMode for now - setVideoScalingMode(mVideoScalingMode); + processDeferredActions(); break; } @@ -270,12 +378,20 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { mVideoLateByUs = 0; mNumFramesTotal = 0; mNumFramesDropped = 0; + mStarted = true; mSource->start(); + uint32_t flags = 0; + + if (mSource->isRealTime()) { + flags |= Renderer::FLAG_REAL_TIME; + } + mRenderer = new Renderer( mAudioSink, - new AMessage(kWhatRendererNotify, id())); + new AMessage(kWhatRendererNotify, id()), + flags); looper()->registerHandler(mRenderer); @@ -312,9 +428,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { && (mAudioDecoder != NULL || mVideoDecoder != NULL)) { // This is the first time we've found anything playable. 
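Up to this point NuPlayer queried mSource->flags() synchronously whenever it needed to know what the source could do. With this change the source pushes its capability bits once they are known (notifyFlagsChanged(), as in the GenericSource and HTTPLiveSource changes above) and NuPlayer caches them in mSourceFlags, which the check just below now reads. A minimal sketch of that push-style capability reporting follows; the flag names mirror the patch, but the bit values and the Player type are illustrative, not taken from it.

    // Illustrative sketch only; flag values are made up for the example.
    #include <cstdint>
    #include <cstdio>

    enum {
        FLAG_CAN_PAUSE         = 1,
        FLAG_CAN_SEEK_BACKWARD = 2,
        FLAG_CAN_SEEK_FORWARD  = 4,
        FLAG_CAN_SEEK          = 8,
        FLAG_DYNAMIC_DURATION  = 16,
    };

    struct Player {
        Player() : mSourceFlags(0) {}

        // Called from the source's notification instead of polling flags().
        void onFlagsChanged(uint32_t flags) {
            mSourceFlags = flags;
            if (mSourceFlags & FLAG_DYNAMIC_DURATION) {
                schedulePollDuration();   // duration may keep changing
            }
        }

        bool canSeek() const { return (mSourceFlags & FLAG_CAN_SEEK) != 0; }

    private:
        void schedulePollDuration() {
            std::printf("start polling the duration periodically\n");
        }

        uint32_t mSourceFlags;   // cached capability bits
    };

    int main() {
        Player player;
        // e.g. a live stream that turned out to be pausable, seekable and growing:
        player.onFlagsChanged(FLAG_CAN_PAUSE | FLAG_CAN_SEEK | FLAG_DYNAMIC_DURATION);
        std::printf("canSeek=%d\n", player.canSeek());
        return 0;
    }

Pushing the flags this way suits sources whose capabilities are only known after preparation, such as the HTTP live case above, where seekability and dynamic duration depend on the playlist the LiveSession has loaded.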
- uint32_t flags = mSource->flags(); - - if (flags & Source::FLAG_DYNAMIC_DURATION) { + if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) { schedulePollDuration(); } } @@ -407,7 +521,8 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { } else if (what == ACodec::kWhatOutputFormatChanged) { if (audio) { int32_t numChannels; - CHECK(codecRequest->findInt32("channel-count", &numChannels)); + CHECK(codecRequest->findInt32( + "channel-count", &numChannels)); int32_t sampleRate; CHECK(codecRequest->findInt32("sample-rate", &sampleRate)); @@ -419,13 +534,15 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { audio_output_flags_t flags; int64_t durationUs; - // FIXME: we should handle the case where the video decoder is created after - // we receive the format change indication. Current code will just make that - // we select deep buffer with video which should not be a problem as it should + // FIXME: we should handle the case where the video decoder + // is created after we receive the format change indication. + // Current code will just make that we select deep buffer + // with video which should not be a problem as it should // not prevent from keeping A/V sync. if (mVideoDecoder == NULL && mSource->getDuration(&durationUs) == OK && - durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) { + durationUs + > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) { flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER; } else { flags = AUDIO_OUTPUT_FLAG_NONE; @@ -461,17 +578,35 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { "crop", &cropLeft, &cropTop, &cropRight, &cropBottom)); + int32_t displayWidth = cropRight - cropLeft + 1; + int32_t displayHeight = cropBottom - cropTop + 1; + ALOGV("Video output format changed to %d x %d " "(crop: %d x %d @ (%d, %d))", width, height, - (cropRight - cropLeft + 1), - (cropBottom - cropTop + 1), + displayWidth, + displayHeight, cropLeft, cropTop); + sp<AMessage> videoInputFormat = + mSource->getFormat(false /* audio */); + + // Take into account sample aspect ratio if necessary: + int32_t sarWidth, sarHeight; + if (videoInputFormat->findInt32("sar-width", &sarWidth) + && videoInputFormat->findInt32( + "sar-height", &sarHeight)) { + ALOGV("Sample aspect ratio %d : %d", + sarWidth, sarHeight); + + displayWidth = (displayWidth * sarWidth) / sarHeight; + + ALOGV("display dimensions %d x %d", + displayWidth, displayHeight); + } + notifyListener( - MEDIA_SET_VIDEO_SIZE, - cropRight - cropLeft + 1, - cropBottom - cropTop + 1); + MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight); } } else if (what == ACodec::kWhatShutdownCompleted) { ALOGV("%s shutdown completed", audio ? 
"audio" : "video"); @@ -495,8 +630,15 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { mRenderer->queueEOS(audio, UNKNOWN_ERROR); } else if (what == ACodec::kWhatDrainThisBuffer) { renderBuffer(audio, codecRequest); - } else { - ALOGV("Unhandled codec notification %d.", what); + } else if (what != ACodec::kWhatComponentAllocated + && what != ACodec::kWhatComponentConfigured + && what != ACodec::kWhatBuffersAllocated) { + ALOGV("Unhandled codec notification %d '%c%c%c%c'.", + what, + what >> 24, + (what >> 16) & 0xff, + (what >> 8) & 0xff, + what & 0xff); } break; @@ -550,8 +692,6 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { } } } else if (what == Renderer::kWhatFlushComplete) { - CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete); - int32_t audio; CHECK(msg->findInt32("audio", &audio)); @@ -571,47 +711,13 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { { ALOGV("kWhatReset"); - cancelPollDuration(); - - if (mRenderer != NULL) { - // There's an edge case where the renderer owns all output - // buffers and is paused, therefore the decoder will not read - // more input data and will never encounter the matching - // discontinuity. To avoid this, we resume the renderer. - - if (mFlushingAudio == AWAITING_DISCONTINUITY - || mFlushingVideo == AWAITING_DISCONTINUITY) { - mRenderer->resume(); - } - } + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performDecoderShutdown)); - if (mFlushingAudio != NONE || mFlushingVideo != NONE) { - // We're currently flushing, postpone the reset until that's - // completed. - - ALOGV("postponing reset mFlushingAudio=%d, mFlushingVideo=%d", - mFlushingAudio, mFlushingVideo); - - mResetPostponed = true; - break; - } - - if (mAudioDecoder == NULL && mVideoDecoder == NULL) { - finishReset(); - break; - } + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performReset)); - mTimeDiscontinuityPending = true; - - if (mAudioDecoder != NULL) { - flushDecoder(true /* audio */, true /* needShutdown */); - } - - if (mVideoDecoder != NULL) { - flushDecoder(false /* audio */, true /* needShutdown */); - } - - mResetInProgress = true; + processDeferredActions(); break; } @@ -620,24 +726,21 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { int64_t seekTimeUs; CHECK(msg->findInt64("seekTimeUs", &seekTimeUs)); - ALOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)", - seekTimeUs, seekTimeUs / 1E6); + ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs); - mSource->seekTo(seekTimeUs); + mDeferredActions.push_back( + new SimpleAction(&NuPlayer::performDecoderFlush)); - if (mDriver != NULL) { - sp<NuPlayerDriver> driver = mDriver.promote(); - if (driver != NULL) { - driver->notifySeekComplete(); - } - } + mDeferredActions.push_back(new SeekAction(seekTimeUs)); + processDeferredActions(); break; } case kWhatPause: { CHECK(mRenderer != NULL); + mSource->pause(); mRenderer->pause(); break; } @@ -645,10 +748,17 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { case kWhatResume: { CHECK(mRenderer != NULL); + mSource->resume(); mRenderer->resume(); break; } + case kWhatSourceNotify: + { + onSourceNotify(msg); + break; + } + default: TRESPASS(); break; @@ -682,39 +792,7 @@ void NuPlayer::finishFlushIfPossible() { mFlushingAudio = NONE; mFlushingVideo = NONE; - if (mResetInProgress) { - ALOGV("reset completed"); - - mResetInProgress = false; - finishReset(); - } else if (mResetPostponed) { - (new AMessage(kWhatReset, id()))->post(); - mResetPostponed = false; - } else if (mAudioDecoder == NULL || 
mVideoDecoder == NULL) { - postScanSources(); - } -} - -void NuPlayer::finishReset() { - CHECK(mAudioDecoder == NULL); - CHECK(mVideoDecoder == NULL); - - ++mScanSourcesGeneration; - mScanSourcesPending = false; - - mRenderer.clear(); - - if (mSource != NULL) { - mSource->stop(); - mSource.clear(); - } - - if (mDriver != NULL) { - sp<NuPlayerDriver> driver = mDriver.promote(); - if (driver != NULL) { - driver->notifyResetComplete(); - } - } + processDeferredActions(); } void NuPlayer::postScanSources() { @@ -756,14 +834,6 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) { (*decoder)->configure(format); - int64_t durationUs; - if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { - sp<NuPlayerDriver> driver = mDriver.promote(); - if (driver != NULL) { - driver->notifyDuration(durationUs); - } - } - return OK; } @@ -833,6 +903,14 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) { mTimeDiscontinuityPending || timeChange; if (formatChange || timeChange) { + if (mFlushingAudio == NONE && mFlushingVideo == NONE) { + // And we'll resume scanning sources once we're done + // flushing. + mDeferredActions.push_front( + new SimpleAction( + &NuPlayer::performScanSources)); + } + flushDecoder(audio, formatChange); } else { // This stream is unaffected by the discontinuity @@ -1002,8 +1080,7 @@ sp<AMessage> NuPlayer::Source::getFormat(bool audio) { status_t NuPlayer::setVideoScalingMode(int32_t mode) { mVideoScalingMode = mode; - if (mNativeWindow != NULL - && mNativeWindow->getNativeWindow() != NULL) { + if (mNativeWindow != NULL) { status_t ret = native_window_set_scaling_mode( mNativeWindow->getNativeWindow().get(), mVideoScalingMode); if (ret != OK) { @@ -1025,4 +1102,257 @@ void NuPlayer::cancelPollDuration() { ++mPollDurationGeneration; } +void NuPlayer::processDeferredActions() { + while (!mDeferredActions.empty()) { + // We won't execute any deferred actions until we're no longer in + // an intermediate state, i.e. one more more decoders are currently + // flushing or shutting down. + + if (mRenderer != NULL) { + // There's an edge case where the renderer owns all output + // buffers and is paused, therefore the decoder will not read + // more input data and will never encounter the matching + // discontinuity. To avoid this, we resume the renderer. + + if (mFlushingAudio == AWAITING_DISCONTINUITY + || mFlushingVideo == AWAITING_DISCONTINUITY) { + mRenderer->resume(); + } + } + + if (mFlushingAudio != NONE || mFlushingVideo != NONE) { + // We're currently flushing, postpone the reset until that's + // completed. + + ALOGV("postponing action mFlushingAudio=%d, mFlushingVideo=%d", + mFlushingAudio, mFlushingVideo); + + break; + } + + sp<Action> action = *mDeferredActions.begin(); + mDeferredActions.erase(mDeferredActions.begin()); + + action->execute(this); + } +} + +void NuPlayer::performSeek(int64_t seekTimeUs) { + ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)", + seekTimeUs, + seekTimeUs / 1E6); + + mSource->seekTo(seekTimeUs); + + if (mDriver != NULL) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyPosition(seekTimeUs); + driver->notifySeekComplete(); + } + } + + // everything's flushed, continue playback. 
+} + +void NuPlayer::performDecoderFlush() { + ALOGV("performDecoderFlush"); + + if (mAudioDecoder == NULL && mVideoDecoder == NULL) { + return; + } + + mTimeDiscontinuityPending = true; + + if (mAudioDecoder != NULL) { + flushDecoder(true /* audio */, false /* needShutdown */); + } + + if (mVideoDecoder != NULL) { + flushDecoder(false /* audio */, false /* needShutdown */); + } +} + +void NuPlayer::performDecoderShutdown() { + ALOGV("performDecoderShutdown"); + + if (mAudioDecoder == NULL && mVideoDecoder == NULL) { + return; + } + + mTimeDiscontinuityPending = true; + + if (mAudioDecoder != NULL) { + flushDecoder(true /* audio */, true /* needShutdown */); + } + + if (mVideoDecoder != NULL) { + flushDecoder(false /* audio */, true /* needShutdown */); + } +} + +void NuPlayer::performReset() { + ALOGV("performReset"); + + CHECK(mAudioDecoder == NULL); + CHECK(mVideoDecoder == NULL); + + cancelPollDuration(); + + ++mScanSourcesGeneration; + mScanSourcesPending = false; + + mRenderer.clear(); + + if (mSource != NULL) { + mSource->stop(); + + looper()->unregisterHandler(mSource->id()); + + mSource.clear(); + } + + if (mDriver != NULL) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyResetComplete(); + } + } + + mStarted = false; +} + +void NuPlayer::performScanSources() { + ALOGV("performScanSources"); + + if (!mStarted) { + return; + } + + if (mAudioDecoder == NULL || mVideoDecoder == NULL) { + postScanSources(); + } +} + +void NuPlayer::performSetSurface(const sp<NativeWindowWrapper> &wrapper) { + ALOGV("performSetSurface"); + + mNativeWindow = wrapper; + + // XXX - ignore error from setVideoScalingMode for now + setVideoScalingMode(mVideoScalingMode); + + if (mDriver != NULL) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifySetSurfaceComplete(); + } + } +} + +void NuPlayer::onSourceNotify(const sp<AMessage> &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case Source::kWhatPrepared: + { + if (mSource == NULL) { + // This is a stale notification from a source that was + // asynchronously preparing when the client called reset(). + // We handled the reset, the source is gone. 
+ break; + } + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyPrepareCompleted(err); + } + + int64_t durationUs; + if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyDuration(durationUs); + } + } + break; + } + + case Source::kWhatFlagsChanged: + { + uint32_t flags; + CHECK(msg->findInt32("flags", (int32_t *)&flags)); + + if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION) + && (!(flags & Source::FLAG_DYNAMIC_DURATION))) { + cancelPollDuration(); + } else if (!(mSourceFlags & Source::FLAG_DYNAMIC_DURATION) + && (flags & Source::FLAG_DYNAMIC_DURATION) + && (mAudioDecoder != NULL || mVideoDecoder != NULL)) { + schedulePollDuration(); + } + + mSourceFlags = flags; + break; + } + + case Source::kWhatVideoSizeChanged: + { + int32_t width, height; + CHECK(msg->findInt32("width", &width)); + CHECK(msg->findInt32("height", &height)); + + notifyListener(MEDIA_SET_VIDEO_SIZE, width, height); + break; + } + + case Source::kWhatBufferingStart: + { + notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0); + break; + } + + case Source::kWhatBufferingEnd: + { + notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0); + break; + } + + default: + TRESPASS(); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) { + sp<AMessage> notify = dupNotify(); + notify->setInt32("what", kWhatFlagsChanged); + notify->setInt32("flags", flags); + notify->post(); +} + +void NuPlayer::Source::notifyVideoSizeChanged(int32_t width, int32_t height) { + sp<AMessage> notify = dupNotify(); + notify->setInt32("what", kWhatVideoSizeChanged); + notify->setInt32("width", width); + notify->setInt32("height", height); + notify->post(); +} + +void NuPlayer::Source::notifyPrepared(status_t err) { + sp<AMessage> notify = dupNotify(); + notify->setInt32("what", kWhatPrepared); + notify->setInt32("err", err); + notify->post(); +} + +void NuPlayer::Source::onMessageReceived(const sp<AMessage> &msg) { + TRESPASS(); +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 31efb2e..50d0462 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -35,14 +35,18 @@ struct NuPlayer : public AHandler { void setDriver(const wp<NuPlayerDriver> &driver); - void setDataSource(const sp<IStreamSource> &source); + void setDataSourceAsync(const sp<IStreamSource> &source); - void setDataSource( + void setDataSourceAsync( const char *url, const KeyedVector<String8, String8> *headers); - void setDataSource(int fd, int64_t offset, int64_t length); + void setDataSourceAsync(int fd, int64_t offset, int64_t length); + + void prepareAsync(); + + void setVideoSurfaceTextureAsync( + const sp<IGraphicBufferProducer> &bufferProducer); - void setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture); void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink); void start(); @@ -73,9 +77,14 @@ private: struct Renderer; struct RTSPSource; struct StreamingSource; + struct Action; + struct SeekAction; + struct SetSurfaceAction; + struct SimpleAction; enum { kWhatSetDataSource = '=DaS', + kWhatPrepare = 'prep', kWhatSetVideoNativeWindow = '=NaW', kWhatSetAudioSink = '=AuS', kWhatMoreDataQueued = 'more', @@ -89,12 +98,14 
@@ private: kWhatPause = 'paus', kWhatResume = 'rsme', kWhatPollDuration = 'polD', + kWhatSourceNotify = 'srcN', }; wp<NuPlayerDriver> mDriver; bool mUIDValid; uid_t mUID; sp<Source> mSource; + uint32_t mSourceFlags; sp<NativeWindowWrapper> mNativeWindow; sp<MediaPlayerBase::AudioSink> mAudioSink; sp<Decoder> mVideoDecoder; @@ -102,6 +113,8 @@ private: sp<Decoder> mAudioDecoder; sp<Renderer> mRenderer; + List<sp<Action> > mDeferredActions; + bool mAudioEOS; bool mVideoEOS; @@ -126,8 +139,6 @@ private: FlushStatus mFlushingAudio; FlushStatus mFlushingVideo; - bool mResetInProgress; - bool mResetPostponed; int64_t mSkipRenderingAudioUntilMediaTimeUs; int64_t mSkipRenderingVideoUntilMediaTimeUs; @@ -137,6 +148,8 @@ private: int32_t mVideoScalingMode; + bool mStarted; + status_t instantiateDecoder(bool audio, sp<Decoder> *decoder); status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg); @@ -150,12 +163,22 @@ private: static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL); - void finishReset(); void postScanSources(); void schedulePollDuration(); void cancelPollDuration(); + void processDeferredActions(); + + void performSeek(int64_t seekTimeUs); + void performDecoderFlush(); + void performDecoderShutdown(); + void performReset(); + void performScanSources(); + void performSetSurface(const sp<NativeWindowWrapper> &wrapper); + + void onSourceNotify(const sp<AMessage> &msg); + DISALLOW_EVIL_CONSTRUCTORS(NuPlayer); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index d03601f..68b9623 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -21,20 +21,25 @@ #include "NuPlayerDriver.h" #include "NuPlayer.h" +#include "NuPlayerSource.h" #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/ALooper.h> +#include <media/stagefright/MetaData.h> namespace android { NuPlayerDriver::NuPlayerDriver() - : mResetInProgress(false), + : mState(STATE_IDLE), + mIsAsyncPrepare(false), + mAsyncResult(UNKNOWN_ERROR), + mSetSurfaceInProgress(false), mDurationUs(-1), mPositionUs(-1), mNumFramesTotal(0), mNumFramesDropped(0), mLooper(new ALooper), - mState(UNINITIALIZED), + mPlayerFlags(0), mAtEOS(false), mStartupSeekTimeUs(-1) { mLooper->setName("NuPlayerDriver Looper"); @@ -66,60 +71,143 @@ status_t NuPlayerDriver::setUID(uid_t uid) { status_t NuPlayerDriver::setDataSource( const char *url, const KeyedVector<String8, String8> *headers) { - CHECK_EQ((int)mState, (int)UNINITIALIZED); + Mutex::Autolock autoLock(mLock); - mPlayer->setDataSource(url, headers); + if (mState != STATE_IDLE) { + return INVALID_OPERATION; + } - mState = STOPPED; + mState = STATE_SET_DATASOURCE_PENDING; - return OK; + mPlayer->setDataSourceAsync(url, headers); + + while (mState == STATE_SET_DATASOURCE_PENDING) { + mCondition.wait(mLock); + } + + return mAsyncResult; } status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) { - CHECK_EQ((int)mState, (int)UNINITIALIZED); + Mutex::Autolock autoLock(mLock); + + if (mState != STATE_IDLE) { + return INVALID_OPERATION; + } - mPlayer->setDataSource(fd, offset, length); + mState = STATE_SET_DATASOURCE_PENDING; - mState = STOPPED; + mPlayer->setDataSourceAsync(fd, offset, length); - return OK; + while (mState == STATE_SET_DATASOURCE_PENDING) { + mCondition.wait(mLock); + } + + return mAsyncResult; } status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> 
&source) { - CHECK_EQ((int)mState, (int)UNINITIALIZED); + Mutex::Autolock autoLock(mLock); + + if (mState != STATE_IDLE) { + return INVALID_OPERATION; + } - mPlayer->setDataSource(source); + mState = STATE_SET_DATASOURCE_PENDING; - mState = STOPPED; + mPlayer->setDataSourceAsync(source); - return OK; + while (mState == STATE_SET_DATASOURCE_PENDING) { + mCondition.wait(mLock); + } + + return mAsyncResult; } status_t NuPlayerDriver::setVideoSurfaceTexture( - const sp<ISurfaceTexture> &surfaceTexture) { - mPlayer->setVideoSurfaceTexture(surfaceTexture); + const sp<IGraphicBufferProducer> &bufferProducer) { + Mutex::Autolock autoLock(mLock); + + if (mSetSurfaceInProgress) { + return INVALID_OPERATION; + } + + switch (mState) { + case STATE_SET_DATASOURCE_PENDING: + case STATE_RESET_IN_PROGRESS: + return INVALID_OPERATION; + + default: + break; + } + + mSetSurfaceInProgress = true; + + mPlayer->setVideoSurfaceTextureAsync(bufferProducer); + + while (mSetSurfaceInProgress) { + mCondition.wait(mLock); + } return OK; } status_t NuPlayerDriver::prepare() { - sendEvent(MEDIA_SET_VIDEO_SIZE, 0, 0); - return OK; + Mutex::Autolock autoLock(mLock); + return prepare_l(); } -status_t NuPlayerDriver::prepareAsync() { - status_t err = prepare(); +status_t NuPlayerDriver::prepare_l() { + switch (mState) { + case STATE_UNPREPARED: + mState = STATE_PREPARING; + + // Make sure we're not posting any notifications, success or + // failure information is only communicated through our result + // code. + mIsAsyncPrepare = false; + mPlayer->prepareAsync(); + while (mState == STATE_PREPARING) { + mCondition.wait(mLock); + } + return (mState == STATE_PREPARED) ? OK : UNKNOWN_ERROR; + default: + return INVALID_OPERATION; + }; +} - notifyListener(MEDIA_PREPARED); +status_t NuPlayerDriver::prepareAsync() { + Mutex::Autolock autoLock(mLock); - return err; + switch (mState) { + case STATE_UNPREPARED: + mState = STATE_PREPARING; + mIsAsyncPrepare = true; + mPlayer->prepareAsync(); + return OK; + default: + return INVALID_OPERATION; + }; } status_t NuPlayerDriver::start() { + Mutex::Autolock autoLock(mLock); + switch (mState) { - case UNINITIALIZED: - return INVALID_OPERATION; - case STOPPED: + case STATE_UNPREPARED: + { + status_t err = prepare_l(); + + if (err != OK) { + return err; + } + + CHECK_EQ(mState, STATE_PREPARED); + + // fall through + } + + case STATE_PREPARED: { mAtEOS = false; mPlayer->start(); @@ -133,21 +221,23 @@ status_t NuPlayerDriver::start() { mStartupSeekTimeUs = -1; } - break; } - case PLAYING: - return OK; - default: - { - CHECK_EQ((int)mState, (int)PAUSED); + case STATE_RUNNING: + break; + + case STATE_PAUSED: + { mPlayer->resume(); break; } + + default: + return INVALID_OPERATION; } - mState = PLAYING; + mState = STATE_RUNNING; return OK; } @@ -157,43 +247,44 @@ status_t NuPlayerDriver::stop() { } status_t NuPlayerDriver::pause() { + Mutex::Autolock autoLock(mLock); + switch (mState) { - case UNINITIALIZED: - return INVALID_OPERATION; - case STOPPED: + case STATE_PAUSED: + case STATE_PREPARED: return OK; - case PLAYING: + + case STATE_RUNNING: mPlayer->pause(); break; + default: - { - CHECK_EQ((int)mState, (int)PAUSED); - return OK; - } + return INVALID_OPERATION; } - mState = PAUSED; + mState = STATE_PAUSED; return OK; } bool NuPlayerDriver::isPlaying() { - return mState == PLAYING && !mAtEOS; + return mState == STATE_RUNNING && !mAtEOS; } status_t NuPlayerDriver::seekTo(int msec) { + Mutex::Autolock autoLock(mLock); + int64_t seekTimeUs = msec * 1000ll; switch (mState) { - case UNINITIALIZED: - 
return INVALID_OPERATION; - case STOPPED: + case STATE_PREPARED: { mStartupSeekTimeUs = seekTimeUs; break; } - case PLAYING: - case PAUSED: + + case STATE_RUNNING: + case STATE_PAUSED: { mAtEOS = false; mPlayer->seekToAsync(seekTimeUs); @@ -201,8 +292,7 @@ status_t NuPlayerDriver::seekTo(int msec) { } default: - TRESPASS(); - break; + return INVALID_OPERATION; } return OK; @@ -224,27 +314,46 @@ status_t NuPlayerDriver::getDuration(int *msec) { Mutex::Autolock autoLock(mLock); if (mDurationUs < 0) { - *msec = 0; - } else { - *msec = (mDurationUs + 500ll) / 1000; + return UNKNOWN_ERROR; } + *msec = (mDurationUs + 500ll) / 1000; + return OK; } status_t NuPlayerDriver::reset() { Mutex::Autolock autoLock(mLock); - mResetInProgress = true; + switch (mState) { + case STATE_IDLE: + return OK; + + case STATE_SET_DATASOURCE_PENDING: + case STATE_RESET_IN_PROGRESS: + return INVALID_OPERATION; + + case STATE_PREPARING: + { + CHECK(mIsAsyncPrepare); + + notifyListener(MEDIA_PREPARED); + break; + } + + default: + break; + } + + mState = STATE_RESET_IN_PROGRESS; mPlayer->resetAsync(); - while (mResetInProgress) { + while (mState == STATE_RESET_IN_PROGRESS) { mCondition.wait(mLock); } mDurationUs = -1; mPositionUs = -1; - mState = UNINITIALIZED; mStartupSeekTimeUs = -1; return OK; @@ -277,6 +386,7 @@ status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) { int mode = request.readInt32(); return mPlayer->setVideoScalingMode(mode); } + default: { return INVALID_OPERATION; @@ -298,13 +408,45 @@ status_t NuPlayerDriver::getParameter(int key, Parcel *reply) { status_t NuPlayerDriver::getMetadata( const media::Metadata::Filter& ids, Parcel *records) { - return INVALID_OPERATION; + Mutex::Autolock autoLock(mLock); + + using media::Metadata; + + Metadata meta(records); + + meta.appendBool( + Metadata::kPauseAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_PAUSE); + + meta.appendBool( + Metadata::kSeekBackwardAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_SEEK_BACKWARD); + + meta.appendBool( + Metadata::kSeekForwardAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_SEEK_FORWARD); + + meta.appendBool( + Metadata::kSeekAvailable, + mPlayerFlags & NuPlayer::Source::FLAG_CAN_SEEK); + + return OK; } void NuPlayerDriver::notifyResetComplete() { Mutex::Autolock autoLock(mLock); - CHECK(mResetInProgress); - mResetInProgress = false; + + CHECK_EQ(mState, STATE_RESET_IN_PROGRESS); + mState = STATE_IDLE; + mCondition.broadcast(); +} + +void NuPlayerDriver::notifySetSurfaceComplete() { + Mutex::Autolock autoLock(mLock); + + CHECK(mSetSurfaceInProgress); + mSetSurfaceInProgress = false; + mCondition.broadcast(); } @@ -356,4 +498,50 @@ void NuPlayerDriver::notifyListener(int msg, int ext1, int ext2) { sendEvent(msg, ext1, ext2); } +void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) { + Mutex::Autolock autoLock(mLock); + + CHECK_EQ(mState, STATE_SET_DATASOURCE_PENDING); + + mAsyncResult = err; + mState = (err == OK) ? STATE_UNPREPARED : STATE_IDLE; + mCondition.broadcast(); +} + +void NuPlayerDriver::notifyPrepareCompleted(status_t err) { + Mutex::Autolock autoLock(mLock); + + if (mState != STATE_PREPARING) { + // We were preparing asynchronously when the client called + // reset(), we sent a premature "prepared" notification and + // then initiated the reset. This notification is stale. 
+ CHECK(mState == STATE_RESET_IN_PROGRESS || mState == STATE_IDLE); + return; + } + + CHECK_EQ(mState, STATE_PREPARING); + + mAsyncResult = err; + + if (err == OK) { + if (mIsAsyncPrepare) { + notifyListener(MEDIA_PREPARED); + } + mState = STATE_PREPARED; + } else { + if (mIsAsyncPrepare) { + notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); + } + mState = STATE_UNPREPARED; + } + + mCondition.broadcast(); +} + +void NuPlayerDriver::notifyFlagsChanged(uint32_t flags) { + Mutex::Autolock autoLock(mLock); + + mPlayerFlags = flags; +} + } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index 4a0026c..5df0cfb 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -38,7 +38,7 @@ struct NuPlayerDriver : public MediaPlayerInterface { virtual status_t setDataSource(const sp<IStreamSource> &source); virtual status_t setVideoSurfaceTexture( - const sp<ISurfaceTexture> &surfaceTexture); + const sp<IGraphicBufferProducer> &bufferProducer); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); @@ -61,23 +61,43 @@ struct NuPlayerDriver : public MediaPlayerInterface { virtual status_t dump(int fd, const Vector<String16> &args) const; + void notifySetDataSourceCompleted(status_t err); + void notifyPrepareCompleted(status_t err); void notifyResetComplete(); + void notifySetSurfaceComplete(); void notifyDuration(int64_t durationUs); void notifyPosition(int64_t positionUs); void notifySeekComplete(); void notifyFrameStats(int64_t numFramesTotal, int64_t numFramesDropped); void notifyListener(int msg, int ext1 = 0, int ext2 = 0); + void notifyFlagsChanged(uint32_t flags); protected: virtual ~NuPlayerDriver(); private: + enum State { + STATE_IDLE, + STATE_SET_DATASOURCE_PENDING, + STATE_UNPREPARED, + STATE_PREPARING, + STATE_PREPARED, + STATE_RUNNING, + STATE_PAUSED, + STATE_RESET_IN_PROGRESS, + }; + mutable Mutex mLock; Condition mCondition; + State mState; + + bool mIsAsyncPrepare; + status_t mAsyncResult; + // The following are protected through "mLock" // >>> - bool mResetInProgress; + bool mSetSurfaceInProgress; int64_t mDurationUs; int64_t mPositionUs; int64_t mNumFramesTotal; @@ -86,19 +106,14 @@ private: sp<ALooper> mLooper; sp<NuPlayer> mPlayer; + uint32_t mPlayerFlags; - enum State { - UNINITIALIZED, - STOPPED, - PLAYING, - PAUSED - }; - - State mState; bool mAtEOS; int64_t mStartupSeekTimeUs; + status_t prepare_l(); + DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index 8a75f83..404b56f 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -31,9 +31,11 @@ const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll; NuPlayer::Renderer::Renderer( const sp<MediaPlayerBase::AudioSink> &sink, - const sp<AMessage> &notify) + const sp<AMessage> &notify, + uint32_t flags) : mAudioSink(sink), mNotify(notify), + mFlags(flags), mNumFramesWritten(0), mDrainAudioQueuePending(false), mDrainVideoQueuePending(false), @@ -323,6 +325,11 @@ void NuPlayer::Renderer::postDrainVideoQueue() { if (entry.mBuffer == NULL) { // EOS doesn't carry a timestamp.
delayUs = 0; + } else if (mFlags & FLAG_REAL_TIME) { + int64_t mediaTimeUs; + CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + + delayUs = mediaTimeUs - ALooper::GetNowUs(); } else { int64_t mediaTimeUs; CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); @@ -368,12 +375,17 @@ void NuPlayer::Renderer::onDrainVideoQueue() { return; } - int64_t mediaTimeUs; - CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + int64_t realTimeUs; + if (mFlags & FLAG_REAL_TIME) { + CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs)); + } else { + int64_t mediaTimeUs; + CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + + realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs; + } - int64_t realTimeUs = mediaTimeUs - mAnchorTimeMediaUs + mAnchorTimeRealUs; mVideoLateByUs = ALooper::GetNowUs() - realTimeUs; - bool tooLate = (mVideoLateByUs > 40000); if (tooLate) { @@ -512,9 +524,15 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { entry.mFinalResult = finalResult; if (audio) { + if (mAudioQueue.empty() && mSyncQueues) { + syncQueuesDone(); + } mAudioQueue.push_back(entry); postDrainAudioQueue(); } else { + if (mVideoQueue.empty() && mSyncQueues) { + syncQueuesDone(); + } mVideoQueue.push_back(entry); postDrainVideoQueue(); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h index e4368c7..c9796e2 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h @@ -25,8 +25,12 @@ namespace android { struct ABuffer; struct NuPlayer::Renderer : public AHandler { + enum Flags { + FLAG_REAL_TIME = 1, + }; Renderer(const sp<MediaPlayerBase::AudioSink> &sink, - const sp<AMessage> &notify); + const sp<AMessage> &notify, + uint32_t flags = 0); void queueBuffer( bool audio, @@ -79,6 +83,7 @@ private: sp<MediaPlayerBase::AudioSink> mAudioSink; sp<AMessage> mNotify; + uint32_t mFlags; List<QueueEntry> mAudioQueue; List<QueueEntry> mVideoQueue; uint32_t mNumFramesWritten; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index a635340..1cbf575 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -20,20 +20,42 @@ #include "NuPlayer.h" +#include <media/stagefright/foundation/AMessage.h> + namespace android { struct ABuffer; +struct MetaData; -struct NuPlayer::Source : public RefBase { +struct NuPlayer::Source : public AHandler { enum Flags { - FLAG_SEEKABLE = 1, - FLAG_DYNAMIC_DURATION = 2, + FLAG_CAN_PAUSE = 1, + FLAG_CAN_SEEK_BACKWARD = 2, // the "10 sec back button" + FLAG_CAN_SEEK_FORWARD = 4, // the "10 sec forward button" + FLAG_CAN_SEEK = 8, // the "seek bar" + FLAG_DYNAMIC_DURATION = 16, + }; + + enum { + kWhatPrepared, + kWhatFlagsChanged, + kWhatVideoSizeChanged, + kWhatBufferingStart, + kWhatBufferingEnd, }; - Source() {} + // The provides message is used to notify the player about various + // events. + Source(const sp<AMessage> &notify) + : mNotify(notify) { + } + + virtual void prepareAsync() = 0; virtual void start() = 0; virtual void stop() {} + virtual void pause() {} + virtual void resume() {} // Returns OK iff more data was available, // an error or ERROR_END_OF_STREAM if not.
@@ -52,14 +74,26 @@ struct NuPlayer::Source : public RefBase { return INVALID_OPERATION; } - virtual uint32_t flags() const = 0; + virtual bool isRealTime() const { + return false; + } protected: virtual ~Source() {} + virtual void onMessageReceived(const sp<AMessage> &msg); + virtual sp<MetaData> getFormatMeta(bool audio) { return NULL; } + sp<AMessage> dupNotify() const { return mNotify->dup(); } + + void notifyFlagsChanged(uint32_t flags); + void notifyVideoSizeChanged(int32_t width, int32_t height); + void notifyPrepared(status_t err = OK); + private: + sp<AMessage> mNotify; + DISALLOW_EVIL_CONSTRUCTORS(Source); }; diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index cf455bd..50ebf9c 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -22,26 +22,35 @@ #include "AnotherPacketSource.h" #include "MyHandler.h" +#include "SDPLoader.h" #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MetaData.h> namespace android { +const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs + NuPlayer::RTSPSource::RTSPSource( + const sp<AMessage> &notify, const char *url, const KeyedVector<String8, String8> *headers, bool uidValid, - uid_t uid) - : mURL(url), + uid_t uid, + bool isSDP) + : Source(notify), + mURL(url), mUIDValid(uidValid), mUID(uid), mFlags(0), + mIsSDP(isSDP), mState(DISCONNECTED), mFinalResult(OK), mDisconnectReplyID(0), - mStartingUp(true), - mSeekGeneration(0) { + mBuffering(true), + mSeekGeneration(0), + mEOSTimeoutAudio(0), + mEOSTimeoutVideo(0) { if (headers) { mExtraHeaders = *headers; @@ -62,7 +71,7 @@ NuPlayer::RTSPSource::~RTSPSource() { } } -void NuPlayer::RTSPSource::start() { +void NuPlayer::RTSPSource::prepareAsync() { if (mLooper == NULL) { mLooper = new ALooper; mLooper->setName("rtsp"); @@ -73,25 +82,64 @@ } CHECK(mHandler == NULL); + CHECK(mSDPLoader == NULL); sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id()); - mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID); - mLooper->registerHandler(mHandler); - CHECK_EQ(mState, (int)DISCONNECTED); mState = CONNECTING; - mHandler->connect(); + if (mIsSDP) { + mSDPLoader = new SDPLoader(notify, + (mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0, + mUIDValid, mUID); + + mSDPLoader->load( + mURL.c_str(), mExtraHeaders.isEmpty() ?
NULL : &mExtraHeaders); + } else { + mHandler = new MyHandler(mURL.c_str(), notify, mUIDValid, mUID); + mLooper->registerHandler(mHandler); + + mHandler->connect(); + } + + sp<AMessage> notifyStart = dupNotify(); + notifyStart->setInt32("what", kWhatBufferingStart); + notifyStart->post(); +} + +void NuPlayer::RTSPSource::start() { } void NuPlayer::RTSPSource::stop() { + if (mLooper == NULL) { + return; + } sp<AMessage> msg = new AMessage(kWhatDisconnect, mReflector->id()); sp<AMessage> dummy; msg->postAndAwaitResponse(&dummy); } +void NuPlayer::RTSPSource::pause() { + int64_t mediaDurationUs = 0; + getDuration(&mediaDurationUs); + for (size_t index = 0; index < mTracks.size(); index++) { + TrackInfo *info = &mTracks.editItemAt(index); + sp<AnotherPacketSource> source = info->mSource; + + // Check if EOS or ERROR is received + if (source != NULL && source->isFinished(mediaDurationUs)) { + return; + } + } + mHandler->pause(); +} + +void NuPlayer::RTSPSource::resume() { + mHandler->resume(); +} + status_t NuPlayer::RTSPSource::feedMoreTSData() { return mFinalResult; } @@ -112,6 +160,13 @@ bool NuPlayer::RTSPSource::haveSufficientDataOnAllTracks() { static const int64_t kMinDurationUs = 2000000ll; + int64_t mediaDurationUs = 0; + getDuration(&mediaDurationUs); + if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs)) + || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) { + return true; + } + status_t err; int64_t durationUs; if (mAudioTrack != NULL @@ -137,12 +192,16 @@ bool NuPlayer::RTSPSource::haveSufficientDataOnAllTracks() { status_t NuPlayer::RTSPSource::dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) { - if (mStartingUp) { + if (mBuffering) { if (!haveSufficientDataOnAllTracks()) { return -EWOULDBLOCK; } - mStartingUp = false; + mBuffering = false; + + sp<AMessage> notify = dupNotify(); + notify->setInt32("what", kWhatBufferingEnd); + notify->post(); } sp<AnotherPacketSource> source = getSource(audio); @@ -153,9 +212,51 @@ status_t NuPlayer::RTSPSource::dequeueAccessUnit( status_t finalResult; if (!source->hasBufferAvailable(&finalResult)) { - return finalResult == OK ? -EWOULDBLOCK : finalResult; + if (finalResult == OK) { + int64_t mediaDurationUs = 0; + getDuration(&mediaDurationUs); + sp<AnotherPacketSource> otherSource = getSource(!audio); + status_t otherFinalResult; + + // If other source already signaled EOS, this source should also signal EOS + if (otherSource != NULL && + !otherSource->hasBufferAvailable(&otherFinalResult) && + otherFinalResult == ERROR_END_OF_STREAM) { + source->signalEOS(ERROR_END_OF_STREAM); + return ERROR_END_OF_STREAM; + } + + // If this source has detected near end, give it some time to retrieve more + // data before signaling EOS + if (source->isFinished(mediaDurationUs)) { + int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo; + if (eosTimeout == 0) { + setEOSTimeout(audio, ALooper::GetNowUs()); + } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) { + setEOSTimeout(audio, 0); + source->signalEOS(ERROR_END_OF_STREAM); + return ERROR_END_OF_STREAM; + } + return -EWOULDBLOCK; + } + + if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) { + // We should not enter buffering mode + // if any of the sources already have detected EOS. 
+ mBuffering = true; + + sp<AMessage> notify = dupNotify(); + notify->setInt32("what", kWhatBufferingStart); + notify->post(); + } + + return -EWOULDBLOCK; + } + return finalResult; } + setEOSTimeout(audio, 0); + return source->dequeueAccessUnit(accessUnit); } @@ -170,6 +271,14 @@ sp<AnotherPacketSource> NuPlayer::RTSPSource::getSource(bool audio) { return audio ? mAudioTrack : mVideoTrack; } +void NuPlayer::RTSPSource::setEOSTimeout(bool audio, int64_t timeout) { + if (audio) { + mEOSTimeoutAudio = timeout; + } else { + mEOSTimeoutVideo = timeout; + } +} + status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) { *durationUs = 0ll; @@ -210,10 +319,6 @@ void NuPlayer::RTSPSource::performSeek(int64_t seekTimeUs) { mHandler->seek(seekTimeUs); } -uint32_t NuPlayer::RTSPSource::flags() const { - return FLAG_SEEKABLE; -} - void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) { if (msg->what() == kWhatDisconnect) { uint32_t replyID; @@ -245,17 +350,35 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) { switch (what) { case MyHandler::kWhatConnected: + { onConnected(); + + notifyVideoSizeChanged(0, 0); + + uint32_t flags = 0; + + if (mHandler->isSeekable()) { + flags = FLAG_CAN_PAUSE | FLAG_CAN_SEEK; + + // Seeking 10secs forward or backward is a very expensive + // operation for rtsp, so let's not enable that. + // The user can always use the seek bar. + } + + notifyFlagsChanged(flags); + notifyPrepared(); break; + } case MyHandler::kWhatDisconnected: + { onDisconnected(msg); break; + } case MyHandler::kWhatSeekDone: { mState = CONNECTED; - mStartingUp = true; break; } @@ -405,6 +528,12 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) { break; } + case SDPLoader::kWhatSDPLoaded: + { + onSDPLoaded(msg); + break; + } + default: TRESPASS(); } @@ -458,6 +587,52 @@ void NuPlayer::RTSPSource::onConnected() { mState = CONNECTED; } +void NuPlayer::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) { + status_t err; + CHECK(msg->findInt32("result", &err)); + + mSDPLoader.clear(); + + if (mDisconnectReplyID != 0) { + err = UNKNOWN_ERROR; + } + + if (err == OK) { + sp<ASessionDescription> desc; + sp<RefBase> obj; + CHECK(msg->findObject("description", &obj)); + desc = static_cast<ASessionDescription *>(obj.get()); + + AString rtspUri; + if (!desc->findAttribute(0, "a=control", &rtspUri)) { + ALOGE("Unable to find url in SDP"); + err = UNKNOWN_ERROR; + } else { + sp<AMessage> notify = new AMessage(kWhatNotify, mReflector->id()); + + mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID); + mLooper->registerHandler(mHandler); + + mHandler->loadSDP(desc); + } + } + + if (err != OK) { + if (mState == CONNECTING) { + // We're still in the preparation phase, signal that it + // failed. + notifyPrepared(err); + } + + mState = DISCONNECTED; + mFinalResult = err; + + if (mDisconnectReplyID != 0) { + finishDisconnectIfPossible(); + } + } +} + void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) { status_t err; CHECK(msg->findInt32("result", &err)); @@ -466,6 +641,12 @@ void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) { mLooper->unregisterHandler(mHandler->id()); mHandler.clear(); + if (mState == CONNECTING) { + // We're still in the preparation phase, signal that it + // failed. 
+ notifyPrepared(err); + } + mState = DISCONNECTED; mFinalResult = err; @@ -476,7 +657,11 @@ void NuPlayer::RTSPSource::onDisconnected(const sp<AMessage> &msg) { void NuPlayer::RTSPSource::finishDisconnectIfPossible() { if (mState != DISCONNECTED) { - mHandler->disconnect(); + if (mHandler != NULL) { + mHandler->disconnect(); + } else if (mSDPLoader != NULL) { + mSDPLoader->cancel(); + } return; } diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index 779d791..8cf34a0 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ -29,16 +29,22 @@ namespace android { struct ALooper; struct AnotherPacketSource; struct MyHandler; +struct SDPLoader; struct NuPlayer::RTSPSource : public NuPlayer::Source { RTSPSource( + const sp<AMessage> &notify, const char *url, const KeyedVector<String8, String8> *headers, bool uidValid = false, - uid_t uid = 0); + uid_t uid = 0, + bool isSDP = false); + virtual void prepareAsync(); virtual void start(); virtual void stop(); + virtual void pause(); + virtual void resume(); virtual status_t feedMoreTSData(); @@ -47,8 +53,6 @@ struct NuPlayer::RTSPSource : public NuPlayer::Source { virtual status_t getDuration(int64_t *durationUs); virtual status_t seekTo(int64_t seekTimeUs); - virtual uint32_t flags() const; - void onMessageReceived(const sp<AMessage> &msg); protected: @@ -89,14 +93,16 @@ private: bool mUIDValid; uid_t mUID; uint32_t mFlags; + bool mIsSDP; State mState; status_t mFinalResult; uint32_t mDisconnectReplyID; - bool mStartingUp; + bool mBuffering; sp<ALooper> mLooper; sp<AHandlerReflector<RTSPSource> > mReflector; sp<MyHandler> mHandler; + sp<SDPLoader> mSDPLoader; Vector<TrackInfo> mTracks; sp<AnotherPacketSource> mAudioTrack; @@ -106,9 +112,13 @@ private: int32_t mSeekGeneration; + int64_t mEOSTimeoutAudio; + int64_t mEOSTimeoutVideo; + sp<AnotherPacketSource> getSource(bool audio); void onConnected(); + void onSDPLoaded(const sp<AMessage> &msg); void onDisconnected(const sp<AMessage> &msg); void finishDisconnectIfPossible(); @@ -116,6 +126,8 @@ private: bool haveSufficientDataOnAllTracks(); + void setEOSTimeout(bool audio, int64_t timeout); + DISALLOW_EVIL_CONSTRUCTORS(RTSPSource); }; diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp index 7159404..28f0d50 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp @@ -32,14 +32,23 @@ namespace android { -NuPlayer::StreamingSource::StreamingSource(const sp<IStreamSource> &source) - : mSource(source), +NuPlayer::StreamingSource::StreamingSource( + const sp<AMessage> &notify, + const sp<IStreamSource> &source) + : Source(notify), + mSource(source), mFinalResult(OK) { } NuPlayer::StreamingSource::~StreamingSource() { } +void NuPlayer::StreamingSource::prepareAsync() { + notifyVideoSizeChanged(0, 0); + notifyFlagsChanged(0); + notifyPrepared(); +} + void NuPlayer::StreamingSource::start() { mStreamListener = new NuPlayerStreamListener(mSource, 0); @@ -173,8 +182,8 @@ status_t NuPlayer::StreamingSource::dequeueAccessUnit( return err; } -uint32_t NuPlayer::StreamingSource::flags() const { - return 0; +bool NuPlayer::StreamingSource::isRealTime() const { + return mSource->flags() & IStreamSource::kFlagIsRealTimeData; } } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h
b/media/libmediaplayerservice/nuplayer/StreamingSource.h index a27b58a..412b6c4 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.h +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h @@ -27,15 +27,18 @@ struct ABuffer; struct ATSParser; struct NuPlayer::StreamingSource : public NuPlayer::Source { - StreamingSource(const sp<IStreamSource> &source); + StreamingSource( + const sp<AMessage> &notify, + const sp<IStreamSource> &source); + virtual void prepareAsync(); virtual void start(); virtual status_t feedMoreTSData(); virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit); - virtual uint32_t flags() const; + virtual bool isRealTime() const; protected: virtual ~StreamingSource(); diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp index a62d5a2..d31d947 100644 --- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp +++ b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp @@ -104,8 +104,10 @@ private: DISALLOW_EVIL_CONSTRUCTORS(StreamSource); }; -MP4Source::MP4Source(const sp<IStreamSource> &source) - : mSource(source), +MP4Source::MP4Source( + const sp<AMessage> &notify, const sp<IStreamSource> &source) + : Source(notify), + mSource(source), mLooper(new ALooper), mParser(new FragmentedMP4Parser), mEOS(false) { @@ -115,6 +117,12 @@ MP4Source::MP4Source(const sp<IStreamSource> &source) MP4Source::~MP4Source() { } +void MP4Source::prepareAsync() { + notifyVideoSizeChanged(0, 0); + notifyFlagsChanged(0); + notifyPrepared(); +} + void MP4Source::start() { mLooper->start(false /* runOnCallingThread */); mParser->start(new StreamSource(mSource)); @@ -133,8 +141,4 @@ status_t MP4Source::dequeueAccessUnit( return mParser->dequeueAccessUnit(audio, accessUnit); } -uint32_t MP4Source::flags() const { - return 0; -} - } // namespace android diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h index abca236..a6ef622 100644 --- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h +++ b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h @@ -24,8 +24,9 @@ namespace android { struct FragmentedMP4Parser; struct MP4Source : public NuPlayer::Source { - MP4Source(const sp<IStreamSource> &source); + MP4Source(const sp<AMessage> &notify, const sp<IStreamSource> &source); + virtual void prepareAsync(); virtual void start(); virtual status_t feedMoreTSData(); @@ -35,8 +36,6 @@ struct MP4Source : public NuPlayer::Source { virtual status_t dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit); - virtual uint32_t flags() const; - protected: virtual ~MP4Source(); diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk index 757272f..5d00d15 100644 --- a/media/libnbaio/Android.mk +++ b/media/libnbaio/Android.mk @@ -14,6 +14,8 @@ LOCAL_SRC_FILES := \ roundup.c \ SourceAudioBufferProvider.cpp +LOCAL_SRC_FILES += NBLog.cpp + # libsndfile license is incompatible; uncomment to use for local debug only #LOCAL_SRC_FILES += LibsndfileSink.cpp LibsndfileSource.cpp #LOCAL_C_INCLUDES += path/to/libsndfile/src @@ -25,8 +27,10 @@ LOCAL_SRC_FILES := \ LOCAL_MODULE := libnbaio LOCAL_SHARED_LIBRARIES := \ + libbinder \ libcommon_time_client \ libcutils \ - libutils + libutils \ + liblog include $(BUILD_SHARED_LIBRARY) diff --git a/media/libnbaio/NBAIO.cpp b/media/libnbaio/NBAIO.cpp index 00d2017..e0d2c21 100644 --- a/media/libnbaio/NBAIO.cpp +++ b/media/libnbaio/NBAIO.cpp @@ -24,44 +24,55 @@ namespace android { size_t
Format_frameSize(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C2_I16: - case Format_SR48_C2_I16: - return 2 * sizeof(short); - case Format_SR44_1_C1_I16: - case Format_SR48_C1_I16: - return 1 * sizeof(short); - case Format_Invalid: - default: - return 0; - } + return Format_channelCount(format) * sizeof(short); } size_t Format_frameBitShift(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C2_I16: - case Format_SR48_C2_I16: - return 2; // 1 << 2 == 2 * sizeof(short) - case Format_SR44_1_C1_I16: - case Format_SR48_C1_I16: - return 1; // 1 << 1 == 1 * sizeof(short) - case Format_Invalid: - default: - return 0; - } + // sizeof(short) == 2, so frame size == 1 << channels + return Format_channelCount(format); } +enum { + Format_SR_8000, + Format_SR_11025, + Format_SR_16000, + Format_SR_22050, + Format_SR_24000, + Format_SR_32000, + Format_SR_44100, + Format_SR_48000, + Format_SR_Mask = 7 +}; + +enum { + Format_C_1 = 0x08, + Format_C_2 = 0x10, + Format_C_Mask = 0x18 +}; + unsigned Format_sampleRate(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C1_I16: - case Format_SR44_1_C2_I16: + if (format == Format_Invalid) { + return 0; + } + switch (format & Format_SR_Mask) { + case Format_SR_8000: + return 8000; + case Format_SR_11025: + return 11025; + case Format_SR_16000: + return 16000; + case Format_SR_22050: + return 22050; + case Format_SR_24000: + return 24000; + case Format_SR_32000: + return 32000; + case Format_SR_44100: return 44100; - case Format_SR48_C1_I16: - case Format_SR48_C2_I16: + case Format_SR_48000: return 48000; - case Format_Invalid: default: return 0; } @@ -69,14 +80,14 @@ unsigned Format_sampleRate(NBAIO_Format format) unsigned Format_channelCount(NBAIO_Format format) { - switch (format) { - case Format_SR44_1_C1_I16: - case Format_SR48_C1_I16: + if (format == Format_Invalid) { + return 0; + } + switch (format & Format_C_Mask) { + case Format_C_1: return 1; - case Format_SR44_1_C2_I16: - case Format_SR48_C2_I16: + case Format_C_2: return 2; - case Format_Invalid: default: return 0; } @@ -84,11 +95,46 @@ unsigned Format_channelCount(NBAIO_Format format) NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount) { - if (sampleRate == 44100 && channelCount == 2) return Format_SR44_1_C2_I16; - if (sampleRate == 48000 && channelCount == 2) return Format_SR48_C2_I16; - if (sampleRate == 44100 && channelCount == 1) return Format_SR44_1_C1_I16; - if (sampleRate == 48000 && channelCount == 1) return Format_SR48_C1_I16; - return Format_Invalid; + NBAIO_Format format; + switch (sampleRate) { + case 8000: + format = Format_SR_8000; + break; + case 11025: + format = Format_SR_11025; + break; + case 16000: + format = Format_SR_16000; + break; + case 22050: + format = Format_SR_22050; + break; + case 24000: + format = Format_SR_24000; + break; + case 32000: + format = Format_SR_32000; + break; + case 44100: + format = Format_SR_44100; + break; + case 48000: + format = Format_SR_48000; + break; + default: + return Format_Invalid; + } + switch (channelCount) { + case 1: + format |= Format_C_1; + break; + case 2: + format |= Format_C_2; + break; + default: + return Format_Invalid; + } + return format; } // This is a default implementation; it is expected that subclasses will optimize this. 
diff --git a/media/libnbaio/NBLog.cpp b/media/libnbaio/NBLog.cpp new file mode 100644 index 0000000..045bf64 --- /dev/null +++ b/media/libnbaio/NBLog.cpp @@ -0,0 +1,447 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "NBLog" +//#define LOG_NDEBUG 0 + +#include <stdarg.h> +#include <stdint.h> +#include <stdio.h> +#include <string.h> +#include <time.h> +#include <new> +#include <cutils/atomic.h> +#include <media/nbaio/NBLog.h> +#include <utils/Log.h> + +namespace android { + +int NBLog::Entry::readAt(size_t offset) const +{ + // FIXME This is too slow, despite the name it is used during writing + if (offset == 0) + return mEvent; + else if (offset == 1) + return mLength; + else if (offset < (size_t) (mLength + 2)) + return ((char *) mData)[offset - 2]; + else if (offset == (size_t) (mLength + 2)) + return mLength; + else + return 0; +} + +// --------------------------------------------------------------------------- + +#if 0 // FIXME see note in NBLog.h +NBLog::Timeline::Timeline(size_t size, void *shared) + : mSize(roundup(size)), mOwn(shared == NULL), + mShared((Shared *) (mOwn ? new char[sharedSize(size)] : shared)) +{ + new (mShared) Shared; +} + +NBLog::Timeline::~Timeline() +{ + mShared->~Shared(); + if (mOwn) { + delete[] (char *) mShared; + } +} +#endif + +/*static*/ +size_t NBLog::Timeline::sharedSize(size_t size) +{ + return sizeof(Shared) + roundup(size); +} + +// --------------------------------------------------------------------------- + +NBLog::Writer::Writer() + : mSize(0), mShared(NULL), mRear(0), mEnabled(false) +{ +} + +NBLog::Writer::Writer(size_t size, void *shared) + : mSize(roundup(size)), mShared((Shared *) shared), mRear(0), mEnabled(mShared != NULL) +{ +} + +NBLog::Writer::Writer(size_t size, const sp<IMemory>& iMemory) + : mSize(roundup(size)), mShared(iMemory != 0 ? (Shared *) iMemory->pointer() : NULL), + mIMemory(iMemory), mRear(0), mEnabled(mShared != NULL) +{ +} + +void NBLog::Writer::log(const char *string) +{ + if (!mEnabled) { + return; + } + size_t length = strlen(string); + if (length > 255) { + length = 255; + } + log(EVENT_STRING, string, length); +} + +void NBLog::Writer::logf(const char *fmt, ...) 
+{ + if (!mEnabled) { + return; + } + va_list ap; + va_start(ap, fmt); + Writer::logvf(fmt, ap); // the Writer:: is needed to avoid virtual dispatch for LockedWriter + va_end(ap); +} + +void NBLog::Writer::logvf(const char *fmt, va_list ap) +{ + if (!mEnabled) { + return; + } + char buffer[256]; + int length = vsnprintf(buffer, sizeof(buffer), fmt, ap); + if (length >= (int) sizeof(buffer)) { + length = sizeof(buffer) - 1; + // NUL termination is not required + // buffer[length] = '\0'; + } + if (length >= 0) { + log(EVENT_STRING, buffer, length); + } +} + +void NBLog::Writer::logTimestamp() +{ + if (!mEnabled) { + return; + } + struct timespec ts; + if (!clock_gettime(CLOCK_MONOTONIC, &ts)) { + log(EVENT_TIMESTAMP, &ts, sizeof(struct timespec)); + } +} + +void NBLog::Writer::logTimestamp(const struct timespec& ts) +{ + if (!mEnabled) { + return; + } + log(EVENT_TIMESTAMP, &ts, sizeof(struct timespec)); +} + +void NBLog::Writer::log(Event event, const void *data, size_t length) +{ + if (!mEnabled) { + return; + } + if (data == NULL || length > 255) { + return; + } + switch (event) { + case EVENT_STRING: + case EVENT_TIMESTAMP: + break; + case EVENT_RESERVED: + default: + return; + } + Entry entry(event, data, length); + log(&entry, true /*trusted*/); +} + +void NBLog::Writer::log(const NBLog::Entry *entry, bool trusted) +{ + if (!mEnabled) { + return; + } + if (!trusted) { + log(entry->mEvent, entry->mData, entry->mLength); + return; + } + size_t rear = mRear & (mSize - 1); + size_t written = mSize - rear; // written = number of bytes that have been written so far + size_t need = entry->mLength + 3; // mEvent, mLength, data[length], mLength + // need = number of bytes remaining to write + if (written > need) { + written = need; + } + size_t i; + // FIXME optimize this using memcpy for the data part of the Entry. + // The Entry could have a method copyTo(ptr, offset, size) to optimize the copy. + for (i = 0; i < written; ++i) { + mShared->mBuffer[rear + i] = entry->readAt(i); + } + if (rear + written == mSize && (need -= written) > 0) { + for (i = 0; i < need; ++i) { + mShared->mBuffer[i] = entry->readAt(written + i); + } + written += need; + } + android_atomic_release_store(mRear += written, &mShared->mRear); +} + +bool NBLog::Writer::isEnabled() const +{ + return mEnabled; +} + +bool NBLog::Writer::setEnabled(bool enabled) +{ + bool old = mEnabled; + mEnabled = enabled && mShared != NULL; + return old; +} + +// --------------------------------------------------------------------------- + +NBLog::LockedWriter::LockedWriter() + : Writer() +{ +} + +NBLog::LockedWriter::LockedWriter(size_t size, void *shared) + : Writer(size, shared) +{ +} + +void NBLog::LockedWriter::log(const char *string) +{ + Mutex::Autolock _l(mLock); + Writer::log(string); +} + +void NBLog::LockedWriter::logf(const char *fmt, ...) 
+{ + // FIXME should not take the lock until after formatting is done + Mutex::Autolock _l(mLock); + va_list ap; + va_start(ap, fmt); + Writer::logvf(fmt, ap); + va_end(ap); +} + +void NBLog::LockedWriter::logvf(const char *fmt, va_list ap) +{ + // FIXME should not take the lock until after formatting is done + Mutex::Autolock _l(mLock); + Writer::logvf(fmt, ap); +} + +void NBLog::LockedWriter::logTimestamp() +{ + // FIXME should not take the lock until after the clock_gettime() syscall + Mutex::Autolock _l(mLock); + Writer::logTimestamp(); +} + +void NBLog::LockedWriter::logTimestamp(const struct timespec& ts) +{ + Mutex::Autolock _l(mLock); + Writer::logTimestamp(ts); +} + +bool NBLog::LockedWriter::isEnabled() const +{ + Mutex::Autolock _l(mLock); + return Writer::isEnabled(); +} + +bool NBLog::LockedWriter::setEnabled(bool enabled) +{ + Mutex::Autolock _l(mLock); + return Writer::setEnabled(enabled); +} + +// --------------------------------------------------------------------------- + +NBLog::Reader::Reader(size_t size, const void *shared) + : mSize(roundup(size)), mShared((const Shared *) shared), mFront(0) +{ +} + +NBLog::Reader::Reader(size_t size, const sp<IMemory>& iMemory) + : mSize(roundup(size)), mShared(iMemory != 0 ? (const Shared *) iMemory->pointer() : NULL), + mIMemory(iMemory), mFront(0) +{ +} + +void NBLog::Reader::dump(int fd, size_t indent) +{ + int32_t rear = android_atomic_acquire_load(&mShared->mRear); + size_t avail = rear - mFront; + if (avail == 0) { + return; + } + size_t lost = 0; + if (avail > mSize) { + lost = avail - mSize; + mFront += lost; + avail = mSize; + } + size_t remaining = avail; // remaining = number of bytes left to read + size_t front = mFront & (mSize - 1); + size_t read = mSize - front; // read = number of bytes that have been read so far + if (read > remaining) { + read = remaining; + } + // make a copy to avoid race condition with writer + uint8_t *copy = new uint8_t[avail]; + // copy first part of circular buffer up until the wraparound point + memcpy(copy, &mShared->mBuffer[front], read); + if (front + read == mSize) { + if ((remaining -= read) > 0) { + // copy second part of circular buffer starting at beginning + memcpy(&copy[read], mShared->mBuffer, remaining); + read += remaining; + // remaining = 0 but not necessary + } + } + mFront += read; + size_t i = avail; + Event event; + size_t length; + struct timespec ts; + time_t maxSec = -1; + while (i >= 3) { + length = copy[i - 1]; + if (length + 3 > i || copy[i - length - 2] != length) { + break; + } + event = (Event) copy[i - length - 3]; + if (event == EVENT_TIMESTAMP) { + if (length != sizeof(struct timespec)) { + // corrupt + break; + } + memcpy(&ts, &copy[i - length - 1], sizeof(struct timespec)); + if (ts.tv_sec > maxSec) { + maxSec = ts.tv_sec; + } + } + i -= length + 3; + } + if (i > 0) { + lost += i; + if (fd >= 0) { + fdprintf(fd, "%*swarning: lost %u bytes worth of events\n", indent, "", lost); + } else { + ALOGI("%*swarning: lost %u bytes worth of events\n", indent, "", lost); + } + } + size_t width = 1; + while (maxSec >= 10) { + ++width; + maxSec /= 10; + } + char prefix[32]; + if (maxSec >= 0) { + snprintf(prefix, sizeof(prefix), "[%*s] ", width + 4, ""); + } else { + prefix[0] = '\0'; + } + while (i < avail) { + event = (Event) copy[i]; + length = copy[i + 1]; + const void *data = &copy[i + 2]; + size_t advance = length + 3; + switch (event) { + case EVENT_STRING: + if (fd >= 0) { + fdprintf(fd, "%*s%s%.*s\n", indent, "", prefix, length, (const char *) data); + } else {
ALOGI("%*s%s%.*s", indent, "", prefix, length, (const char *) data); + } break; + case EVENT_TIMESTAMP: { + // already checked that length == sizeof(struct timespec); + memcpy(&ts, data, sizeof(struct timespec)); + long prevNsec = ts.tv_nsec; + long deltaMin = LONG_MAX; + long deltaMax = -1; + long deltaTotal = 0; + size_t j = i; + for (;;) { + j += sizeof(struct timespec) + 3; + if (j >= avail || (Event) copy[j] != EVENT_TIMESTAMP) { + break; + } + struct timespec tsNext; + memcpy(&tsNext, ©[j + 2], sizeof(struct timespec)); + if (tsNext.tv_sec != ts.tv_sec) { + break; + } + long delta = tsNext.tv_nsec - prevNsec; + if (delta < 0) { + break; + } + if (delta < deltaMin) { + deltaMin = delta; + } + if (delta > deltaMax) { + deltaMax = delta; + } + deltaTotal += delta; + prevNsec = tsNext.tv_nsec; + } + size_t n = (j - i) / (sizeof(struct timespec) + 3); + if (n >= kSquashTimestamp) { + if (fd >= 0) { + fdprintf(fd, "%*s[%d.%03d to .%.03d by .%.03d to .%.03d]\n", indent, "", + (int) ts.tv_sec, (int) (ts.tv_nsec / 1000000), + (int) ((ts.tv_nsec + deltaTotal) / 1000000), + (int) (deltaMin / 1000000), (int) (deltaMax / 1000000)); + } else { + ALOGI("%*s[%d.%03d to .%.03d by .%.03d to .%.03d]\n", indent, "", + (int) ts.tv_sec, (int) (ts.tv_nsec / 1000000), + (int) ((ts.tv_nsec + deltaTotal) / 1000000), + (int) (deltaMin / 1000000), (int) (deltaMax / 1000000)); + } + i = j; + advance = 0; + break; + } + if (fd >= 0) { + fdprintf(fd, "%*s[%d.%03d]\n", indent, "", (int) ts.tv_sec, + (int) (ts.tv_nsec / 1000000)); + } else { + ALOGI("%*s[%d.%03d]", indent, "", (int) ts.tv_sec, + (int) (ts.tv_nsec / 1000000)); + } + } break; + case EVENT_RESERVED: + default: + if (fd >= 0) { + fdprintf(fd, "%*s%swarning: unknown event %d\n", indent, "", prefix, event); + } else { + ALOGI("%*s%swarning: unknown event %d", indent, "", prefix, event); + } + break; + } + i += advance; + } + // FIXME it would be more efficient to put a char mCopy[256] as a member variable of the dumper + delete[] copy; +} + +bool NBLog::Reader::isIMemory(const sp<IMemory>& iMemory) const +{ + return iMemory.get() == mIMemory.get(); +} + +} // namespace android diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index a01d03f..cf41cf2 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -26,6 +26,7 @@ #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/BufferProducerWrapper.h> #include <media/stagefright/MediaCodecList.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/NativeWindowWrapper.h> @@ -165,6 +166,24 @@ private: //////////////////////////////////////////////////////////////////////////////// +struct ACodec::DeathNotifier : public IBinder::DeathRecipient { + DeathNotifier(const sp<AMessage> ¬ify) + : mNotify(notify) { + } + + virtual void binderDied(const wp<IBinder> &) { + mNotify->post(); + } + +protected: + virtual ~DeathNotifier() {} + +private: + sp<AMessage> mNotify; + + DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); +}; + struct ACodec::UninitializedState : public ACodec::BaseState { UninitializedState(ACodec *codec); @@ -176,6 +195,8 @@ private: void onSetup(const sp<AMessage> &msg); bool onAllocateComponent(const sp<AMessage> &msg); + sp<DeathNotifier> mDeathNotifier; + DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); }; @@ -192,6 +213,7 @@ private: friend struct ACodec::UninitializedState; bool onConfigureComponent(const sp<AMessage> &msg); + void 
onCreateInputSurface(const sp<AMessage> &msg); void onStart(); void onShutdown(bool keepComponentAllocated); @@ -374,6 +396,12 @@ void ACodec::initiateSetup(const sp<AMessage> &msg) { msg->post(); } +void ACodec::signalSetParameters(const sp<AMessage> &params) { + sp<AMessage> msg = new AMessage(kWhatSetParameters, id()); + msg->setMessage("params", params); + msg->post(); +} + void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { msg->setWhat(kWhatAllocateComponent); msg->setTarget(id()); @@ -386,6 +414,14 @@ void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { msg->post(); } +void ACodec::initiateCreateInputSurface() { + (new AMessage(kWhatCreateInputSurface, id()))->post(); +} + +void ACodec::signalEndOfInputStream() { + (new AMessage(kWhatSignalEndOfInputStream, id()))->post(); +} + void ACodec::initiateStart() { (new AMessage(kWhatStart, id()))->post(); } @@ -612,7 +648,7 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); BufferInfo info; info.mStatus = BufferInfo::OWNED_BY_US; - info.mData = new ABuffer(0); + info.mData = new ABuffer(NULL /* data */, def.nBufferSize /* capacity */); info.mGraphicBuffer = graphicBuffer; mBuffers[kPortIndexOutput].push(info); @@ -712,12 +748,10 @@ status_t ACodec::freeOutputBuffersNotOwnedByComponent() { BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); - if (info->mStatus != - BufferInfo::OWNED_BY_COMPONENT) { - // We shouldn't have sent out any buffers to the client at this - // point. - CHECK_NE((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM); - + // At this time some buffers may still be with the component + // or being drained. + if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && + info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i)); } } @@ -803,6 +837,8 @@ status_t ACodec::setComponentRole( "audio_decoder.raw", "audio_encoder.raw" }, { MEDIA_MIMETYPE_AUDIO_FLAC, "audio_decoder.flac", "audio_encoder.flac" }, + { MEDIA_MIMETYPE_AUDIO_MSGSM, + "audio_decoder.gsm", "audio_encoder.gsm" }, }; static const size_t kNumMimeToRole = @@ -922,6 +958,19 @@ status_t ACodec::configureCodec( err = setupVideoDecoder(mime, width, height); } } + } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { + int32_t numChannels, sampleRate; + if (!msg->findInt32("channel-count", &numChannels) + || !msg->findInt32("sample-rate", &sampleRate)) { + // Since we did not always check for these, leave them optional + // and have the decoder figure it all out. + err = OK; + } else { + err = setupRawAudioFormat( + encoder ?
kPortIndexInput : kPortIndexOutput, + sampleRate, + numChannels); + } } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { int32_t numChannels, sampleRate; if (!msg->findInt32("channel-count", &numChannels) @@ -964,17 +1013,23 @@ status_t ACodec::configureCodec( err = INVALID_OPERATION; } else { if (encoder) { - if (!msg->findInt32("flac-compression-level", &compressionLevel)) { + if (!msg->findInt32( + "flac-compression-level", &compressionLevel)) { compressionLevel = 5;// default FLAC compression level } else if (compressionLevel < 0) { - ALOGW("compression level %d outside [0..8] range, using 0", compressionLevel); + ALOGW("compression level %d outside [0..8] range, " + "using 0", + compressionLevel); compressionLevel = 0; } else if (compressionLevel > 8) { - ALOGW("compression level %d outside [0..8] range, using 8", compressionLevel); + ALOGW("compression level %d outside [0..8] range, " + "using 8", + compressionLevel); compressionLevel = 8; } } - err = setupFlacCodec(encoder, numChannels, sampleRate, compressionLevel); + err = setupFlacCodec( + encoder, numChannels, sampleRate, compressionLevel); } } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { int32_t numChannels, sampleRate; @@ -1408,36 +1463,52 @@ status_t ACodec::setSupportedOutputFormat() { CHECK_EQ(err, (status_t)OK); CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused); - CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar - || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar - || format.eColorFormat == OMX_COLOR_FormatCbYCrY - || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka); - return mOMX->setParameter( mNode, OMX_IndexParamVideoPortFormat, &format, sizeof(format)); } +static const struct VideoCodingMapEntry { + const char *mMime; + OMX_VIDEO_CODINGTYPE mVideoCodingType; +} kVideoCodingMapEntry[] = { + { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, + { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, + { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, + { MEDIA_MIMETYPE_VIDEO_VPX, OMX_VIDEO_CodingVPX }, +}; + static status_t GetVideoCodingTypeFromMime( const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { - if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { - *codingType = OMX_VIDEO_CodingAVC; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { - *codingType = OMX_VIDEO_CodingMPEG4; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { - *codingType = OMX_VIDEO_CodingH263; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) { - *codingType = OMX_VIDEO_CodingMPEG2; - } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) { - *codingType = OMX_VIDEO_CodingVPX; - } else { - *codingType = OMX_VIDEO_CodingUnused; - return ERROR_UNSUPPORTED; + for (size_t i = 0; + i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); + ++i) { + if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { + *codingType = kVideoCodingMapEntry[i].mVideoCodingType; + return OK; + } } - return OK; + *codingType = OMX_VIDEO_CodingUnused; + + return ERROR_UNSUPPORTED; +} + +static status_t GetMimeTypeForVideoCoding( + OMX_VIDEO_CODINGTYPE codingType, AString *mime) { + for (size_t i = 0; + i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); + ++i) { + if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { + *mime = 
kVideoCodingMapEntry[i].mMime; + return OK; + } + } + + mime->clear(); + + return ERROR_UNSUPPORTED; } status_t ACodec::setupVideoDecoder( @@ -2085,6 +2156,42 @@ size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { return n; } +size_t ACodec::countBuffersOwnedByNativeWindow() const { + size_t n = 0; + + for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { + const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); + + if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { + ++n; + } + } + + return n; +} + +void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { + if (mNativeWindow == NULL) { + return; + } + + int minUndequeuedBufs = 0; + status_t err = mNativeWindow->query( + mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, + &minUndequeuedBufs); + + if (err != OK) { + ALOGE("[%s] NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", + mComponentName.c_str(), strerror(-err), -err); + + minUndequeuedBufs = 0; + } + + while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs + && dequeueBufferFromNativeWindow() != NULL) { + } +} + bool ACodec::allYourBuffersAreBelongToUs( OMX_U32 portIndex) { for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { @@ -2122,7 +2229,7 @@ void ACodec::processDeferredMessages() { } } -void ACodec::sendFormatChange() { +void ACodec::sendFormatChange(const sp<AMessage> &reply) { sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatOutputFormatChanged); @@ -2141,49 +2248,59 @@ void ACodec::sendFormatChange() { { OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; - notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); + AString mime; + if (!mIsEncoder) { + notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); + } else if (GetMimeTypeForVideoCoding( + videoDef->eCompressionFormat, &mime) != OK) { + notify->setString("mime", "application/octet-stream"); + } else { + notify->setString("mime", mime.c_str()); + } + notify->setInt32("width", videoDef->nFrameWidth); notify->setInt32("height", videoDef->nFrameHeight); - notify->setInt32("stride", videoDef->nStride); - notify->setInt32("slice-height", videoDef->nSliceHeight); - notify->setInt32("color-format", videoDef->eColorFormat); - - OMX_CONFIG_RECTTYPE rect; - InitOMXParams(&rect); - rect.nPortIndex = kPortIndexOutput; - - if (mOMX->getConfig( - mNode, OMX_IndexConfigCommonOutputCrop, - &rect, sizeof(rect)) != OK) { - rect.nLeft = 0; - rect.nTop = 0; - rect.nWidth = videoDef->nFrameWidth; - rect.nHeight = videoDef->nFrameHeight; - } - CHECK_GE(rect.nLeft, 0); - CHECK_GE(rect.nTop, 0); - CHECK_GE(rect.nWidth, 0u); - CHECK_GE(rect.nHeight, 0u); - CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); - CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); - - notify->setRect( - "crop", - rect.nLeft, - rect.nTop, - rect.nLeft + rect.nWidth - 1, - rect.nTop + rect.nHeight - 1); - - if (mNativeWindow != NULL) { - android_native_rect_t crop; - crop.left = rect.nLeft; - crop.top = rect.nTop; - crop.right = rect.nLeft + rect.nWidth; - crop.bottom = rect.nTop + rect.nHeight; - - CHECK_EQ(0, native_window_set_crop( - mNativeWindow.get(), &crop)); + if (!mIsEncoder) { + notify->setInt32("stride", videoDef->nStride); + notify->setInt32("slice-height", videoDef->nSliceHeight); + notify->setInt32("color-format", videoDef->eColorFormat); + + OMX_CONFIG_RECTTYPE rect; + InitOMXParams(&rect); + rect.nPortIndex = kPortIndexOutput; + + if (mOMX->getConfig( + mNode, OMX_IndexConfigCommonOutputCrop, + &rect, 
sizeof(rect)) != OK) { + rect.nLeft = 0; + rect.nTop = 0; + rect.nWidth = videoDef->nFrameWidth; + rect.nHeight = videoDef->nFrameHeight; + } + + CHECK_GE(rect.nLeft, 0); + CHECK_GE(rect.nTop, 0); + CHECK_GE(rect.nWidth, 0u); + CHECK_GE(rect.nHeight, 0u); + CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); + CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); + + notify->setRect( + "crop", + rect.nLeft, + rect.nTop, + rect.nLeft + rect.nWidth - 1, + rect.nTop + rect.nHeight - 1); + + if (mNativeWindow != NULL) { + reply->setRect( + "crop", + rect.nLeft, + rect.nTop, + rect.nLeft + rect.nWidth, + rect.nTop + rect.nHeight); + } } break; } @@ -2191,41 +2308,108 @@ case OMX_PortDomainAudio: { OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; - CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM); - OMX_AUDIO_PARAM_PCMMODETYPE params; - InitOMXParams(&params); - params.nPortIndex = kPortIndexOutput; + switch (audioDef->eEncoding) { + case OMX_AUDIO_CodingPCM: + { + OMX_AUDIO_PARAM_PCMMODETYPE params; + InitOMXParams(&params); + params.nPortIndex = kPortIndexOutput; - CHECK_EQ(mOMX->getParameter( - mNode, OMX_IndexParamAudioPcm, - &params, sizeof(params)), - (status_t)OK); + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioPcm, + &params, sizeof(params)), + (status_t)OK); + + CHECK(params.nChannels == 1 || params.bInterleaved); + CHECK_EQ(params.nBitPerSample, 16u); + CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned); + CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSamplingRate); + if (mEncoderDelay + mEncoderPadding) { + size_t frameSize = params.nChannels * sizeof(int16_t); + if (mSkipCutBuffer != NULL) { + size_t prevbufsize = mSkipCutBuffer->size(); + if (prevbufsize != 0) { + ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize); + } + } + mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize, + mEncoderPadding * frameSize); + } - CHECK(params.nChannels == 1 || params.bInterleaved); - CHECK_EQ(params.nBitPerSample, 16u); - CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned); - CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear); - - notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); - notify->setInt32("channel-count", params.nChannels); - notify->setInt32("sample-rate", params.nSamplingRate); - if (mEncoderDelay + mEncoderPadding) { - size_t frameSize = params.nChannels * sizeof(int16_t); - if (mSkipCutBuffer != NULL) { - size_t prevbufsize = mSkipCutBuffer->size(); - if (prevbufsize != 0) { - ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize); + if (mChannelMaskPresent) { + notify->setInt32("channel-mask", mChannelMask); + } + break; } - } - mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize, - mEncoderPadding * frameSize); - } - if (mChannelMaskPresent) { - notify->setInt32("channel-mask", mChannelMask); - } + case OMX_AUDIO_CodingAAC: + { + OMX_AUDIO_PARAM_AACPROFILETYPE params; + InitOMXParams(&params); + params.nPortIndex = kPortIndexOutput; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioAac, + &params, sizeof(params)), + (status_t)OK); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSampleRate); + break; + } + + case OMX_AUDIO_CodingAMR: + { + OMX_AUDIO_PARAM_AMRTYPE params; +
InitOMXParams(&params); + params.nPortIndex = kPortIndexOutput; + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioAmr, + &params, sizeof(params)), + (status_t)OK); + + notify->setInt32("channel-count", 1); + if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { + notify->setString( + "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); + + notify->setInt32("sample-rate", 16000); + } else { + notify->setString( + "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); + + notify->setInt32("sample-rate", 8000); + } + break; + } + + case OMX_AUDIO_CodingFLAC: + { + OMX_AUDIO_PARAM_FLACTYPE params; + InitOMXParams(&params); + params.nPortIndex = kPortIndexOutput; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioFlac, + &params, sizeof(params)), + (status_t)OK); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSampleRate); + break; + } + + default: + TRESPASS(); + } break; } @@ -2454,6 +2638,21 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { return onOMXMessage(msg); } + case ACodec::kWhatCreateInputSurface: + case ACodec::kWhatSignalEndOfInputStream: + { + ALOGE("Message 0x%x was not handled", msg->what()); + mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); + return true; + } + + case ACodec::kWhatOMXDied: + { + ALOGE("OMX/mediaserver died, signalling error!"); + mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); + break; + } + default: return false; } @@ -2856,19 +3055,21 @@ bool ACodec::BaseState::onOMXFillBufferDone( break; } - if (!mCodec->mIsEncoder && !mCodec->mSentFormat) { - mCodec->sendFormatChange(); - } + sp<AMessage> reply = + new AMessage(kWhatOutputBufferDrained, mCodec->id()); - if (mCodec->mNativeWindow == NULL) { - info->mData->setRange(rangeOffset, rangeLength); + if (!mCodec->mSentFormat) { + mCodec->sendFormatChange(reply); + } + info->mData->setRange(rangeOffset, rangeLength); #if 0 + if (mCodec->mNativeWindow == NULL) { if (IsIDR(info->mData)) { ALOGI("IDR frame"); } -#endif } +#endif if (mCodec->mSkipCutBuffer != NULL) { mCodec->mSkipCutBuffer->submit(info->mData); @@ -2881,9 +3082,6 @@ bool ACodec::BaseState::onOMXFillBufferDone( notify->setBuffer("buffer", info->mData); notify->setInt32("flags", flags); - sp<AMessage> reply = - new AMessage(kWhatOutputBufferDrained, mCodec->id()); - reply->setPointer("buffer-id", info->mBufferID); notify->setMessage("reply", reply); @@ -2927,9 +3125,17 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM); + android_native_rect_t crop; + if (msg->findRect("crop", + &crop.left, &crop.top, &crop.right, &crop.bottom)) { + CHECK_EQ(0, native_window_set_crop( + mCodec->mNativeWindow.get(), &crop)); + } + int32_t render; if (mCodec->mNativeWindow != NULL - && msg->findInt32("render", &render) && render != 0) { + && msg->findInt32("render", &render) && render != 0 + && (info->mData == NULL || info->mData->size() != 0)) { // The client wants this buffer to be rendered.
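// Annotation (not part of the patch): two behaviors are introduced in the hunk above.
// First, the crop rectangle now travels on the kWhatOutputBufferDrained reply message --
// sendFormatChange(reply) attaches it -- and is applied here with native_window_set_crop()
// when the drained buffer comes back, instead of at format-change time.
// Second, the extra (info->mData == NULL || info->mData->size() != 0) test keeps
// zero-length output buffers from being queued to the native window for rendering.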
status_t err; @@ -3003,6 +3209,18 @@ ACodec::UninitializedState::UninitializedState(ACodec *codec) void ACodec::UninitializedState::stateEntered() { ALOGV("Now uninitialized"); + + if (mDeathNotifier != NULL) { + mCodec->mOMX->asBinder()->unlinkToDeath(mDeathNotifier); + mDeathNotifier.clear(); + } + + mCodec->mNativeWindow.clear(); + mCodec->mNode = NULL; + mCodec->mOMX.clear(); + mCodec->mQuirks = 0; + mCodec->mFlags = 0; + mCodec->mComponentName.clear(); } bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { @@ -3074,6 +3292,15 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { sp<IOMX> omx = client.interface(); + sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id()); + + mDeathNotifier = new DeathNotifier(notify); + if (omx->asBinder()->linkToDeath(mDeathNotifier) != OK) { + // This was a local binder, if it dies so do we, we won't care + // about any notifications in the afterlife. + mDeathNotifier.clear(); + } + Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs; AString mime; @@ -3138,7 +3365,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { return false; } - sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id()); + notify = new AMessage(kWhatOMXMessage, mCodec->id()); observer->setNotificationMessage(notify); mCodec->mComponentName = componentName; @@ -3152,11 +3379,6 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { mCodec->mOMX = omx; mCodec->mNode = node; - mCodec->mPortEOS[kPortIndexInput] = - mCodec->mPortEOS[kPortIndexOutput] = false; - - mCodec->mInputEOSResult = OK; - { sp<AMessage> notify = mCodec->mNotify->dup(); notify->setInt32("what", ACodec::kWhatComponentAllocated); @@ -3178,6 +3400,11 @@ ACodec::LoadedState::LoadedState(ACodec *codec) void ACodec::LoadedState::stateEntered() { ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); + mCodec->mPortEOS[kPortIndexInput] = + mCodec->mPortEOS[kPortIndexOutput] = false; + + mCodec->mInputEOSResult = OK; + if (mCodec->mShutdownInProgress) { bool keepComponentAllocated = mCodec->mKeepComponentAllocated; @@ -3192,13 +3419,6 @@ void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { if (!keepComponentAllocated) { CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK); - mCodec->mNativeWindow.clear(); - mCodec->mNode = NULL; - mCodec->mOMX.clear(); - mCodec->mQuirks = 0; - mCodec->mFlags = 0; - mCodec->mComponentName.clear(); - mCodec->changeState(mCodec->mUninitializedState); } @@ -3218,6 +3438,13 @@ bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { break; } + case ACodec::kWhatCreateInputSurface: + { + onCreateInputSurface(msg); + handled = true; + break; + } + case ACodec::kWhatStart: { onStart(); @@ -3296,6 +3523,32 @@ bool ACodec::LoadedState::onConfigureComponent( return true; } +void ACodec::LoadedState::onCreateInputSurface( + const sp<AMessage> &msg) { + ALOGV("onCreateInputSurface"); + + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatInputSurfaceCreated); + + sp<IGraphicBufferProducer> bufferProducer; + status_t err; + + err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput, + &bufferProducer); + if (err == OK) { + notify->setObject("input-surface", + new BufferProducerWrapper(bufferProducer)); + } else { + // Can't use mCodec->signalError() here -- MediaCodec won't forward + // the error through because it's in the "configured" state. 
We + // send a kWhatInputSurfaceCreated with an error value instead. + ALOGE("[%s] onCreateInputSurface returning error %d", + mCodec->mComponentName.c_str(), err); + notify->setInt32("err", err); + } + notify->post(); +} + void ACodec::LoadedState::onStart() { ALOGV("onStart"); @@ -3345,6 +3598,27 @@ bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { return true; } + case kWhatSignalEndOfInputStream: + { + mCodec->onSignalEndOfInputStream(); + return true; + } + + case kWhatResume: + { + // We'll be active soon enough. + return true; + } + + case kWhatFlush: + { + // We haven't even started yet, so we're flushed alright... + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFlushCompleted); + notify->post(); + return true; + } + default: return BaseState::onMessageReceived(msg); } @@ -3390,6 +3664,28 @@ bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { return true; } + case kWhatResume: + { + // We'll be active soon enough. + return true; + } + + case kWhatFlush: + { + // We haven't even started yet, so we're flushed alright... + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFlushCompleted); + notify->post(); + + return true; + } + + case kWhatSignalEndOfInputStream: + { + mCodec->onSignalEndOfInputStream(); + return true; + } + default: return BaseState::onMessageReceived(msg); } @@ -3518,7 +3814,6 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { (status_t)OK); mCodec->changeState(mCodec->mFlushingState); - handled = true; break; } @@ -3542,6 +3837,30 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { break; } + case kWhatSetParameters: + { + sp<AMessage> params; + CHECK(msg->findMessage("params", &params)); + + status_t err = mCodec->setParameters(params); + + sp<AMessage> reply; + if (msg->findMessage("reply", &reply)) { + reply->setInt32("err", err); + reply->post(); + } + + handled = true; + break; + } + + case ACodec::kWhatSignalEndOfInputStream: + { + mCodec->onSignalEndOfInputStream(); + handled = true; + break; + } + default: handled = BaseState::onMessageReceived(msg); break; @@ -3550,6 +3869,42 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { return handled; } +status_t ACodec::setParameters(const sp<AMessage> &params) { + int32_t videoBitrate; + if (params->findInt32("videoBitrate", &videoBitrate)) { + OMX_VIDEO_CONFIG_BITRATETYPE configParams; + InitOMXParams(&configParams); + configParams.nPortIndex = kPortIndexOutput; + configParams.nEncodeBitrate = videoBitrate; + + status_t err = mOMX->setConfig( + mNode, + OMX_IndexConfigVideoBitrate, + &configParams, + sizeof(configParams)); + + if (err != OK) { + ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", + videoBitrate, err); + + return err; + } + } + + return OK; +} + +void ACodec::onSignalEndOfInputStream() { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", ACodec::kWhatSignaledInputEOS); + + status_t err = mOMX->signalEndOfInputStream(mNode); + if (err != OK) { + notify->setInt32("err", err); + } + notify->post(); +} + bool ACodec::ExecutingState::onOMXEvent( OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { switch (event) { @@ -3964,6 +4319,10 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput] && mCodec->allYourBuffersAreBelongToUs()) { + // We now own all buffers except possibly those still queued
with + // the native window for rendering. Let's get those back as well. + mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); + sp<AMessage> notify = mCodec->mNotify->dup(); notify->setInt32("what", ACodec::kWhatFlushCompleted); notify->post(); @@ -3973,6 +4332,10 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { mCodec->mInputEOSResult = OK; + if (mCodec->mSkipCutBuffer != NULL) { + mCodec->mSkipCutBuffer->clear(); + } + mCodec->changeState(mCodec->mExecutingState); } } diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index cc0581e..acc3abf 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -19,19 +19,20 @@ LOCAL_SRC_FILES:= \ ESDS.cpp \ FileSource.cpp \ FLACExtractor.cpp \ - FragmentedMP4Extractor.cpp \ HTTPBase.cpp \ JPEGSource.cpp \ MP3Extractor.cpp \ MPEG2TSWriter.cpp \ MPEG4Extractor.cpp \ MPEG4Writer.cpp \ + MediaAdapter.cpp \ MediaBuffer.cpp \ MediaBufferGroup.cpp \ MediaCodec.cpp \ MediaCodecList.cpp \ MediaDefs.cpp \ MediaExtractor.cpp \ + MediaMuxer.cpp \ MediaSource.cpp \ MetaData.cpp \ NuCachedSource2.cpp \ @@ -78,7 +79,6 @@ LOCAL_SHARED_LIBRARIES := \ libicuuc \ liblog \ libmedia \ - libmedia_native \ libsonivox \ libssl \ libstagefright_omx \ diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp index 861aebe..3cf4d5c 100644 --- a/media/libstagefright/AudioSource.cpp +++ b/media/libstagefright/AudioSource.cpp @@ -58,7 +58,7 @@ AudioSource::AudioSource( ALOGV("sampleRate: %d, channelCount: %d", sampleRate, channelCount); CHECK(channelCount == 1 || channelCount == 2); - int minFrameCount; + size_t minFrameCount; status_t status = AudioRecord::getMinFrameCount(&minFrameCount, sampleRate, AUDIO_FORMAT_PCM_16_BIT, diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 1e2625a..f12f4d4 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -48,8 +48,8 @@ #include <media/stagefright/MetaData.h> #include <media/stagefright/OMXCodec.h> -#include <gui/ISurfaceTexture.h> -#include <gui/SurfaceTextureClient.h> +#include <gui/IGraphicBufferProducer.h> +#include <gui/Surface.h> #include <media/stagefright/foundation/AMessage.h> @@ -1103,8 +1103,7 @@ void AwesomePlayer::initRenderer_l() { setVideoScalingMode_l(mVideoScalingMode); if (USE_SURFACE_ALLOC && !strncmp(component, "OMX.", 4) - && strncmp(component, "OMX.google.", 11) - && strcmp(component, "OMX.Nvidia.mpeg2v.decode")) { + && strncmp(component, "OMX.google.", 11)) { // Hardware decoders avoid the CPU color conversion by decoding // directly to ANativeBuffers, so we must use a renderer that // just pushes those buffers to the ANativeWindow. 
@@ -1178,12 +1177,12 @@ bool AwesomePlayer::isPlaying() const { return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN); } -status_t AwesomePlayer::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) { +status_t AwesomePlayer::setSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer) { Mutex::Autolock autoLock(mLock); status_t err; - if (surfaceTexture != NULL) { - err = setNativeWindow_l(new SurfaceTextureClient(surfaceTexture)); + if (bufferProducer != NULL) { + err = setNativeWindow_l(new Surface(bufferProducer)); } else { err = setNativeWindow_l(NULL); } @@ -2511,6 +2510,7 @@ status_t AwesomePlayer::setVideoScalingMode_l(int32_t mode) { if (err != OK) { ALOGW("Failed to set scaling mode: %d", err); } + return err; } return OK; } diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index efd7af7..5a26b06 100755..100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -121,13 +121,14 @@ static int32_t getColorFormat(const char* colorFormat) { CHECK(!"Unknown color format"); } -CameraSource *CameraSource::Create() { +CameraSource *CameraSource::Create(const String16 &clientName) { Size size; size.width = -1; size.height = -1; sp<ICamera> camera; - return new CameraSource(camera, NULL, 0, size, -1, NULL, false); + return new CameraSource(camera, NULL, 0, clientName, -1, + size, -1, NULL, false); } // static @@ -135,14 +136,16 @@ CameraSource *CameraSource::CreateFromCamera( const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, - const sp<Surface>& surface, + const sp<IGraphicBufferProducer>& surface, bool storeMetaDataInVideoBuffers) { CameraSource *source = new CameraSource(camera, proxy, cameraId, - videoSize, frameRate, surface, - storeMetaDataInVideoBuffers); + clientName, clientUid, videoSize, frameRate, surface, + storeMetaDataInVideoBuffers); return source; } @@ -150,9 +153,11 @@ CameraSource::CameraSource( const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, - const sp<Surface>& surface, + const sp<IGraphicBufferProducer>& surface, bool storeMetaDataInVideoBuffers) : mCameraFlags(0), mNumInputBuffers(0), @@ -173,6 +178,7 @@ CameraSource::CameraSource( mVideoSize.height = -1; mInitCheck = init(camera, proxy, cameraId, + clientName, clientUid, videoSize, frameRate, storeMetaDataInVideoBuffers); if (mInitCheck != OK) releaseCamera(); @@ -184,10 +190,10 @@ status_t CameraSource::initCheck() const { status_t CameraSource::isCameraAvailable( const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, - int32_t cameraId) { + int32_t cameraId, const String16& clientName, uid_t clientUid) { if (camera == 0) { - mCamera = Camera::connect(cameraId); + mCamera = Camera::connect(cameraId, clientName, clientUid); if (mCamera == 0) return -EBUSY; mCameraFlags &= ~FLAGS_HOT_CAMERA; } else { @@ -469,6 +475,8 @@ status_t CameraSource::init( const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers) { @@ -476,7 +484,7 @@ status_t CameraSource::init( ALOGV("init"); status_t err = OK; int64_t token = IPCThreadState::self()->clearCallingIdentity(); - err = initWithCameraAccess(camera, proxy, cameraId, + err = 
initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, videoSize, frameRate, storeMetaDataInVideoBuffers); IPCThreadState::self()->restoreCallingIdentity(token); @@ -487,13 +495,16 @@ status_t CameraSource::initWithCameraAccess( const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t frameRate, bool storeMetaDataInVideoBuffers) { ALOGV("initWithCameraAccess"); status_t err = OK; - if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) { + if ((err = isCameraAvailable(camera, proxy, cameraId, + clientName, clientUid)) != OK) { ALOGE("Camera connection could not be established."); return err; } @@ -525,7 +536,7 @@ status_t CameraSource::initWithCameraAccess( if (mSurface != NULL) { // This CHECK is good, since we just passed the lock/unlock // check earlier by calling mCamera->setParameters(). - CHECK_EQ((status_t)OK, mCamera->setPreviewDisplay(mSurface)); + CHECK_EQ((status_t)OK, mCamera->setPreviewTexture(mSurface)); } // By default, do not store metadata in video buffers diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp index 26ce7ae..20214e8 100644 --- a/media/libstagefright/CameraSourceTimeLapse.cpp +++ b/media/libstagefright/CameraSourceTimeLapse.cpp @@ -36,13 +36,16 @@ CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera( const sp<ICamera> &camera, const sp<ICameraRecordingProxy> &proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t videoFrameRate, - const sp<Surface>& surface, + const sp<IGraphicBufferProducer>& surface, int64_t timeBetweenFrameCaptureUs) { CameraSourceTimeLapse *source = new CameraSourceTimeLapse(camera, proxy, cameraId, + clientName, clientUid, videoSize, videoFrameRate, surface, timeBetweenFrameCaptureUs); @@ -59,11 +62,14 @@ CameraSourceTimeLapse::CameraSourceTimeLapse( const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy, int32_t cameraId, + const String16& clientName, + uid_t clientUid, Size videoSize, int32_t videoFrameRate, - const sp<Surface>& surface, + const sp<IGraphicBufferProducer>& surface, int64_t timeBetweenFrameCaptureUs) - : CameraSource(camera, proxy, cameraId, videoSize, videoFrameRate, surface, true), + : CameraSource(camera, proxy, cameraId, clientName, clientUid, + videoSize, videoFrameRate, surface, true), mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate), mLastTimeLapseFrameRealTimestampUs(0), mSkipCurrentFrame(false) { diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp index 9d0eea2..fc6fd9c 100644 --- a/media/libstagefright/DataSource.cpp +++ b/media/libstagefright/DataSource.cpp @@ -23,7 +23,6 @@ #include "include/AACExtractor.h" #include "include/DRMExtractor.h" #include "include/FLACExtractor.h" -#include "include/FragmentedMP4Extractor.h" #include "include/HTTPBase.h" #include "include/MP3Extractor.h" #include "include/MPEG2PSExtractor.h" @@ -59,6 +58,45 @@ bool DataSource::getUInt16(off64_t offset, uint16_t *x) { return true; } +bool DataSource::getUInt24(off64_t offset, uint32_t *x) { + *x = 0; + + uint8_t byte[3]; + if (readAt(offset, byte, 3) != 3) { + return false; + } + + *x = (byte[0] << 16) | (byte[1] << 8) | byte[2]; + + return true; +} + +bool DataSource::getUInt32(off64_t offset, uint32_t *x) { + *x = 0; + + uint32_t tmp; + if (readAt(offset, &tmp, 4) != 4) { + return false; + } + + *x = ntohl(tmp); + + return true; +} + +bool 
DataSource::getUInt64(off64_t offset, uint64_t *x) { + *x = 0; + + uint64_t tmp; + if (readAt(offset, &tmp, 8) != 8) { + return false; + } + + *x = ntoh64(tmp); + + return true; +} + status_t DataSource::getSize(off64_t *size) { *size = 0; @@ -111,7 +149,6 @@ void DataSource::RegisterSniffer(SnifferFunc func) { // static void DataSource::RegisterDefaultSniffers() { RegisterSniffer(SniffMPEG4); - RegisterSniffer(SniffFragmentedMP4); RegisterSniffer(SniffMatroska); RegisterSniffer(SniffOgg); RegisterSniffer(SniffWAV); diff --git a/media/libstagefright/FragmentedMP4Extractor.cpp b/media/libstagefright/FragmentedMP4Extractor.cpp deleted file mode 100644 index 82712ef..0000000 --- a/media/libstagefright/FragmentedMP4Extractor.cpp +++ /dev/null @@ -1,460 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "FragmentedMP4Extractor" -#include <utils/Log.h> - -#include "include/FragmentedMP4Extractor.h" -#include "include/SampleTable.h" -#include "include/ESDS.h" - -#include <arpa/inet.h> - -#include <ctype.h> -#include <stdint.h> -#include <stdlib.h> -#include <string.h> - -#include <cutils/properties.h> // for property_get - -#include <media/stagefright/foundation/ABitReader.h> -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/DataSource.h> -#include <media/stagefright/MediaBuffer.h> -#include <media/stagefright/MediaBufferGroup.h> -#include <media/stagefright/MediaDefs.h> -#include <media/stagefright/MediaSource.h> -#include <media/stagefright/MetaData.h> -#include <media/stagefright/Utils.h> -#include <utils/String8.h> - -namespace android { - -class FragmentedMPEG4Source : public MediaSource { -public: - // Caller retains ownership of the Parser - FragmentedMPEG4Source(bool audio, - const sp<MetaData> &format, - const sp<FragmentedMP4Parser> &parser, - const sp<FragmentedMP4Extractor> &extractor); - - virtual status_t start(MetaData *params = NULL); - virtual status_t stop(); - - virtual sp<MetaData> getFormat(); - - virtual status_t read( - MediaBuffer **buffer, const ReadOptions *options = NULL); - -protected: - virtual ~FragmentedMPEG4Source(); - -private: - Mutex mLock; - - sp<MetaData> mFormat; - sp<FragmentedMP4Parser> mParser; - sp<FragmentedMP4Extractor> mExtractor; - bool mIsAudioTrack; - uint32_t mCurrentSampleIndex; - - bool mIsAVC; - size_t mNALLengthSize; - - bool mStarted; - - MediaBufferGroup *mGroup; - - bool mWantsNALFragments; - - uint8_t *mSrcBuffer; - - FragmentedMPEG4Source(const FragmentedMPEG4Source &); - FragmentedMPEG4Source &operator=(const FragmentedMPEG4Source &); -}; - - -FragmentedMP4Extractor::FragmentedMP4Extractor(const sp<DataSource> &source) - : mLooper(new ALooper), - mParser(new FragmentedMP4Parser()), - mDataSource(source), - mInitCheck(NO_INIT), - mFileMetaData(new MetaData) { - 
ALOGV("FragmentedMP4Extractor"); - mLooper->registerHandler(mParser); - mLooper->start(false /* runOnCallingThread */); - mParser->start(mDataSource); - - bool hasVideo = mParser->getFormat(false /* audio */, true /* synchronous */) != NULL; - bool hasAudio = mParser->getFormat(true /* audio */, true /* synchronous */) != NULL; - - ALOGV("number of tracks: %d", countTracks()); - - if (hasVideo) { - mFileMetaData->setCString( - kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_MPEG4); - } else if (hasAudio) { - mFileMetaData->setCString(kKeyMIMEType, "audio/mp4"); - } else { - ALOGE("no audio and no video, no idea what file type this is"); - } - // tracks are numbered such that video track is first, audio track is second - if (hasAudio && hasVideo) { - mTrackCount = 2; - mAudioTrackIndex = 1; - } else if (hasAudio) { - mTrackCount = 1; - mAudioTrackIndex = 0; - } else if (hasVideo) { - mTrackCount = 1; - mAudioTrackIndex = -1; - } else { - mTrackCount = 0; - mAudioTrackIndex = -1; - } -} - -FragmentedMP4Extractor::~FragmentedMP4Extractor() { - ALOGV("~FragmentedMP4Extractor"); - mLooper->stop(); -} - -uint32_t FragmentedMP4Extractor::flags() const { - return CAN_PAUSE | - (mParser->isSeekable() ? (CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK) : 0); -} - -sp<MetaData> FragmentedMP4Extractor::getMetaData() { - return mFileMetaData; -} - -size_t FragmentedMP4Extractor::countTracks() { - return mTrackCount; -} - - -sp<MetaData> FragmentedMP4Extractor::getTrackMetaData( - size_t index, uint32_t flags) { - if (index >= countTracks()) { - return NULL; - } - - sp<AMessage> msg = mParser->getFormat(index == mAudioTrackIndex, true /* synchronous */); - - if (msg == NULL) { - ALOGV("got null format for track %d", index); - return NULL; - } - - sp<MetaData> meta = new MetaData(); - convertMessageToMetaData(msg, meta); - return meta; -} - -static void MakeFourCCString(uint32_t x, char *s) { - s[0] = x >> 24; - s[1] = (x >> 16) & 0xff; - s[2] = (x >> 8) & 0xff; - s[3] = x & 0xff; - s[4] = '\0'; -} - -sp<MediaSource> FragmentedMP4Extractor::getTrack(size_t index) { - if (index >= countTracks()) { - return NULL; - } - return new FragmentedMPEG4Source(index == mAudioTrackIndex, getTrackMetaData(index, 0), mParser, this); -} - - -//////////////////////////////////////////////////////////////////////////////// - -FragmentedMPEG4Source::FragmentedMPEG4Source( - bool audio, - const sp<MetaData> &format, - const sp<FragmentedMP4Parser> &parser, - const sp<FragmentedMP4Extractor> &extractor) - : mFormat(format), - mParser(parser), - mExtractor(extractor), - mIsAudioTrack(audio), - mStarted(false), - mGroup(NULL), - mWantsNALFragments(false), - mSrcBuffer(NULL) { -} - -FragmentedMPEG4Source::~FragmentedMPEG4Source() { - if (mStarted) { - stop(); - } -} - -status_t FragmentedMPEG4Source::start(MetaData *params) { - Mutex::Autolock autoLock(mLock); - - CHECK(!mStarted); - - int32_t val; - if (params && params->findInt32(kKeyWantsNALFragments, &val) - && val != 0) { - mWantsNALFragments = true; - } else { - mWantsNALFragments = false; - } - ALOGV("caller wants NAL fragments: %s", mWantsNALFragments ? 
"yes" : "no"); - - mGroup = new MediaBufferGroup; - - int32_t max_size = 65536; - // XXX CHECK(mFormat->findInt32(kKeyMaxInputSize, &max_size)); - - mGroup->add_buffer(new MediaBuffer(max_size)); - - mSrcBuffer = new uint8_t[max_size]; - - mStarted = true; - - return OK; -} - -status_t FragmentedMPEG4Source::stop() { - Mutex::Autolock autoLock(mLock); - - CHECK(mStarted); - - delete[] mSrcBuffer; - mSrcBuffer = NULL; - - delete mGroup; - mGroup = NULL; - - mStarted = false; - mCurrentSampleIndex = 0; - - return OK; -} - -sp<MetaData> FragmentedMPEG4Source::getFormat() { - Mutex::Autolock autoLock(mLock); - - return mFormat; -} - - -status_t FragmentedMPEG4Source::read( - MediaBuffer **out, const ReadOptions *options) { - int64_t seekTimeUs; - ReadOptions::SeekMode mode; - if (options && options->getSeekTo(&seekTimeUs, &mode)) { - mParser->seekTo(mIsAudioTrack, seekTimeUs); - } - MediaBuffer *buffer = NULL; - mGroup->acquire_buffer(&buffer); - sp<ABuffer> parseBuffer; - - status_t ret = mParser->dequeueAccessUnit(mIsAudioTrack, &parseBuffer, true /* synchronous */); - if (ret != OK) { - buffer->release(); - ALOGV("returning %d", ret); - return ret; - } - sp<AMessage> meta = parseBuffer->meta(); - int64_t timeUs; - CHECK(meta->findInt64("timeUs", &timeUs)); - buffer->meta_data()->setInt64(kKeyTime, timeUs); - buffer->set_range(0, parseBuffer->size()); - memcpy(buffer->data(), parseBuffer->data(), parseBuffer->size()); - *out = buffer; - return OK; -} - - -static bool isCompatibleBrand(uint32_t fourcc) { - static const uint32_t kCompatibleBrands[] = { - FOURCC('i', 's', 'o', 'm'), - FOURCC('i', 's', 'o', '2'), - FOURCC('a', 'v', 'c', '1'), - FOURCC('3', 'g', 'p', '4'), - FOURCC('m', 'p', '4', '1'), - FOURCC('m', 'p', '4', '2'), - - // Won't promise that the following file types can be played. - // Just give these file types a chance. - FOURCC('q', 't', ' ', ' '), // Apple's QuickTime - FOURCC('M', 'S', 'N', 'V'), // Sony's PSP - - FOURCC('3', 'g', '2', 'a'), // 3GPP2 - FOURCC('3', 'g', '2', 'b'), - }; - - for (size_t i = 0; - i < sizeof(kCompatibleBrands) / sizeof(kCompatibleBrands[0]); - ++i) { - if (kCompatibleBrands[i] == fourcc) { - return true; - } - } - - return false; -} - -// Attempt to actually parse the 'ftyp' atom and determine if a suitable -// compatible brand is present. -// Also try to identify where this file's metadata ends -// (end of the 'moov' atom) and report it to the caller as part of -// the metadata. -static bool Sniff( - const sp<DataSource> &source, String8 *mimeType, float *confidence, - sp<AMessage> *meta) { - // We scan up to 128k bytes to identify this file as an MP4. - static const off64_t kMaxScanOffset = 128ll * 1024ll; - - off64_t offset = 0ll; - bool foundGoodFileType = false; - bool isFragmented = false; - off64_t moovAtomEndOffset = -1ll; - bool done = false; - - while (!done && offset < kMaxScanOffset) { - uint32_t hdr[2]; - if (source->readAt(offset, hdr, 8) < 8) { - return false; - } - - uint64_t chunkSize = ntohl(hdr[0]); - uint32_t chunkType = ntohl(hdr[1]); - off64_t chunkDataOffset = offset + 8; - - if (chunkSize == 1) { - if (source->readAt(offset + 8, &chunkSize, 8) < 8) { - return false; - } - - chunkSize = ntoh64(chunkSize); - chunkDataOffset += 8; - - if (chunkSize < 16) { - // The smallest valid chunk is 16 bytes long in this case. - return false; - } - } else if (chunkSize < 8) { - // The smallest valid chunk is 8 bytes long. 
- return false; - } - - off64_t chunkDataSize = offset + chunkSize - chunkDataOffset; - - char chunkstring[5]; - MakeFourCCString(chunkType, chunkstring); - ALOGV("saw chunk type %s, size %lld @ %lld", chunkstring, chunkSize, offset); - switch (chunkType) { - case FOURCC('f', 't', 'y', 'p'): - { - if (chunkDataSize < 8) { - return false; - } - - uint32_t numCompatibleBrands = (chunkDataSize - 8) / 4; - for (size_t i = 0; i < numCompatibleBrands + 2; ++i) { - if (i == 1) { - // Skip this index, it refers to the minorVersion, - // not a brand. - continue; - } - - uint32_t brand; - if (source->readAt( - chunkDataOffset + 4 * i, &brand, 4) < 4) { - return false; - } - - brand = ntohl(brand); - char brandstring[5]; - MakeFourCCString(brand, brandstring); - ALOGV("Brand: %s", brandstring); - - if (isCompatibleBrand(brand)) { - foundGoodFileType = true; - break; - } - } - - if (!foundGoodFileType) { - return false; - } - - break; - } - - case FOURCC('m', 'o', 'o', 'v'): - { - moovAtomEndOffset = offset + chunkSize; - break; - } - - case FOURCC('m', 'o', 'o', 'f'): - { - // this is kind of broken, since we might not actually find a - // moof box in the first 128k. - isFragmented = true; - done = true; - break; - } - - default: - break; - } - - offset += chunkSize; - } - - if (!foundGoodFileType || !isFragmented) { - return false; - } - - *mimeType = MEDIA_MIMETYPE_CONTAINER_MPEG4; - *confidence = 0.5f; // slightly more than MPEG4Extractor - - if (moovAtomEndOffset >= 0) { - *meta = new AMessage; - (*meta)->setInt64("meta-data-size", moovAtomEndOffset); - (*meta)->setInt32("fragmented", 1); // tell MediaExtractor what to instantiate - - ALOGV("found metadata size: %lld", moovAtomEndOffset); - } - - return true; -} - -// used by DataSource::RegisterDefaultSniffers -bool SniffFragmentedMP4( - const sp<DataSource> &source, String8 *mimeType, float *confidence, - sp<AMessage> *meta) { - ALOGV("SniffFragmentedMP4"); - char prop[PROPERTY_VALUE_MAX]; - if (property_get("media.stagefright.use-fragmp4", prop, NULL) - && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) { - return Sniff(source, mimeType, confidence, meta); - } - - return false; -} - -} // namespace android diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp index 40bfc55..d2cc6c2 100644 --- a/media/libstagefright/HTTPBase.cpp +++ b/media/libstagefright/HTTPBase.cpp @@ -58,6 +58,16 @@ sp<HTTPBase> HTTPBase::Create(uint32_t flags) { } } +// static +status_t HTTPBase::UpdateProxyConfig( + const char *host, int32_t port, const char *exclusionList) { +#if CHROMIUM_AVAILABLE + return UpdateChromiumHTTPDataSourceProxyConfig(host, port, exclusionList); +#else + return INVALID_OPERATION; +#endif +} + void HTTPBase::addBandwidthMeasurement( size_t numBytes, int64_t delayUs) { Mutex::Autolock autoLock(mLock); diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 1a62f9d..145869e 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -22,8 +22,6 @@ #include "include/SampleTable.h" #include "include/ESDS.h" -#include <arpa/inet.h> - #include <ctype.h> #include <stdint.h> #include <stdlib.h> @@ -33,13 +31,11 @@ #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/DataSource.h> #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaBufferGroup.h> #include <media/stagefright/MediaDefs.h> 
#include <media/stagefright/MediaSource.h> #include <media/stagefright/MetaData.h> -#include <media/stagefright/Utils.h> #include <utils/String8.h> namespace android { @@ -50,15 +46,17 @@ public: MPEG4Source(const sp<MetaData> &format, const sp<DataSource> &dataSource, int32_t timeScale, - const sp<SampleTable> &sampleTable); + const sp<SampleTable> &sampleTable, + Vector<SidxEntry> &sidx, + off64_t firstMoofOffset); virtual status_t start(MetaData *params = NULL); virtual status_t stop(); virtual sp<MetaData> getFormat(); - virtual status_t read( - MediaBuffer **buffer, const ReadOptions *options = NULL); + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL); + virtual status_t fragmentedRead(MediaBuffer **buffer, const ReadOptions *options = NULL); protected: virtual ~MPEG4Source(); @@ -71,6 +69,27 @@ private: int32_t mTimescale; sp<SampleTable> mSampleTable; uint32_t mCurrentSampleIndex; + uint32_t mCurrentFragmentIndex; + Vector<SidxEntry> &mSegments; + off64_t mFirstMoofOffset; + off64_t mCurrentMoofOffset; + off64_t mNextMoofOffset; + uint32_t mCurrentTime; + int32_t mLastParsedTrackId; + int32_t mTrackId; + + int32_t mCryptoMode; // passed in from extractor + int32_t mDefaultIVSize; // passed in from extractor + uint8_t mCryptoKey[16]; // passed in from extractor + uint32_t mCurrentAuxInfoType; + uint32_t mCurrentAuxInfoTypeParameter; + int32_t mCurrentDefaultSampleInfoSize; + uint32_t mCurrentSampleInfoCount; + uint32_t mCurrentSampleInfoAllocSize; + uint8_t* mCurrentSampleInfoSizes; + uint32_t mCurrentSampleInfoOffsetCount; + uint32_t mCurrentSampleInfoOffsetsAllocSize; + uint64_t* mCurrentSampleInfoOffsets; bool mIsAVC; size_t mNALLengthSize; @@ -86,6 +105,43 @@ private: uint8_t *mSrcBuffer; size_t parseNALSize(const uint8_t *data) const; + status_t parseChunk(off64_t *offset); + status_t parseTrackFragmentHeader(off64_t offset, off64_t size); + status_t parseTrackFragmentRun(off64_t offset, off64_t size); + status_t parseSampleAuxiliaryInformationSizes(off64_t offset, off64_t size); + status_t parseSampleAuxiliaryInformationOffsets(off64_t offset, off64_t size); + + struct TrackFragmentHeaderInfo { + enum Flags { + kBaseDataOffsetPresent = 0x01, + kSampleDescriptionIndexPresent = 0x02, + kDefaultSampleDurationPresent = 0x08, + kDefaultSampleSizePresent = 0x10, + kDefaultSampleFlagsPresent = 0x20, + kDurationIsEmpty = 0x10000, + }; + + uint32_t mTrackID; + uint32_t mFlags; + uint64_t mBaseDataOffset; + uint32_t mSampleDescriptionIndex; + uint32_t mDefaultSampleDuration; + uint32_t mDefaultSampleSize; + uint32_t mDefaultSampleFlags; + + uint64_t mDataOffset; + }; + TrackFragmentHeaderInfo mTrackFragmentHeaderInfo; + + struct Sample { + off64_t offset; + size_t size; + uint32_t duration; + uint8_t iv[16]; + Vector<size_t> clearsizes; + Vector<size_t> encryptedsizes; + }; + Vector<Sample> mCurrentSamples; MPEG4Source(const MPEG4Source &); MPEG4Source &operator=(const MPEG4Source &); @@ -264,8 +320,25 @@ static const char *FourCC2MIME(uint32_t fourcc) { } } +static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t *rate) { + if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, FourCC2MIME(fourcc))) { + // AMR NB audio is always mono, 8kHz + *channels = 1; + *rate = 8000; + return true; + } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, FourCC2MIME(fourcc))) { + // AMR WB audio is always mono, 16kHz + *channels = 1; + *rate = 16000; + return true; + } + return false; +} + MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source) - : 
mDataSource(source), + : mSidxDuration(0), + mMoofOffset(0), + mDataSource(source), mInitCheck(NO_INIT), mHasVideo(false), mFirstTrack(NULL), @@ -293,6 +366,16 @@ MPEG4Extractor::~MPEG4Extractor() { sinf = next; } mFirstSINF = NULL; + + for (size_t i = 0; i < mPssh.size(); i++) { + delete [] mPssh[i].data; + } +} + +uint32_t MPEG4Extractor::flags() const { + return CAN_PAUSE | + ((mMoofOffset == 0 || mSidxEntries.size() != 0) ? + (CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK) : 0); } sp<MetaData> MPEG4Extractor::getMetaData() { @@ -307,6 +390,7 @@ sp<MetaData> MPEG4Extractor::getMetaData() { size_t MPEG4Extractor::countTracks() { status_t err; if ((err = readMetaData()) != OK) { + ALOGV("MPEG4Extractor::countTracks: no tracks"); return 0; } @@ -317,6 +401,7 @@ size_t MPEG4Extractor::countTracks() { track = track->next; } + ALOGV("MPEG4Extractor::countTracks: %d tracks", n); return n; } @@ -348,15 +433,24 @@ sp<MetaData> MPEG4Extractor::getTrackMetaData( const char *mime; CHECK(track->meta->findCString(kKeyMIMEType, &mime)); if (!strncasecmp("video/", mime, 6)) { - uint32_t sampleIndex; - uint32_t sampleTime; - if (track->sampleTable->findThumbnailSample(&sampleIndex) == OK - && track->sampleTable->getMetaDataForSample( - sampleIndex, NULL /* offset */, NULL /* size */, - &sampleTime) == OK) { - track->meta->setInt64( - kKeyThumbnailTime, - ((int64_t)sampleTime * 1000000) / track->timescale); + if (mMoofOffset > 0) { + int64_t duration; + if (track->meta->findInt64(kKeyDuration, &duration)) { + // nothing fancy, just pick a frame near 1/4th of the duration + track->meta->setInt64( + kKeyThumbnailTime, duration / 4); + } + } else { + uint32_t sampleIndex; + uint32_t sampleTime; + if (track->sampleTable->findThumbnailSample(&sampleIndex) == OK + && track->sampleTable->getMetaDataForSample( + sampleIndex, NULL /* offset */, NULL /* size */, + &sampleTime) == OK) { + track->meta->setInt64( + kKeyThumbnailTime, + ((int64_t)sampleTime * 1000000) / track->timescale); + } } } } @@ -364,6 +458,14 @@ sp<MetaData> MPEG4Extractor::getTrackMetaData( return track->meta; } +static void MakeFourCCString(uint32_t x, char *s) { + s[0] = x >> 24; + s[1] = (x >> 16) & 0xff; + s[2] = (x >> 8) & 0xff; + s[3] = x & 0xff; + s[4] = '\0'; +} + status_t MPEG4Extractor::readMetaData() { if (mInitCheck != NO_INIT) { return mInitCheck; @@ -371,7 +473,25 @@ status_t MPEG4Extractor::readMetaData() { off64_t offset = 0; status_t err; - while ((err = parseChunk(&offset, 0)) == OK) { + while (true) { + err = parseChunk(&offset, 0); + if (err == OK) { + continue; + } + + uint32_t hdr[2]; + if (mDataSource->readAt(offset, hdr, 8) < 8) { + break; + } + uint32_t chunk_type = ntohl(hdr[1]); + if (chunk_type == FOURCC('s', 'i', 'd', 'x')) { + // parse the sidx box too + continue; + } else if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { + // store the offset of the first segment + mMoofOffset = offset; + } + break; } if (mInitCheck == OK) { @@ -388,6 +508,23 @@ status_t MPEG4Extractor::readMetaData() { } CHECK_NE(err, (status_t)NO_INIT); + + // copy pssh data into file metadata + int psshsize = 0; + for (size_t i = 0; i < mPssh.size(); i++) { + psshsize += 20 + mPssh[i].datalen; + } + if (psshsize) { + char *buf = (char*)malloc(psshsize); + char *ptr = buf; + for (size_t i = 0; i < mPssh.size(); i++) { + memcpy(ptr, mPssh[i].uuid, 20); // uuid + length + memcpy(ptr + 20, mPssh[i].data, mPssh[i].datalen); + ptr += (20 + mPssh[i].datalen); + } + mFileMetaData->setData(kKeyPssh, 'pssh', buf, psshsize); + free(buf); + } return 
mInitCheck; } @@ -559,14 +696,6 @@ status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) { return UNKNOWN_ERROR; // Return a dummy error. } -static void MakeFourCCString(uint32_t x, char *s) { - s[0] = x >> 24; - s[1] = (x >> 16) & 0xff; - s[2] = (x >> 8) & 0xff; - s[3] = x & 0xff; - s[4] = '\0'; -} - struct PathAdder { PathAdder(Vector<uint32_t> *path, uint32_t chunkType) : mPath(path) { @@ -630,7 +759,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { char chunk[5]; MakeFourCCString(chunk_type, chunk); - ALOGV("chunk: %s @ %lld", chunk, *offset); + ALOGV("chunk: %s @ %lld, %d", chunk, *offset, depth); #if 0 static const char kWhitespace[] = " "; @@ -686,6 +815,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('m', 'f', 'r', 'a'): case FOURCC('u', 'd', 't', 'a'): case FOURCC('i', 'l', 's', 't'): + case FOURCC('s', 'i', 'n', 'f'): + case FOURCC('s', 'c', 'h', 'i'): { if (chunk_type == FOURCC('s', 't', 'b', 'l')) { ALOGV("sampleTable chunk is %d bytes long.", (size_t)chunk_size); @@ -773,6 +904,75 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { break; } + case FOURCC('f', 'r', 'm', 'a'): + { + uint32_t original_fourcc; + if (mDataSource->readAt(data_offset, &original_fourcc, 4) < 4) { + return ERROR_IO; + } + original_fourcc = ntohl(original_fourcc); + ALOGV("read original format: %d", original_fourcc); + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc)); + uint32_t num_channels = 0; + uint32_t sample_rate = 0; + if (AdjustChannelsAndRate(original_fourcc, &num_channels, &sample_rate)) { + mLastTrack->meta->setInt32(kKeyChannelCount, num_channels); + mLastTrack->meta->setInt32(kKeySampleRate, sample_rate); + } + *offset += chunk_size; + break; + } + + case FOURCC('t', 'e', 'n', 'c'): + { + if (chunk_size < 32) { + return ERROR_MALFORMED; + } + + // tenc box contains 1 byte version, 3 byte flags, 3 byte default algorithm id, one byte + // default IV size, 16 bytes default KeyID + // (ISO 23001-7) + char buf[4]; + memset(buf, 0, 4); + if (mDataSource->readAt(data_offset + 4, buf + 1, 3) < 3) { + return ERROR_IO; + } + uint32_t defaultAlgorithmId = ntohl(*((int32_t*)buf)); + if (defaultAlgorithmId > 1) { + // only 0 (clear) and 1 (AES-128) are valid + return ERROR_MALFORMED; + } + + memset(buf, 0, 4); + if (mDataSource->readAt(data_offset + 7, buf + 3, 1) < 1) { + return ERROR_IO; + } + uint32_t defaultIVSize = ntohl(*((int32_t*)buf)); + + if ((defaultAlgorithmId == 0 && defaultIVSize != 0) || + (defaultAlgorithmId != 0 && defaultIVSize == 0)) { + // only unencrypted data must have 0 IV size + return ERROR_MALFORMED; + } else if (defaultIVSize != 0 && + defaultIVSize != 8 && + defaultIVSize != 16) { + // only supported sizes are 0, 8 and 16 + return ERROR_MALFORMED; + } + + uint8_t defaultKeyId[16]; + + if (mDataSource->readAt(data_offset + 8, &defaultKeyId, 16) < 16) { + return ERROR_IO; + } + + mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId); + mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize); + mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16); + *offset += chunk_size; + break; + } + case FOURCC('t', 'k', 'h', 'd'): { status_t err; @@ -784,6 +984,37 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { break; } + case FOURCC('p', 's', 's', 'h'): + { + PsshInfo pssh; + + if (mDataSource->readAt(data_offset + 4, &pssh.uuid, 16) < 16) { + return ERROR_IO; + } + + uint32_t psshdatalen = 0; + if 
(mDataSource->readAt(data_offset + 20, &psshdatalen, 4) < 4) { + return ERROR_IO; + } + pssh.datalen = ntohl(psshdatalen); + ALOGV("pssh data size: %d", pssh.datalen); + if (pssh.datalen + 20 > chunk_size) { + // pssh data length exceeds size of containing box + return ERROR_MALFORMED; + } + + pssh.data = new uint8_t[pssh.datalen]; + ALOGV("allocated pssh @ %p", pssh.data); + ssize_t requested = (ssize_t) pssh.datalen; + if (mDataSource->readAt(data_offset + 24, pssh.data, requested) < requested) { + return ERROR_IO; + } + mPssh.push_back(pssh); + + *offset += chunk_size; + break; + } + case FOURCC('m', 'd', 'h', 'd'): { if (chunk_data_size < 4) { @@ -816,7 +1047,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { mLastTrack->timescale = ntohl(timescale); - int64_t duration; + int64_t duration = 0; if (version == 1) { if (mDataSource->readAt( timescale_offset + 4, &duration, sizeof(duration)) @@ -825,13 +1056,16 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } duration = ntoh64(duration); } else { - int32_t duration32; + uint32_t duration32; if (mDataSource->readAt( timescale_offset + 4, &duration32, sizeof(duration32)) < (ssize_t)sizeof(duration32)) { return ERROR_IO; } - duration = ntohl(duration32); + // ffmpeg sets duration to -1, which is incorrect. + if (duration32 != 0xffffffff) { + duration = ntohl(duration32); + } } mLastTrack->meta->setInt64( kKeyDuration, (duration * 1000000) / mLastTrack->timescale); @@ -894,16 +1128,17 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { // For 3GPP timed text, there could be multiple tx3g boxes contain // multiple text display formats. These formats will be used to // display the timed text. + // For encrypted files, there may also be more than one entry. const char *mime; CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime)); - if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) { + if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) && + strcasecmp(mime, "application/octet-stream")) { // For now we only support a single type of media per track. 
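The 'pssh' handling above gathers every Protection System Specific Header box and, in readMetaData(), flattens them into a single kKeyPssh blob: for each entry it copies the 16-byte system ID together with the 4-byte (host-order) length, then the payload. A minimal, self-contained sketch of walking a blob with that layout (struct and function names here are illustrative, not part of the patch):

    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct PsshEntry {
        uint8_t systemId[16];
        std::vector<uint8_t> data;
    };

    // Walks a blob laid out as repeated [16-byte systemId][uint32 length][payload],
    // mirroring the 20-byte uuid+length copy performed above.
    static bool parsePsshBlob(const uint8_t *blob, size_t size,
                              std::vector<PsshEntry> *out) {
        size_t off = 0;
        while (off + 20 <= size) {
            PsshEntry e;
            memcpy(e.systemId, blob + off, 16);
            uint32_t len;
            memcpy(&len, blob + off + 16, 4);   // stored in host order by the extractor
            off += 20;
            if (len > size - off) {
                return false;                   // truncated entry
            }
            e.data.assign(blob + off, blob + off + len);
            off += len;
            out->push_back(e);
        }
        return off == size;
    }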
mLastTrack->skipTrack = true; *offset += chunk_size; break; } } - off64_t stop_offset = *offset + chunk_size; *offset = data_offset + 8; for (uint32_t i = 0; i < entry_count; ++i) { @@ -920,6 +1155,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } case FOURCC('m', 'p', '4', 'a'): + case FOURCC('e', 'n', 'c', 'a'): case FOURCC('s', 'a', 'm', 'r'): case FOURCC('s', 'a', 'w', 'b'): { @@ -935,29 +1171,18 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } uint16_t data_ref_index = U16_AT(&buffer[6]); - uint16_t num_channels = U16_AT(&buffer[16]); + uint32_t num_channels = U16_AT(&buffer[16]); uint16_t sample_size = U16_AT(&buffer[18]); uint32_t sample_rate = U32_AT(&buffer[24]) >> 16; - if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, - FourCC2MIME(chunk_type))) { - // AMR NB audio is always mono, 8kHz - num_channels = 1; - sample_rate = 8000; - } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, - FourCC2MIME(chunk_type))) { - // AMR WB audio is always mono, 16kHz - num_channels = 1; - sample_rate = 16000; + if (chunk_type != FOURCC('e', 'n', 'c', 'a')) { + // if the chunk type is enca, we'll get the type from the sinf/frma box later + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + AdjustChannelsAndRate(chunk_type, &num_channels, &sample_rate); } - -#if 0 - printf("*** coding='%s' %d channels, size %d, rate %d\n", + ALOGV("*** coding='%s' %d channels, size %d, rate %d\n", chunk, num_channels, sample_size, sample_rate); -#endif - - mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); mLastTrack->meta->setInt32(kKeyChannelCount, num_channels); mLastTrack->meta->setInt32(kKeySampleRate, sample_rate); @@ -977,6 +1202,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { } case FOURCC('m', 'p', '4', 'v'): + case FOURCC('e', 'n', 'c', 'v'): case FOURCC('s', '2', '6', '3'): case FOURCC('H', '2', '6', '3'): case FOURCC('h', '2', '6', '3'): @@ -999,7 +1225,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { uint16_t width = U16_AT(&buffer[6 + 18]); uint16_t height = U16_AT(&buffer[6 + 20]); - // The video sample is not stand-compliant if it has invalid dimension. + // The video sample is not standard-compliant if it has invalid dimension. // Use some default width and height value, and // let the decoder figure out the actual width and height (and thus // be prepared for INFO_FOMRAT_CHANGED event). @@ -1009,7 +1235,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { // printf("*** coding='%s' width=%d height=%d\n", // chunk, width, height); - mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + if (chunk_type != FOURCC('e', 'n', 'c', 'v')) { + // if the chunk type is encv, we'll get the type from the sinf/frma box later + mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); + } mLastTrack->meta->setInt32(kKeyWidth, width); mLastTrack->meta->setInt32(kKeyHeight, height); @@ -1075,11 +1304,23 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return err; } - // Assume that a given buffer only contains at most 10 fragments, - // each fragment originally prefixed with a 2 byte length will - // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion, - // and thus will grow by 2 bytes per fragment. 
- mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2); + if (max_size != 0) { + // Assume that a given buffer only contains at most 10 chunks, + // each chunk originally prefixed with a 2 byte length will + // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion, + // and thus will grow by 2 bytes per chunk. + mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2); + } else { + // No size was specified. Pick a conservatively large size. + int32_t width, height; + if (mLastTrack->meta->findInt32(kKeyWidth, &width) && + mLastTrack->meta->findInt32(kKeyHeight, &height)) { + mLastTrack->meta->setInt32(kKeyMaxInputSize, width * height * 3 / 2); + } else { + ALOGE("No width or height, assuming worst case 1080p"); + mLastTrack->meta->setInt32(kKeyMaxInputSize, 3110400); + } + } *offset += chunk_size; // Calculate average frame rate. @@ -1354,6 +1595,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('m', 'd', 'a', 't'): { + ALOGV("mdat chunk, drm: %d", mIsDrm); if (!mIsDrm) { *offset += chunk_size; break; @@ -1448,6 +1690,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { break; } + case FOURCC('s', 'i', 'd', 'x'): + { + parseSegmentIndex(data_offset, chunk_data_size); + *offset += chunk_size; + return UNKNOWN_ERROR; // stop parsing after sidx + } + default: { *offset += chunk_size; @@ -1458,6 +1707,125 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { return OK; } +status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) { + ALOGV("MPEG4Extractor::parseSegmentIndex"); + + if (size < 12) { + return -EINVAL; + } + + uint32_t flags; + if (!mDataSource->getUInt32(offset, &flags)) { + return ERROR_MALFORMED; + } + + uint32_t version = flags >> 24; + flags &= 0xffffff; + + ALOGV("sidx version %d", version); + + uint32_t referenceId; + if (!mDataSource->getUInt32(offset + 4, &referenceId)) { + return ERROR_MALFORMED; + } + + uint32_t timeScale; + if (!mDataSource->getUInt32(offset + 8, &timeScale)) { + return ERROR_MALFORMED; + } + ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale); + + uint64_t earliestPresentationTime; + uint64_t firstOffset; + + offset += 12; + size -= 12; + + if (version == 0) { + if (size < 8) { + return -EINVAL; + } + uint32_t tmp; + if (!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_MALFORMED; + } + earliestPresentationTime = tmp; + if (!mDataSource->getUInt32(offset + 4, &tmp)) { + return ERROR_MALFORMED; + } + firstOffset = tmp; + offset += 8; + size -= 8; + } else { + if (size < 16) { + return -EINVAL; + } + if (!mDataSource->getUInt64(offset, &earliestPresentationTime)) { + return ERROR_MALFORMED; + } + if (!mDataSource->getUInt64(offset + 8, &firstOffset)) { + return ERROR_MALFORMED; + } + offset += 16; + size -= 16; + } + ALOGV("sidx pres/off: %Ld/%Ld", earliestPresentationTime, firstOffset); + + if (size < 4) { + return -EINVAL; + } + + uint16_t referenceCount; + if (!mDataSource->getUInt16(offset + 2, &referenceCount)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + ALOGV("refcount: %d", referenceCount); + + if (size < referenceCount * 12) { + return -EINVAL; + } + + uint64_t total_duration = 0; + for (unsigned int i = 0; i < referenceCount; i++) { + uint32_t d1, d2, d3; + + if (!mDataSource->getUInt32(offset, &d1) || // size + !mDataSource->getUInt32(offset + 4, &d2) || // duration + !mDataSource->getUInt32(offset + 8, &d3)) { // flags + return ERROR_MALFORMED; + } + + if (d1 & 0x80000000) { + ALOGW("sub-sidx boxes not 
supported yet"); + } + bool sap = d3 & 0x80000000; + bool saptype = d3 >> 28; + if (!sap || saptype > 2) { + ALOGW("not a stream access point, or unsupported type"); + } + total_duration += d2; + offset += 12; + ALOGV(" item %d, %08x %08x %08x", i, d1, d2, d3); + SidxEntry se; + se.mSize = d1 & 0x7fffffff; + se.mDurationUs = 1000000LL * d2 / timeScale; + mSidxEntries.add(se); + } + + mSidxDuration = total_duration * 1000000 / timeScale; + ALOGV("duration: %lld", mSidxDuration); + + int64_t metaDuration; + if (!mLastTrack->meta->findInt64(kKeyDuration, &metaDuration) || metaDuration == 0) { + mLastTrack->meta->setInt64(kKeyDuration, mSidxDuration); + } + return OK; +} + + + status_t MPEG4Extractor::parseTrackHeader( off64_t data_offset, off64_t data_size) { if (data_size < 4) { @@ -1754,8 +2122,11 @@ sp<MediaSource> MPEG4Extractor::getTrack(size_t index) { return NULL; } + ALOGV("getTrack called, pssh: %d", mPssh.size()); + return new MPEG4Source( - track->meta, mDataSource, track->timescale, track->sampleTable); + track->meta, mDataSource, track->timescale, track->sampleTable, + mSidxEntries, mMoofOffset); } // static @@ -1852,17 +2223,30 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio( sampleRate = br.getBits(24); numChannels = br.getBits(4); } else { - static uint32_t kSamplingRate[] = { - 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, - 16000, 12000, 11025, 8000, 7350 - }; - - if (freqIndex == 13 || freqIndex == 14) { - return ERROR_MALFORMED; + numChannels = br.getBits(4); + if (objectType == 5) { + // SBR specific config per 14496-3 table 1.13 + freqIndex = br.getBits(4); + if (freqIndex == 15) { + if (csd_size < 8) { + return ERROR_MALFORMED; + } + sampleRate = br.getBits(24); + } } - sampleRate = kSamplingRate[freqIndex]; - numChannels = br.getBits(4); + if (sampleRate == 0) { + static uint32_t kSamplingRate[] = { + 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, + 16000, 12000, 11025, 8000, 7350 + }; + + if (freqIndex == 13 || freqIndex == 14) { + return ERROR_MALFORMED; + } + + sampleRate = kSamplingRate[freqIndex]; + } } if (numChannels == 0) { @@ -1898,12 +2282,23 @@ MPEG4Source::MPEG4Source( const sp<MetaData> &format, const sp<DataSource> &dataSource, int32_t timeScale, - const sp<SampleTable> &sampleTable) + const sp<SampleTable> &sampleTable, + Vector<SidxEntry> &sidx, + off64_t firstMoofOffset) : mFormat(format), mDataSource(dataSource), mTimescale(timeScale), mSampleTable(sampleTable), mCurrentSampleIndex(0), + mCurrentFragmentIndex(0), + mSegments(sidx), + mFirstMoofOffset(firstMoofOffset), + mCurrentMoofOffset(firstMoofOffset), + mCurrentTime(0), + mCurrentSampleInfoAllocSize(0), + mCurrentSampleInfoSizes(NULL), + mCurrentSampleInfoOffsetsAllocSize(0), + mCurrentSampleInfoOffsets(NULL), mIsAVC(false), mNALLengthSize(0), mStarted(false), @@ -1911,6 +2306,19 @@ MPEG4Source::MPEG4Source( mBuffer(NULL), mWantsNALFragments(false), mSrcBuffer(NULL) { + + mFormat->findInt32(kKeyCryptoMode, &mCryptoMode); + mDefaultIVSize = 0; + mFormat->findInt32(kKeyCryptoDefaultIVSize, &mDefaultIVSize); + uint32_t keytype; + const void *key; + size_t keysize; + if (mFormat->findData(kKeyCryptoKey, &keytype, &key, &keysize)) { + CHECK(keysize <= 16); + memset(mCryptoKey, 0, 16); + memcpy(mCryptoKey, key, keysize); + } + const char *mime; bool success = mFormat->findCString(kKeyMIMEType, &mime); CHECK(success); @@ -1931,12 +2339,21 @@ MPEG4Source::MPEG4Source( // The number of bytes used to encode the length of a NAL unit. 
mNALLengthSize = 1 + (ptr[4] & 3); } + + CHECK(format->findInt32(kKeyTrackID, &mTrackId)); + + if (mFirstMoofOffset != 0) { + off64_t offset = mFirstMoofOffset; + parseChunk(&offset); + } } MPEG4Source::~MPEG4Source() { if (mStarted) { stop(); } + free(mCurrentSampleInfoSizes); + free(mCurrentSampleInfoOffsets); } status_t MPEG4Source::start(MetaData *params) { @@ -1988,6 +2405,529 @@ status_t MPEG4Source::stop() { return OK; } +status_t MPEG4Source::parseChunk(off64_t *offset) { + uint32_t hdr[2]; + if (mDataSource->readAt(*offset, hdr, 8) < 8) { + return ERROR_IO; + } + uint64_t chunk_size = ntohl(hdr[0]); + uint32_t chunk_type = ntohl(hdr[1]); + off64_t data_offset = *offset + 8; + + if (chunk_size == 1) { + if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) { + return ERROR_IO; + } + chunk_size = ntoh64(chunk_size); + data_offset += 8; + + if (chunk_size < 16) { + // The smallest valid chunk is 16 bytes long in this case. + return ERROR_MALFORMED; + } + } else if (chunk_size < 8) { + // The smallest valid chunk is 8 bytes long. + return ERROR_MALFORMED; + } + + char chunk[5]; + MakeFourCCString(chunk_type, chunk); + ALOGV("MPEG4Source chunk %s @ %llx", chunk, *offset); + + off64_t chunk_data_size = *offset + chunk_size - data_offset; + + switch(chunk_type) { + + case FOURCC('t', 'r', 'a', 'f'): + case FOURCC('m', 'o', 'o', 'f'): { + off64_t stop_offset = *offset + chunk_size; + *offset = data_offset; + while (*offset < stop_offset) { + status_t err = parseChunk(offset); + if (err != OK) { + return err; + } + } + if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { + // *offset points to the mdat box following this moof + parseChunk(offset); // doesn't actually parse it, just updates offset + mNextMoofOffset = *offset; + } + break; + } + + case FOURCC('t', 'f', 'h', 'd'): { + status_t err; + if ((err = parseTrackFragmentHeader(data_offset, chunk_data_size)) != OK) { + return err; + } + *offset += chunk_size; + break; + } + + case FOURCC('t', 'r', 'u', 'n'): { + status_t err; + if (mLastParsedTrackId == mTrackId) { + if ((err = parseTrackFragmentRun(data_offset, chunk_data_size)) != OK) { + return err; + } + } + + *offset += chunk_size; + break; + } + + case FOURCC('s', 'a', 'i', 'z'): { + status_t err; + if ((err = parseSampleAuxiliaryInformationSizes(data_offset, chunk_data_size)) != OK) { + return err; + } + *offset += chunk_size; + break; + } + case FOURCC('s', 'a', 'i', 'o'): { + status_t err; + if ((err = parseSampleAuxiliaryInformationOffsets(data_offset, chunk_data_size)) != OK) { + return err; + } + *offset += chunk_size; + break; + } + + case FOURCC('m', 'd', 'a', 't'): { + // parse DRM info if present + ALOGV("MPEG4Source::parseChunk mdat"); + // if saiz/saoi was previously observed, do something with the sampleinfos + *offset += chunk_size; + break; + } + + default: { + *offset += chunk_size; + break; + } + } + return OK; +} + +status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(off64_t offset, off64_t size) { + ALOGV("parseSampleAuxiliaryInformationSizes"); + // 14496-12 8.7.12 + uint8_t version; + if (mDataSource->readAt( + offset, &version, sizeof(version)) + < (ssize_t)sizeof(version)) { + return ERROR_IO; + } + + if (version != 0) { + return ERROR_UNSUPPORTED; + } + offset++; + + uint32_t flags; + if (!mDataSource->getUInt24(offset, &flags)) { + return ERROR_IO; + } + offset += 3; + + if (flags & 1) { + uint32_t tmp; + if (!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_MALFORMED; + } + mCurrentAuxInfoType = tmp; + offset += 4; + if 
(!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_MALFORMED; + } + mCurrentAuxInfoTypeParameter = tmp; + offset += 4; + } + + uint8_t defsize; + if (mDataSource->readAt(offset, &defsize, 1) != 1) { + return ERROR_MALFORMED; + } + mCurrentDefaultSampleInfoSize = defsize; + offset++; + + uint32_t smplcnt; + if (!mDataSource->getUInt32(offset, &smplcnt)) { + return ERROR_MALFORMED; + } + mCurrentSampleInfoCount = smplcnt; + offset += 4; + + if (mCurrentDefaultSampleInfoSize != 0) { + ALOGV("@@@@ using default sample info size of %d", mCurrentDefaultSampleInfoSize); + return OK; + } + if (smplcnt > mCurrentSampleInfoAllocSize) { + mCurrentSampleInfoSizes = (uint8_t*) realloc(mCurrentSampleInfoSizes, smplcnt); + mCurrentSampleInfoAllocSize = smplcnt; + } + + mDataSource->readAt(offset, mCurrentSampleInfoSizes, smplcnt); + return OK; +} + +status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(off64_t offset, off64_t size) { + ALOGV("parseSampleAuxiliaryInformationOffsets"); + // 14496-12 8.7.13 + uint8_t version; + if (mDataSource->readAt(offset, &version, sizeof(version)) != 1) { + return ERROR_IO; + } + offset++; + + uint32_t flags; + if (!mDataSource->getUInt24(offset, &flags)) { + return ERROR_IO; + } + offset += 3; + + uint32_t entrycount; + if (!mDataSource->getUInt32(offset, &entrycount)) { + return ERROR_IO; + } + offset += 4; + + if (entrycount > mCurrentSampleInfoOffsetsAllocSize) { + mCurrentSampleInfoOffsets = (uint64_t*) realloc(mCurrentSampleInfoOffsets, entrycount * 8); + mCurrentSampleInfoOffsetsAllocSize = entrycount; + } + mCurrentSampleInfoOffsetCount = entrycount; + + for (size_t i = 0; i < entrycount; i++) { + if (version == 0) { + uint32_t tmp; + if (!mDataSource->getUInt32(offset, &tmp)) { + return ERROR_IO; + } + mCurrentSampleInfoOffsets[i] = tmp; + offset += 4; + } else { + uint64_t tmp; + if (!mDataSource->getUInt64(offset, &tmp)) { + return ERROR_IO; + } + mCurrentSampleInfoOffsets[i] = tmp; + offset += 8; + } + } + + // parse clear/encrypted data + + off64_t drmoffset = mCurrentSampleInfoOffsets[0]; // from moof + + drmoffset += mCurrentMoofOffset; + int ivlength; + CHECK(mFormat->findInt32(kKeyCryptoDefaultIVSize, &ivlength)); + + // read CencSampleAuxiliaryDataFormats + for (size_t i = 0; i < mCurrentSampleInfoCount; i++) { + Sample *smpl = &mCurrentSamples.editItemAt(i); + + memset(smpl->iv, 0, 16); + if (mDataSource->readAt(drmoffset, smpl->iv, ivlength) != ivlength) { + return ERROR_IO; + } + + drmoffset += ivlength; + + int32_t smplinfosize = mCurrentDefaultSampleInfoSize; + if (smplinfosize == 0) { + smplinfosize = mCurrentSampleInfoSizes[i]; + } + if (smplinfosize > ivlength) { + uint16_t numsubsamples; + if (!mDataSource->getUInt16(drmoffset, &numsubsamples)) { + return ERROR_IO; + } + drmoffset += 2; + for (size_t j = 0; j < numsubsamples; j++) { + uint16_t numclear; + uint32_t numencrypted; + if (!mDataSource->getUInt16(drmoffset, &numclear)) { + return ERROR_IO; + } + drmoffset += 2; + if (!mDataSource->getUInt32(drmoffset, &numencrypted)) { + return ERROR_IO; + } + drmoffset += 4; + smpl->clearsizes.add(numclear); + smpl->encryptedsizes.add(numencrypted); + } + } else { + smpl->clearsizes.add(0); + smpl->encryptedsizes.add(smpl->size); + } + } + + + return OK; +} + +status_t MPEG4Source::parseTrackFragmentHeader(off64_t offset, off64_t size) { + + if (size < 8) { + return -EINVAL; + } + + uint32_t flags; + if (!mDataSource->getUInt32(offset, &flags)) { // actually version + flags + return ERROR_MALFORMED; + } + + if (flags & 0xff000000) { + 
return -EINVAL; + } + + if (!mDataSource->getUInt32(offset + 4, (uint32_t*)&mLastParsedTrackId)) { + return ERROR_MALFORMED; + } + + if (mLastParsedTrackId != mTrackId) { + // this is not the right track, skip it + return OK; + } + + mTrackFragmentHeaderInfo.mFlags = flags; + mTrackFragmentHeaderInfo.mTrackID = mLastParsedTrackId; + offset += 8; + size -= 8; + + ALOGV("fragment header: %08x %08x", flags, mTrackFragmentHeaderInfo.mTrackID); + + if (flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent) { + if (size < 8) { + return -EINVAL; + } + + if (!mDataSource->getUInt64(offset, &mTrackFragmentHeaderInfo.mBaseDataOffset)) { + return ERROR_MALFORMED; + } + offset += 8; + size -= 8; + } + + if (flags & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mSampleDescriptionIndex)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (flags & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleDuration)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (flags & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleSize)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (flags & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleFlags)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + if (!(flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent)) { + mTrackFragmentHeaderInfo.mBaseDataOffset = mCurrentMoofOffset; + } + + mTrackFragmentHeaderInfo.mDataOffset = 0; + return OK; +} + +status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) { + + ALOGV("MPEG4Extractor::parseTrackFragmentRun"); + if (size < 8) { + return -EINVAL; + } + + enum { + kDataOffsetPresent = 0x01, + kFirstSampleFlagsPresent = 0x04, + kSampleDurationPresent = 0x100, + kSampleSizePresent = 0x200, + kSampleFlagsPresent = 0x400, + kSampleCompositionTimeOffsetPresent = 0x800, + }; + + uint32_t flags; + if (!mDataSource->getUInt32(offset, &flags)) { + return ERROR_MALFORMED; + } + ALOGV("fragment run flags: %08x", flags); + + if (flags & 0xff000000) { + return -EINVAL; + } + + if ((flags & kFirstSampleFlagsPresent) && (flags & kSampleFlagsPresent)) { + // These two shall not be used together. 
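The tfhd parsing above reads track_ID and then a flag-gated sequence of optional fields, consuming 4 or 8 bytes only when the corresponding k...Present bit is set. A condensed, self-contained sketch of that walk over a tfhd payload whose version/flags word has already been split off (field names mirror the TrackFragmentHeaderInfo declared earlier; the byte helpers are illustrative):

    #include <cstdint>
    #include <cstddef>

    struct TfhdDefaults {
        uint32_t trackId = 0;
        uint64_t baseDataOffset = 0;
        uint32_t sampleDescriptionIndex = 0;
        uint32_t defaultSampleDuration = 0;
        uint32_t defaultSampleSize = 0;
        uint32_t defaultSampleFlags = 0;
    };

    static uint32_t be32(const uint8_t *p) {
        return (uint32_t(p[0]) << 24) | (p[1] << 16) | (p[2] << 8) | p[3];
    }
    static uint64_t be64(const uint8_t *p) {
        return (uint64_t(be32(p)) << 32) | be32(p + 4);
    }

    // p points just past the 4-byte version/flags word; flags are its low 24 bits.
    static bool parseTfhdBody(const uint8_t *p, size_t size, uint32_t flags,
                              TfhdDefaults *out) {
        if (size < 4) return false;
        size_t off = 0;
        out->trackId = be32(p);
        off += 4;
        if (flags & 0x01) {   // base-data-offset present
            if (size - off < 8) return false;
            out->baseDataOffset = be64(p + off); off += 8;
        }
        if (flags & 0x02) {   // sample-description-index present
            if (size - off < 4) return false;
            out->sampleDescriptionIndex = be32(p + off); off += 4;
        }
        if (flags & 0x08) {   // default-sample-duration present
            if (size - off < 4) return false;
            out->defaultSampleDuration = be32(p + off); off += 4;
        }
        if (flags & 0x10) {   // default-sample-size present
            if (size - off < 4) return false;
            out->defaultSampleSize = be32(p + off); off += 4;
        }
        if (flags & 0x20) {   // default-sample-flags present
            if (size - off < 4) return false;
            out->defaultSampleFlags = be32(p + off); off += 4;
        }
        return true;
    }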
+ return -EINVAL; + } + + uint32_t sampleCount; + if (!mDataSource->getUInt32(offset + 4, &sampleCount)) { + return ERROR_MALFORMED; + } + offset += 8; + size -= 8; + + uint64_t dataOffset = mTrackFragmentHeaderInfo.mDataOffset; + + uint32_t firstSampleFlags = 0; + + if (flags & kDataOffsetPresent) { + if (size < 4) { + return -EINVAL; + } + + int32_t dataOffsetDelta; + if (!mDataSource->getUInt32(offset, (uint32_t*)&dataOffsetDelta)) { + return ERROR_MALFORMED; + } + + dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta; + + offset += 4; + size -= 4; + } + + if (flags & kFirstSampleFlagsPresent) { + if (size < 4) { + return -EINVAL; + } + + if (!mDataSource->getUInt32(offset, &firstSampleFlags)) { + return ERROR_MALFORMED; + } + offset += 4; + size -= 4; + } + + uint32_t sampleDuration = 0, sampleSize = 0, sampleFlags = 0, + sampleCtsOffset = 0; + + size_t bytesPerSample = 0; + if (flags & kSampleDurationPresent) { + bytesPerSample += 4; + } else if (mTrackFragmentHeaderInfo.mFlags + & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) { + sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration; + } else { + sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration; + } + + if (flags & kSampleSizePresent) { + bytesPerSample += 4; + } else if (mTrackFragmentHeaderInfo.mFlags + & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) { + sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize; + } else { + sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize; + } + + if (flags & kSampleFlagsPresent) { + bytesPerSample += 4; + } else if (mTrackFragmentHeaderInfo.mFlags + & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) { + sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags; + } else { + sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags; + } + + if (flags & kSampleCompositionTimeOffsetPresent) { + bytesPerSample += 4; + } else { + sampleCtsOffset = 0; + } + + if (size < sampleCount * bytesPerSample) { + return -EINVAL; + } + + Sample tmp; + for (uint32_t i = 0; i < sampleCount; ++i) { + if (flags & kSampleDurationPresent) { + if (!mDataSource->getUInt32(offset, &sampleDuration)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + if (flags & kSampleSizePresent) { + if (!mDataSource->getUInt32(offset, &sampleSize)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + if (flags & kSampleFlagsPresent) { + if (!mDataSource->getUInt32(offset, &sampleFlags)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + if (flags & kSampleCompositionTimeOffsetPresent) { + if (!mDataSource->getUInt32(offset, &sampleCtsOffset)) { + return ERROR_MALFORMED; + } + offset += 4; + } + + ALOGV("adding sample %d at offset 0x%08llx, size %u, duration %u, " + " flags 0x%08x", i + 1, + dataOffset, sampleSize, sampleDuration, + (flags & kFirstSampleFlagsPresent) && i == 0 + ? firstSampleFlags : sampleFlags); + tmp.offset = dataOffset; + tmp.size = sampleSize; + tmp.duration = sampleDuration; + mCurrentSamples.add(tmp); + + dataOffset += sampleSize; + } + + mTrackFragmentHeaderInfo.mDataOffset = dataOffset; + + return OK; +} + sp<MetaData> MPEG4Source::getFormat() { Mutex::Autolock autoLock(mLock); @@ -2019,6 +2959,10 @@ status_t MPEG4Source::read( CHECK(mStarted); + if (mFirstMoofOffset > 0) { + return fragmentedRead(out, options); + } + *out = NULL; int64_t targetSampleTimeUs = -1; @@ -2076,6 +3020,7 @@ status_t MPEG4Source::read( // we had seeked to the end of stream, ending normally. 
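parseTrackFragmentRun() above expands one trun box into explicit samples: per-sample duration, size and flags fall back to the tfhd defaults when absent, and each sample's file offset is the running data offset advanced by the previous sample's size. A condensed sketch of that expansion (types and names are illustrative; the real code also carries per-sample flags and composition-time offsets):

    #include <cstdint>
    #include <vector>

    struct RunSample { uint64_t offset; uint32_t size; uint32_t duration; };

    // perSampleDuration/perSampleSize hold trun values when the corresponding
    // presence flags were set; otherwise they are empty and the defaults apply.
    static std::vector<RunSample> expandRun(
            uint64_t dataOffset, uint32_t sampleCount,
            uint32_t defaultDuration, uint32_t defaultSize,
            const std::vector<uint32_t> &perSampleDuration,
            const std::vector<uint32_t> &perSampleSize) {
        std::vector<RunSample> out;
        out.reserve(sampleCount);
        for (uint32_t i = 0; i < sampleCount; ++i) {
            RunSample s;
            s.duration = i < perSampleDuration.size() ? perSampleDuration[i] : defaultDuration;
            s.size     = i < perSampleSize.size()     ? perSampleSize[i]     : defaultSize;
            s.offset   = dataOffset;
            dataOffset += s.size;   // samples in a run are contiguous in the file
            out.push_back(s);
        }
        return out;
    }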
err = ERROR_END_OF_STREAM; } + ALOGV("end of stream"); return err; } @@ -2286,6 +3231,268 @@ status_t MPEG4Source::read( } } +status_t MPEG4Source::fragmentedRead( + MediaBuffer **out, const ReadOptions *options) { + + ALOGV("MPEG4Source::fragmentedRead"); + + CHECK(mStarted); + + *out = NULL; + + int64_t targetSampleTimeUs = -1; + + int64_t seekTimeUs; + ReadOptions::SeekMode mode; + if (options && options->getSeekTo(&seekTimeUs, &mode)) { + + int numSidxEntries = mSegments.size(); + if (numSidxEntries != 0) { + int64_t totalTime = 0; + off64_t totalOffset = mFirstMoofOffset; + for (int i = 0; i < numSidxEntries; i++) { + const SidxEntry *se = &mSegments[i]; + if (totalTime + se->mDurationUs > seekTimeUs) { + // The requested time is somewhere in this segment + if ((mode == ReadOptions::SEEK_NEXT_SYNC) || + (mode == ReadOptions::SEEK_CLOSEST_SYNC && + (seekTimeUs - totalTime) > (totalTime + se->mDurationUs - seekTimeUs))) { + // requested next sync, or closest sync and it was closer to the end of + // this segment + totalTime += se->mDurationUs; + totalOffset += se->mSize; + } + break; + } + totalTime += se->mDurationUs; + totalOffset += se->mSize; + } + mCurrentMoofOffset = totalOffset; + mCurrentSamples.clear(); + mCurrentSampleIndex = 0; + parseChunk(&totalOffset); + mCurrentTime = totalTime * mTimescale / 1000000ll; + } + + if (mBuffer != NULL) { + mBuffer->release(); + mBuffer = NULL; + } + + // fall through + } + + off64_t offset = 0; + size_t size; + uint32_t cts = 0; + bool isSyncSample = false; + bool newBuffer = false; + if (mBuffer == NULL) { + newBuffer = true; + + if (mCurrentSampleIndex >= mCurrentSamples.size()) { + // move to next fragment + Sample lastSample = mCurrentSamples[mCurrentSamples.size() - 1]; + off64_t nextMoof = mNextMoofOffset; // lastSample.offset + lastSample.size; + mCurrentMoofOffset = nextMoof; + mCurrentSamples.clear(); + mCurrentSampleIndex = 0; + parseChunk(&nextMoof); + if (mCurrentSampleIndex >= mCurrentSamples.size()) { + return ERROR_END_OF_STREAM; + } + } + + const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex]; + offset = smpl->offset; + size = smpl->size; + cts = mCurrentTime; + mCurrentTime += smpl->duration; + isSyncSample = (mCurrentSampleIndex == 0); // XXX + + status_t err = mGroup->acquire_buffer(&mBuffer); + + if (err != OK) { + CHECK(mBuffer == NULL); + ALOGV("acquire_buffer returned %d", err); + return err; + } + } + + const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex]; + const sp<MetaData> bufmeta = mBuffer->meta_data(); + bufmeta->clear(); + if (smpl->encryptedsizes.size()) { + // store clear/encrypted lengths in metadata + bufmeta->setData(kKeyPlainSizes, 0, + smpl->clearsizes.array(), smpl->clearsizes.size() * 4); + bufmeta->setData(kKeyEncryptedSizes, 0, + smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4); + bufmeta->setData(kKeyCryptoIV, 0, smpl->iv, 16); // use 16 or the actual size? 
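The seek path above maps the requested time onto a fragment by walking the sidx entries, accumulating both duration and byte size, and stepping past the matching segment for SEEK_NEXT_SYNC (or for SEEK_CLOSEST_SYNC when the target lies nearer the segment's end). A standalone sketch of that accumulation (SidxEntry fields mirror the extractor's; the enum stands in for the ReadOptions seek modes):

    #include <cstdint>
    #include <cstddef>
    #include <vector>

    struct SidxEntry { size_t mSize; uint32_t mDurationUs; };
    enum SeekMode { SEEK_PREVIOUS_SYNC, SEEK_NEXT_SYNC, SEEK_CLOSEST_SYNC };

    // Returns the moof offset to parse next; *segmentTimeUs receives the
    // presentation time at which that segment starts.
    static int64_t seekToSegment(const std::vector<SidxEntry> &sidx,
                                 int64_t firstMoofOffset, int64_t seekTimeUs,
                                 SeekMode mode, int64_t *segmentTimeUs) {
        int64_t totalTime = 0;
        int64_t totalOffset = firstMoofOffset;
        for (const SidxEntry &se : sidx) {
            if (totalTime + se.mDurationUs > seekTimeUs) {
                bool nearerToEnd =
                    (seekTimeUs - totalTime) > (totalTime + se.mDurationUs - seekTimeUs);
                if (mode == SEEK_NEXT_SYNC ||
                        (mode == SEEK_CLOSEST_SYNC && nearerToEnd)) {
                    totalTime += se.mDurationUs;   // land on the following segment
                    totalOffset += se.mSize;
                }
                break;
            }
            totalTime += se.mDurationUs;
            totalOffset += se.mSize;
        }
        *segmentTimeUs = totalTime;
        return totalOffset;
    }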
+ bufmeta->setInt32(kKeyCryptoDefaultIVSize, mDefaultIVSize); + bufmeta->setInt32(kKeyCryptoMode, mCryptoMode); + bufmeta->setData(kKeyCryptoKey, 0, mCryptoKey, 16); + } + + if (!mIsAVC || mWantsNALFragments) { + if (newBuffer) { + ssize_t num_bytes_read = + mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size); + + if (num_bytes_read < (ssize_t)size) { + mBuffer->release(); + mBuffer = NULL; + + ALOGV("i/o error"); + return ERROR_IO; + } + + CHECK(mBuffer != NULL); + mBuffer->set_range(0, size); + mBuffer->meta_data()->setInt64( + kKeyTime, ((int64_t)cts * 1000000) / mTimescale); + + if (targetSampleTimeUs >= 0) { + mBuffer->meta_data()->setInt64( + kKeyTargetTime, targetSampleTimeUs); + } + + if (isSyncSample) { + mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); + } + + ++mCurrentSampleIndex; + } + + if (!mIsAVC) { + *out = mBuffer; + mBuffer = NULL; + + return OK; + } + + // Each NAL unit is split up into its constituent fragments and + // each one of them returned in its own buffer. + + CHECK(mBuffer->range_length() >= mNALLengthSize); + + const uint8_t *src = + (const uint8_t *)mBuffer->data() + mBuffer->range_offset(); + + size_t nal_size = parseNALSize(src); + if (mBuffer->range_length() < mNALLengthSize + nal_size) { + ALOGE("incomplete NAL unit."); + + mBuffer->release(); + mBuffer = NULL; + + return ERROR_MALFORMED; + } + + MediaBuffer *clone = mBuffer->clone(); + CHECK(clone != NULL); + clone->set_range(mBuffer->range_offset() + mNALLengthSize, nal_size); + + CHECK(mBuffer != NULL); + mBuffer->set_range( + mBuffer->range_offset() + mNALLengthSize + nal_size, + mBuffer->range_length() - mNALLengthSize - nal_size); + + if (mBuffer->range_length() == 0) { + mBuffer->release(); + mBuffer = NULL; + } + + *out = clone; + + return OK; + } else { + ALOGV("whole NAL"); + // Whole NAL units are returned but each fragment is prefixed by + // the start code (0x00 00 00 01). 
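The branch that follows rewrites each length-prefixed NAL unit into an Annex-B unit by substituting the 4-byte start code for the length prefix, which is why the output can grow when the prefix is shorter than 4 bytes. A self-contained sketch of the same conversion (helper name is illustrative):

    #include <cstdint>
    #include <cstddef>
    #include <vector>

    // Converts a buffer of [N-byte length][NAL] units into Annex-B
    // [00 00 00 01][NAL] units; returns false on a malformed prefix.
    static bool lengthPrefixedToAnnexB(const uint8_t *src, size_t size,
                                       size_t lengthSize,
                                       std::vector<uint8_t> *dst) {
        static const uint8_t kStartCode[4] = { 0, 0, 0, 1 };
        size_t off = 0;
        while (off < size) {
            if (off + lengthSize > size) return false;
            uint32_t nalLength = 0;
            for (size_t i = 0; i < lengthSize; ++i) {
                nalLength = (nalLength << 8) | src[off + i];
            }
            off += lengthSize;
            if (nalLength > size - off) return false;
            if (nalLength == 0) continue;   // skip empty units, as the reader does
            dst->insert(dst->end(), kStartCode, kStartCode + 4);
            dst->insert(dst->end(), src + off, src + off + nalLength);
            off += nalLength;
        }
        return true;
    }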
+ ssize_t num_bytes_read = 0; + int32_t drm = 0; + bool usesDRM = (mFormat->findInt32(kKeyIsDRM, &drm) && drm != 0); + if (usesDRM) { + num_bytes_read = + mDataSource->readAt(offset, (uint8_t*)mBuffer->data(), size); + } else { + num_bytes_read = mDataSource->readAt(offset, mSrcBuffer, size); + } + + if (num_bytes_read < (ssize_t)size) { + mBuffer->release(); + mBuffer = NULL; + + ALOGV("i/o error"); + return ERROR_IO; + } + + if (usesDRM) { + CHECK(mBuffer != NULL); + mBuffer->set_range(0, size); + + } else { + uint8_t *dstData = (uint8_t *)mBuffer->data(); + size_t srcOffset = 0; + size_t dstOffset = 0; + + while (srcOffset < size) { + bool isMalFormed = (srcOffset + mNALLengthSize > size); + size_t nalLength = 0; + if (!isMalFormed) { + nalLength = parseNALSize(&mSrcBuffer[srcOffset]); + srcOffset += mNALLengthSize; + isMalFormed = srcOffset + nalLength > size; + } + + if (isMalFormed) { + ALOGE("Video is malformed"); + mBuffer->release(); + mBuffer = NULL; + return ERROR_MALFORMED; + } + + if (nalLength == 0) { + continue; + } + + CHECK(dstOffset + 4 <= mBuffer->size()); + + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 0; + dstData[dstOffset++] = 1; + memcpy(&dstData[dstOffset], &mSrcBuffer[srcOffset], nalLength); + srcOffset += nalLength; + dstOffset += nalLength; + } + CHECK_EQ(srcOffset, size); + CHECK(mBuffer != NULL); + mBuffer->set_range(0, dstOffset); + } + + mBuffer->meta_data()->setInt64( + kKeyTime, ((int64_t)cts * 1000000) / mTimescale); + + if (targetSampleTimeUs >= 0) { + mBuffer->meta_data()->setInt64( + kKeyTargetTime, targetSampleTimeUs); + } + + if (isSyncSample) { + mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); + } + + ++mCurrentSampleIndex; + + *out = mBuffer; + mBuffer = NULL; + + return OK; + } +} + MPEG4Extractor::Track *MPEG4Extractor::findTrackByMimePrefix( const char *mimePrefix) { for (Track *track = mFirstTrack; track != NULL; track = track->next) { @@ -2398,6 +3605,9 @@ static bool BetterSniffMPEG4( off64_t chunkDataSize = offset + chunkSize - chunkDataOffset; + char chunkstring[5]; + MakeFourCCString(chunkType, chunkstring); + ALOGV("saw chunk type %s, size %lld @ %lld", chunkstring, chunkSize, offset); switch (chunkType) { case FOURCC('f', 't', 'y', 'p'): { diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 326930f..a0f17b5 100755..100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -212,7 +212,6 @@ private: int64_t mTrackDurationUs; int64_t mMaxChunkDurationUs; - bool mIsRealTimeRecording; int64_t mEstimatedTrackSizeBytes; int64_t mMdatSizeBytes; int32_t mTimeScale; @@ -335,6 +334,7 @@ private: MPEG4Writer::MPEG4Writer(const char *filename) : mFd(-1), mInitCheck(NO_INIT), + mIsRealTimeRecording(true), mUse4ByteNalLength(true), mUse32BitOffset(true), mIsFileSizeLimitExplicitlyRequested(false), @@ -359,6 +359,7 @@ MPEG4Writer::MPEG4Writer(const char *filename) MPEG4Writer::MPEG4Writer(int fd) : mFd(dup(fd)), mInitCheck(mFd < 0? NO_INIT: OK), + mIsRealTimeRecording(true), mUse4ByteNalLength(true), mUse32BitOffset(true), mIsFileSizeLimitExplicitlyRequested(false), @@ -428,6 +429,42 @@ status_t MPEG4Writer::addSource(const sp<MediaSource> &source) { ALOGE("Attempt to add source AFTER recording is started"); return UNKNOWN_ERROR; } + + // At most 2 tracks can be supported. 
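The addSource() checks added here cap a recording at two tracks, at most one audio and one video, and reject anything that is neither. A hedged usage sketch of a writer set up within those limits (the sources are assumed to be MediaSource implementations such as encoder outputs; names are illustrative):

    #include <media/stagefright/MPEG4Writer.h>
    #include <media/stagefright/MediaSource.h>

    static status_t setupWriter(int fd,
                                const sp<MediaSource> &videoSource,
                                const sp<MediaSource> &audioSource) {
        sp<MPEG4Writer> writer = new MPEG4Writer(fd);

        status_t err = writer->addSource(videoSource);   // one video track
        if (err != OK) return err;

        err = writer->addSource(audioSource);            // one audio track
        if (err != OK) return err;

        // A third source, or a second source of the same type, would now be
        // rejected with ERROR_UNSUPPORTED by the checks above.
        return writer->start();
    }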
+ if (mTracks.size() >= 2) { + ALOGE("Too many tracks (%d) to add", mTracks.size()); + return ERROR_UNSUPPORTED; + } + + CHECK(source.get() != NULL); + + // A track of type other than video or audio is not supported. + const char *mime; + source->getFormat()->findCString(kKeyMIMEType, &mime); + bool isAudio = !strncasecmp(mime, "audio/", 6); + bool isVideo = !strncasecmp(mime, "video/", 6); + if (!isAudio && !isVideo) { + ALOGE("Track (%s) other than video or audio is not supported", + mime); + return ERROR_UNSUPPORTED; + } + + // At this point, we know the track to be added is either + // video or audio. Thus, we only need to check whether it + // is an audio track or not (if it is not, then it must be + // a video track). + + // No more than one video or one audio track is supported. + for (List<Track*>::iterator it = mTracks.begin(); + it != mTracks.end(); ++it) { + if ((*it)->isAudio() == isAudio) { + ALOGE("%s track already exists", isAudio? "Audio": "Video"); + return ERROR_UNSUPPORTED; + } + } + + // This is the first track of either audio or video. + // Go ahead to add the track. Track *track = new Track(this, source, 1 + mTracks.size()); mTracks.push_back(track); @@ -435,6 +472,11 @@ status_t MPEG4Writer::addSource(const sp<MediaSource> &source) { } status_t MPEG4Writer::startTracks(MetaData *params) { + if (mTracks.empty()) { + ALOGE("No source added"); + return INVALID_OPERATION; + } + for (List<Track *>::iterator it = mTracks.begin(); it != mTracks.end(); ++it) { status_t err = (*it)->start(params); @@ -555,6 +597,11 @@ status_t MPEG4Writer::start(MetaData *param) { mUse4ByteNalLength = false; } + int32_t isRealTimeRecording; + if (param && param->findInt32(kKeyRealTimeRecording, &isRealTimeRecording)) { + mIsRealTimeRecording = isRealTimeRecording; + } + mStartTimestampUs = -1; if (mStarted) { @@ -575,13 +622,50 @@ status_t MPEG4Writer::start(MetaData *param) { /* * When the requested file size limit is small, the priority * is to meet the file size limit requirement, rather than - * to make the file streamable. + * to make the file streamable. mStreamableFile does not tell + * whether the actual recorded file is streamable or not. */ mStreamableFile = (mMaxFileSizeLimitBytes != 0 && mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes); - mWriteMoovBoxToMemory = mStreamableFile; + /* + * mWriteMoovBoxToMemory is true if the amount of data in moov box is + * smaller than the reserved free space at the beginning of a file, AND + * when the content of moov box is constructed. Note that video/audio + * frame data is always written to the file but not in the memory. + * + * Before stop()/reset() is called, mWriteMoovBoxToMemory is always + * false. When reset() is called at the end of a recording session, + * Moov box needs to be constructed. + * + * 1) Right before a moov box is constructed, mWriteMoovBoxToMemory + * to set to mStreamableFile so that if + * the file is intended to be streamable, it is set to true; + * otherwise, it is set to false. When the value is set to false, + * all the content of the moov box is written immediately to + * the end of the file. When the value is set to true, all the + * content of the moov box is written to an in-memory cache, + * mMoovBoxBuffer, util the following condition happens. Note + * that the size of the in-memory cache is the same as the + * reserved free space at the beginning of the file. + * + * 2) While the data of the moov box is written to an in-memory + * cache, the data size is checked against the reserved space. 
+ * If the data size surpasses the reserved space, subsequent moov + * data could no longer be hold in the in-memory cache. This also + * indicates that the reserved space was too small. At this point, + * _all_ moov data must be written to the end of the file. + * mWriteMoovBoxToMemory must be set to false to direct the write + * to the file. + * + * 3) If the data size in moov box is smaller than the reserved + * space after moov box is completely constructed, the in-memory + * cache copy of the moov box is written to the reserved free + * space. Thus, immediately after the moov is completedly + * constructed, mWriteMoovBoxToMemory is always set to false. + */ + mWriteMoovBoxToMemory = false; mMoovBoxBuffer = NULL; mMoovBoxBufferOffset = 0; @@ -786,15 +870,25 @@ status_t MPEG4Writer::reset() { } lseek64(mFd, mOffset, SEEK_SET); - const off64_t moovOffset = mOffset; - mWriteMoovBoxToMemory = mStreamableFile; - mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize); + // Construct moov box now mMoovBoxBufferOffset = 0; - CHECK(mMoovBoxBuffer != NULL); + mWriteMoovBoxToMemory = mStreamableFile; + if (mWriteMoovBoxToMemory) { + // There is no need to allocate in-memory cache + // for moov box if the file is not streamable. + + mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize); + CHECK(mMoovBoxBuffer != NULL); + } writeMoovBox(maxDurationUs); - mWriteMoovBoxToMemory = false; - if (mStreamableFile) { + // mWriteMoovBoxToMemory could be set to false in + // MPEG4Writer::write() method + if (mWriteMoovBoxToMemory) { + mWriteMoovBoxToMemory = false; + // Content of the moov box is saved in the cache, and the in-memory + // moov box needs to be written to the file in a single shot. + CHECK_LE(mMoovBoxBufferOffset + 8, mEstimatedMoovBoxSize); // Moov box @@ -806,13 +900,15 @@ status_t MPEG4Writer::reset() { lseek64(mFd, mOffset, SEEK_SET); writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset); write("free", 4); + } else { + ALOGI("The mp4 file will not be streamable."); + } - // Free temp memory + // Free in-memory cache for moov box + if (mMoovBoxBuffer != NULL) { free(mMoovBoxBuffer); mMoovBoxBuffer = NULL; mMoovBoxBufferOffset = 0; - } else { - ALOGI("The mp4 file will not be streamable."); } CHECK(mBoxes.empty()); @@ -994,23 +1090,28 @@ size_t MPEG4Writer::write( const size_t bytes = size * nmemb; if (mWriteMoovBoxToMemory) { - // This happens only when we write the moov box at the end of - // recording, not for each output video/audio frame we receive. + off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes; if (moovBoxSize > mEstimatedMoovBoxSize) { + // The reserved moov box at the beginning of the file + // is not big enough. Moov box should be written to + // the end of the file from now on, but not to the + // in-memory cache. + + // We write partial moov box that is in the memory to + // the file first. for (List<off64_t>::iterator it = mBoxes.begin(); it != mBoxes.end(); ++it) { (*it) += mOffset; } lseek64(mFd, mOffset, SEEK_SET); ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset); - ::write(mFd, ptr, size * nmemb); + ::write(mFd, ptr, bytes); mOffset += (bytes + mMoovBoxBufferOffset); - free(mMoovBoxBuffer); - mMoovBoxBuffer = NULL; - mMoovBoxBufferOffset = 0; + + // All subsequent moov box content will be written + // to the end of the file. 
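The rewritten write() path implements the scheme described in the comment block above: moov data is appended to an in-memory cache sized like the reserved free space, and on overflow the cached portion is flushed and everything after it goes straight to the end of the file. In outline (a sketch of the decision only, with illustrative names; the real method also rebases the offsets recorded in mBoxes, and error handling is omitted):

    #include <sys/types.h>
    #include <unistd.h>
    #include <cstring>

    static void appendMoovData(bool *writeMoovToMemory,
                               uint8_t *cache, off64_t *cacheOffset,
                               off64_t cacheCapacity,
                               int fd, off64_t *fileOffset,
                               const void *ptr, size_t bytes) {
        if (*writeMoovToMemory &&
                (off64_t)(8 + *cacheOffset + bytes) > cacheCapacity) {
            // Reserved space too small: flush what is cached, then stop caching.
            write(fd, cache, *cacheOffset);
            *fileOffset += *cacheOffset;
            *writeMoovToMemory = false;
        }
        if (*writeMoovToMemory) {
            memcpy(cache + *cacheOffset, ptr, bytes);
            *cacheOffset += bytes;
        } else {
            write(fd, ptr, bytes);
            *fileOffset += bytes;
        }
    }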
mWriteMoovBoxToMemory = false; - mStreamableFile = false; } else { memcpy(mMoovBoxBuffer + mMoovBoxBufferOffset, ptr, bytes); mMoovBoxBufferOffset += bytes; @@ -1545,12 +1646,18 @@ void MPEG4Writer::threadFunc() { mChunkReadyCondition.wait(mLock); } - // Actual write without holding the lock in order to - // reduce the blocking time for media track threads. + // In real time recording mode, write without holding the lock in order + // to reduce the blocking time for media track threads. + // Otherwise, hold the lock until the existing chunks get written to the + // file. if (chunkFound) { - mLock.unlock(); + if (mIsRealTimeRecording) { + mLock.unlock(); + } writeChunkToFile(&chunk); - mLock.lock(); + if (mIsRealTimeRecording) { + mLock.lock(); + } } } @@ -1600,18 +1707,10 @@ status_t MPEG4Writer::Track::start(MetaData *params) { mRotation = rotationDegrees; } - mIsRealTimeRecording = true; - { - int32_t isNotRealTime; - if (params && params->findInt32(kKeyNotRealTime, &isNotRealTime)) { - mIsRealTimeRecording = (isNotRealTime == 0); - } - } - initTrackingProgressStatus(params); sp<MetaData> meta = new MetaData; - if (mIsRealTimeRecording && mOwner->numTracks() > 1) { + if (mOwner->isRealTimeRecording() && mOwner->numTracks() > 1) { /* * This extra delay of accepting incoming audio/video signals * helps to align a/v start time at the beginning of a recording @@ -1989,7 +2088,10 @@ status_t MPEG4Writer::Track::threadEntry() { } else { prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0); } - androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); + + if (mOwner->isRealTimeRecording()) { + androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO); + } sp<MetaData> meta_data; @@ -2150,7 +2252,7 @@ status_t MPEG4Writer::Track::threadEntry() { } - if (mIsRealTimeRecording) { + if (mOwner->isRealTimeRecording()) { if (mIsAudio) { updateDriftTime(meta_data); } @@ -2436,6 +2538,10 @@ int64_t MPEG4Writer::getDriftTimeUs() { return mDriftTimeUs; } +bool MPEG4Writer::isRealTimeRecording() const { + return mIsRealTimeRecording; +} + bool MPEG4Writer::useNalLengthFour() { return mUse4ByteNalLength; } diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp new file mode 100644 index 0000000..2484212 --- /dev/null +++ b/media/libstagefright/MediaAdapter.cpp @@ -0,0 +1,126 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaAdapter" +#include <utils/Log.h> + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaAdapter.h> +#include <media/stagefright/MediaBuffer.h> + +namespace android { + +MediaAdapter::MediaAdapter(const sp<MetaData> &meta) + : mCurrentMediaBuffer(NULL), + mStarted(false), + mOutputFormat(meta) { +} + +MediaAdapter::~MediaAdapter() { + Mutex::Autolock autoLock(mAdapterLock); + mOutputFormat.clear(); + CHECK(mCurrentMediaBuffer == NULL); +} + +status_t MediaAdapter::start(MetaData *params) { + Mutex::Autolock autoLock(mAdapterLock); + if (!mStarted) { + mStarted = true; + } + return OK; +} + +status_t MediaAdapter::stop() { + Mutex::Autolock autoLock(mAdapterLock); + if (mStarted) { + mStarted = false; + // If stop() happens immediately after a pushBuffer(), we should + // clean up the mCurrentMediaBuffer + if (mCurrentMediaBuffer != NULL) { + mCurrentMediaBuffer->release(); + mCurrentMediaBuffer = NULL; + } + // While read() is still waiting, we should signal it to finish. + mBufferReadCond.signal(); + } + return OK; +} + +sp<MetaData> MediaAdapter::getFormat() { + Mutex::Autolock autoLock(mAdapterLock); + return mOutputFormat; +} + +void MediaAdapter::signalBufferReturned(MediaBuffer *buffer) { + Mutex::Autolock autoLock(mAdapterLock); + CHECK(buffer != NULL); + buffer->setObserver(0); + buffer->release(); + ALOGV("buffer returned %p", buffer); + mBufferReturnedCond.signal(); +} + +status_t MediaAdapter::read( + MediaBuffer **buffer, const ReadOptions *options) { + Mutex::Autolock autoLock(mAdapterLock); + if (!mStarted) { + ALOGV("Read before even started!"); + return ERROR_END_OF_STREAM; + } + + while (mCurrentMediaBuffer == NULL && mStarted) { + ALOGV("waiting @ read()"); + mBufferReadCond.wait(mAdapterLock); + } + + if (!mStarted) { + ALOGV("read interrupted after stop"); + CHECK(mCurrentMediaBuffer == NULL); + return ERROR_END_OF_STREAM; + } + + CHECK(mCurrentMediaBuffer != NULL); + + *buffer = mCurrentMediaBuffer; + mCurrentMediaBuffer = NULL; + (*buffer)->setObserver(this); + + return OK; +} + +status_t MediaAdapter::pushBuffer(MediaBuffer *buffer) { + if (buffer == NULL) { + ALOGE("pushBuffer get an NULL buffer"); + return -EINVAL; + } + + Mutex::Autolock autoLock(mAdapterLock); + if (!mStarted) { + ALOGE("pushBuffer called before start"); + return INVALID_OPERATION; + } + mCurrentMediaBuffer = buffer; + mBufferReadCond.signal(); + + ALOGV("wait for the buffer returned @ pushBuffer! 
%p", buffer); + mBufferReturnedCond.wait(mAdapterLock); + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index cb8a651..f412dc8 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -22,7 +22,7 @@ #include "include/SoftwareRenderer.h" -#include <gui/SurfaceTextureClient.h> +#include <gui/Surface.h> #include <media/ICrypto.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> @@ -30,10 +30,14 @@ #include <media/stagefright/foundation/AString.h> #include <media/stagefright/foundation/hexdump.h> #include <media/stagefright/ACodec.h> +#include <media/stagefright/BufferProducerWrapper.h> +#include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaErrors.h> #include <media/stagefright/MetaData.h> #include <media/stagefright/NativeWindowWrapper.h> +#include "include/avc_utils.h" + namespace android { // static @@ -62,12 +66,14 @@ MediaCodec::MediaCodec(const sp<ALooper> &looper) : mState(UNINITIALIZED), mLooper(looper), mCodec(new ACodec), + mReplyID(0), mFlags(0), mSoftRenderer(NULL), mDequeueInputTimeoutGeneration(0), mDequeueInputReplyID(0), mDequeueOutputTimeoutGeneration(0), - mDequeueOutputReplyID(0) { + mDequeueOutputReplyID(0), + mHaveInputSurface(false) { } MediaCodec::~MediaCodec() { @@ -132,7 +138,7 @@ status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) { status_t MediaCodec::configure( const sp<AMessage> &format, - const sp<SurfaceTextureClient> &nativeWindow, + const sp<Surface> &nativeWindow, const sp<ICrypto> &crypto, uint32_t flags) { sp<AMessage> msg = new AMessage(kWhatConfigure, id()); @@ -154,6 +160,26 @@ status_t MediaCodec::configure( return PostAndAwaitResponse(msg, &response); } +status_t MediaCodec::createInputSurface( + sp<IGraphicBufferProducer>* bufferProducer) { + sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, id()); + + sp<AMessage> response; + status_t err = PostAndAwaitResponse(msg, &response); + if (err == NO_ERROR) { + // unwrap the sp<IGraphicBufferProducer> + sp<RefBase> obj; + bool found = response->findObject("input-surface", &obj); + CHECK(found); + sp<BufferProducerWrapper> wrapper( + static_cast<BufferProducerWrapper*>(obj.get())); + *bufferProducer = wrapper->getBufferProducer(); + } else { + ALOGW("createInputSurface failed, err=%d", err); + } + return err; +} + status_t MediaCodec::start() { sp<AMessage> msg = new AMessage(kWhatStart, id()); @@ -288,6 +314,13 @@ status_t MediaCodec::releaseOutputBuffer(size_t index) { return PostAndAwaitResponse(msg, &response); } +status_t MediaCodec::signalEndOfInputStream() { + sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, id()); + + sp<AMessage> response; + return PostAndAwaitResponse(msg, &response); +} + status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const { sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, id()); @@ -476,6 +509,11 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { "(omx error 0x%08x, internalError %d)", omxError, internalError); + if (omxError == OMX_ErrorResourcesLost + && internalError == DEAD_OBJECT) { + mFlags |= kFlagSawMediaServerDie; + } + bool sendErrorReponse = true; switch (mState) { @@ -504,6 +542,19 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { // the shutdown complete notification. 
sendErrorReponse = false; + + if (mFlags & kFlagSawMediaServerDie) { + // MediaServer died, there definitely won't + // be a shutdown complete notification after + // all. + + // note that we're directly going from + // STOPPING->UNINITIALIZED, instead of the + // usual STOPPING->INITIALIZED state. + setState(UNINITIALIZED); + + (new AMessage)->postReply(mReplyID); + } break; } @@ -571,10 +622,44 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { CHECK_EQ(mState, CONFIGURING); setState(CONFIGURED); + // reset input surface flag + mHaveInputSurface = false; + (new AMessage)->postReply(mReplyID); break; } + case ACodec::kWhatInputSurfaceCreated: + { + // response to ACodec::kWhatCreateInputSurface + status_t err = NO_ERROR; + sp<AMessage> response = new AMessage(); + if (!msg->findInt32("err", &err)) { + sp<RefBase> obj; + msg->findObject("input-surface", &obj); + CHECK(obj != NULL); + response->setObject("input-surface", obj); + mHaveInputSurface = true; + } else { + response->setInt32("err", err); + } + response->postReply(mReplyID); + break; + } + + case ACodec::kWhatSignaledInputEOS: + { + // response to ACodec::kWhatSignalEndOfInputStream + sp<AMessage> response = new AMessage(); + status_t err; + if (msg->findInt32("err", &err)) { + response->setInt32("err", err); + } + response->postReply(mReplyID); + break; + } + + case ACodec::kWhatBuffersAllocated: { int32_t portIndex; @@ -659,8 +744,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { } mOutputFormat = msg; - mFlags |= kFlagOutputFormatChanged; - postActivityNotificationIfPossible(); + + if (mFlags & kFlagIsEncoder) { + // Before we announce the format change we should + // collect codec specific data and amend the output + // format as necessary. + mFlags |= kFlagGatherCodecSpecificData; + } else { + mFlags |= kFlagOutputFormatChanged; + postActivityNotificationIfPossible(); + } break; } @@ -730,6 +823,25 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { buffer->meta()->setInt32("omxFlags", omxFlags); + if (mFlags & kFlagGatherCodecSpecificData) { + // This is the very first output buffer after a + // format change was signalled, it'll either contain + // the one piece of codec specific data we can expect + // or there won't be codec specific data. + if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) { + status_t err = + amendOutputFormatWithCodecSpecificData(buffer); + + if (err != OK) { + ALOGE("Codec spit out malformed codec " + "specific data!"); + } + } + + mFlags &= ~kFlagGatherCodecSpecificData; + mFlags |= kFlagOutputFormatChanged; + } + if (mFlags & kFlagDequeueOutputPending) { CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID)); @@ -873,6 +985,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { if (flags & CONFIGURE_FLAG_ENCODE) { format->setInt32("encoder", true); + mFlags |= kFlagIsEncoder; } extractCSD(format); @@ -881,11 +994,12 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { break; } - case kWhatStart: + case kWhatCreateInputSurface: { uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); + // Must be configured, but can't have been started yet. 
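The state check here pins createInputSurface() to the window between configure() and start(); once an input surface exists, frames are fed through the returned IGraphicBufferProducer and the stream is finished with signalEndOfInputStream() rather than queueInputBuffer(). A hedged usage sketch of that flow (the format keys and values are assumptions for illustration):

    #include <media/stagefright/MediaCodec.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>
    #include <gui/IGraphicBufferProducer.h>

    static status_t startSurfaceEncoder(const sp<ALooper> &looper,
                                        sp<MediaCodec> *outCodec,
                                        sp<IGraphicBufferProducer> *outProducer) {
        sp<MediaCodec> codec =
                MediaCodec::CreateByType(looper, "video/avc", true /* encoder */);
        if (codec == NULL) return NO_INIT;

        sp<AMessage> format = new AMessage;
        format->setString("mime", "video/avc");
        format->setInt32("width", 1280);        // illustrative values
        format->setInt32("height", 720);
        format->setInt32("bitrate", 2000000);

        status_t err = codec->configure(
                format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE);
        if (err != OK) return err;

        // Legal only between configure() and start(), per the check above.
        err = codec->createInputSurface(outProducer);
        if (err != OK) return err;

        err = codec->start();
        if (err != OK) return err;

        // ... render into *outProducer, then codec->signalEndOfInputStream().
        *outCodec = codec;
        return OK;
    }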
if (mState != CONFIGURED) { sp<AMessage> response = new AMessage; response->setInt32("err", INVALID_OPERATION); @@ -895,19 +1009,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { } mReplyID = replyID; - setState(STARTING); - - mCodec->initiateStart(); + mCodec->initiateCreateInputSurface(); break; } - case kWhatStop: + case kWhatStart: { uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); - if (mState != INITIALIZED - && mState != CONFIGURED && mState != STARTED) { + if (mState != CONFIGURED) { sp<AMessage> response = new AMessage; response->setInt32("err", INVALID_OPERATION); @@ -916,31 +1027,53 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { } mReplyID = replyID; - setState(STOPPING); + setState(STARTING); - mCodec->initiateShutdown(true /* keepComponentAllocated */); - returnBuffersToCodec(); + mCodec->initiateStart(); break; } + case kWhatStop: case kWhatRelease: { + State targetState = + (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED; + uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mState != INITIALIZED && mState != CONFIGURED && mState != STARTED) { + // We may be in "UNINITIALIZED" state already without the + // client being aware of this if media server died while + // we were being stopped. The client would assume that + // after stop() returned, it would be safe to call release() + // and it should be in this case, no harm to allow a release() + // if we're already uninitialized. + // Similarly stopping a stopped MediaCodec should be benign. sp<AMessage> response = new AMessage; - response->setInt32("err", INVALID_OPERATION); + response->setInt32( + "err", + mState == targetState ? OK : INVALID_OPERATION); response->postReply(replyID); break; } + if (mFlags & kFlagSawMediaServerDie) { + // It's dead, Jim. Don't expect initiateShutdown to yield + // any useful results now... + setState(UNINITIALIZED); + (new AMessage)->postReply(replyID); + break; + } + mReplyID = replyID; - setState(RELEASING); + setState(msg->what() == kWhatStop ? 
STOPPING : RELEASING); + + mCodec->initiateShutdown( + msg->what() == kWhatStop /* keepComponentAllocated */); - mCodec->initiateShutdown(); returnBuffersToCodec(); break; } @@ -950,6 +1083,14 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { uint32_t replyID; CHECK(msg->senderAwaitsResponse(&replyID)); + if (mHaveInputSurface) { + ALOGE("dequeueInputBuffer can't be used with input surface"); + sp<AMessage> response = new AMessage; + response->setInt32("err", INVALID_OPERATION); + response->postReply(replyID); + break; + } + if (handleDequeueInputBuffer(replyID, true /* new request */)) { break; } @@ -1093,6 +1234,24 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { break; } + case kWhatSignalEndOfInputStream: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + if (mState != STARTED || (mFlags & kFlagStickyError)) { + sp<AMessage> response = new AMessage; + response->setInt32("err", INVALID_OPERATION); + + response->postReply(replyID); + break; + } + + mReplyID = replyID; + mCodec->signalEndOfInputStream(); + break; + } + case kWhatGetBuffers: { uint32_t replyID; @@ -1203,6 +1362,23 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { break; } + case kWhatSetParameters: + { + uint32_t replyID; + CHECK(msg->senderAwaitsResponse(&replyID)); + + sp<AMessage> params; + CHECK(msg->findMessage("params", &params)); + + status_t err = onSetParameters(params); + + sp<AMessage> response = new AMessage; + response->setInt32("err", err); + + response->postReply(replyID); + break; + } + default: TRESPASS(); } @@ -1268,12 +1444,19 @@ void MediaCodec::setState(State newState) { mFlags &= ~kFlagOutputFormatChanged; mFlags &= ~kFlagOutputBuffersChanged; mFlags &= ~kFlagStickyError; + mFlags &= ~kFlagIsEncoder; + mFlags &= ~kFlagGatherCodecSpecificData; mActivityNotify.clear(); } if (newState == UNINITIALIZED) { mComponentName.clear(); + + // The component is gone, mediaserver's probably back up already + // but should definitely be back up should we try to instantiate + // another component.. and the cycle continues. + mFlags &= ~kFlagSawMediaServerDie; } mState = newState; @@ -1473,7 +1656,7 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) { return -EACCES; } - if (render) { + if (render && (info->mData == NULL || info->mData->size() != 0)) { info->mNotify->setInt32("render", true); if (mSoftRenderer != NULL) { @@ -1509,7 +1692,7 @@ ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) { } status_t MediaCodec::setNativeWindow( - const sp<SurfaceTextureClient> &surfaceTextureClient) { + const sp<Surface> &surfaceTextureClient) { status_t err; if (mNativeWindow != NULL) { @@ -1556,4 +1739,59 @@ void MediaCodec::postActivityNotificationIfPossible() { } } +status_t MediaCodec::setParameters(const sp<AMessage> &params) { + sp<AMessage> msg = new AMessage(kWhatSetParameters, id()); + msg->setMessage("params", params); + + sp<AMessage> response; + return PostAndAwaitResponse(msg, &response); +} + +status_t MediaCodec::onSetParameters(const sp<AMessage> &params) { + mCodec->signalSetParameters(params); + + return OK; +} + +status_t MediaCodec::amendOutputFormatWithCodecSpecificData( + const sp<ABuffer> &buffer) { + AString mime; + CHECK(mOutputFormat->findString("mime", &mime)); + + if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) { + // Codec specific data should be SPS and PPS in a single buffer, + // each prefixed by a startcode (0x00 0x00 0x00 0x01).
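A worked illustration of the buffer layout this comment describes (byte values invented for the example):

    // A single codec-config buffer from an AVC encoder, e.g.
    //
    //   00 00 00 01 67 64 00 1f ...   <- SPS (NAL type 7)
    //   00 00 00 01 68 eb e3 cb ...   <- PPS (NAL type 8)
    //
    // is split by the loop that follows into two output-format entries:
    //
    //   "csd-0" -> 00 00 00 01 67 64 00 1f ...
    //   "csd-1" -> 00 00 00 01 68 eb e3 cb ...
    //
    // Exactly two NAL units are expected; anything else yields ERROR_MALFORMED.
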
+ // We separate the two and put them into the output format + // under the keys "csd-0" and "csd-1". + + unsigned csdIndex = 0; + + const uint8_t *data = buffer->data(); + size_t size = buffer->size(); + + const uint8_t *nalStart; + size_t nalSize; + while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) { + sp<ABuffer> csd = new ABuffer(nalSize + 4); + memcpy(csd->data(), "\x00\x00\x00\x01", 4); + memcpy(csd->data() + 4, nalStart, nalSize); + + mOutputFormat->setBuffer( + StringPrintf("csd-%u", csdIndex).c_str(), csd); + + ++csdIndex; + } + + if (csdIndex != 2) { + return ERROR_MALFORMED; + } + } else { + // For everything else we just stash the codec specific data into + // the output format as a single piece of csd under "csd-0". + mOutputFormat->setBuffer("csd-0", buffer); + } + + return OK; +} + } // namespace android diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp index e7b5903..5d8029c 100644 --- a/media/libstagefright/MediaDefs.cpp +++ b/media/libstagefright/MediaDefs.cpp @@ -40,6 +40,7 @@ const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw"; const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw"; const char *MEDIA_MIMETYPE_AUDIO_FLAC = "audio/flac"; const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS = "audio/aac-adts"; +const char *MEDIA_MIMETYPE_AUDIO_MSGSM = "audio/gsm"; const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mp4"; const char *MEDIA_MIMETYPE_CONTAINER_WAV = "audio/x-wav"; diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp index b18c916..9ab6611 100644 --- a/media/libstagefright/MediaExtractor.cpp +++ b/media/libstagefright/MediaExtractor.cpp @@ -21,7 +21,6 @@ #include "include/AMRExtractor.h" #include "include/MP3Extractor.h" #include "include/MPEG4Extractor.h" -#include "include/FragmentedMP4Extractor.h" #include "include/WAVExtractor.h" #include "include/OggExtractor.h" #include "include/MPEG2PSExtractor.h" @@ -94,12 +93,7 @@ sp<MediaExtractor> MediaExtractor::Create( MediaExtractor *ret = NULL; if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG4) || !strcasecmp(mime, "audio/mp4")) { - int fragmented = 0; - if (meta != NULL && meta->findInt32("fragmented", &fragmented) && fragmented) { - ret = new FragmentedMP4Extractor(source); - } else { - ret = new MPEG4Extractor(source); - } + ret = new MPEG4Extractor(source); } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { ret = new MP3Extractor(source, meta); } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB) diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp new file mode 100644 index 0000000..94ce5de --- /dev/null +++ b/media/libstagefright/MediaMuxer.cpp @@ -0,0 +1,173 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaMuxer" +#include <utils/Log.h> + +#include <media/stagefright/MediaMuxer.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaAdapter.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaCodec.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/MPEG4Writer.h> +#include <media/stagefright/Utils.h> + +namespace android { + +MediaMuxer::MediaMuxer(const char *path, OutputFormat format) + : mState(UNINITIALIZED) { + if (format == OUTPUT_FORMAT_MPEG_4) { + mWriter = new MPEG4Writer(path); + mFileMeta = new MetaData; + mState = INITIALIZED; + } + +} + +MediaMuxer::MediaMuxer(int fd, OutputFormat format) + : mState(UNINITIALIZED) { + if (format == OUTPUT_FORMAT_MPEG_4) { + mWriter = new MPEG4Writer(fd); + mFileMeta = new MetaData; + mState = INITIALIZED; + } +} + +MediaMuxer::~MediaMuxer() { + Mutex::Autolock autoLock(mMuxerLock); + + // Clean up all the internal resources. + mFileMeta.clear(); + mWriter.clear(); + mTrackList.clear(); +} + +ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) { + Mutex::Autolock autoLock(mMuxerLock); + + if (format.get() == NULL) { + ALOGE("addTrack() get a null format"); + return -EINVAL; + } + + if (mState != INITIALIZED) { + ALOGE("addTrack() must be called after constructor and before start()."); + return INVALID_OPERATION; + } + + sp<MetaData> trackMeta = new MetaData; + convertMessageToMetaData(format, trackMeta); + + sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta); + status_t result = mWriter->addSource(newTrack); + if (result == OK) { + return mTrackList.add(newTrack); + } + return -1; +} + +status_t MediaMuxer::setOrientationHint(int degrees) { + Mutex::Autolock autoLock(mMuxerLock); + if (mState != INITIALIZED) { + ALOGE("setOrientationHint() must be called before start()."); + return INVALID_OPERATION; + } + + if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) { + ALOGE("setOrientationHint() get invalid degrees"); + return -EINVAL; + } + + mFileMeta->setInt32(kKeyRotation, degrees); + return OK; +} + +status_t MediaMuxer::start() { + Mutex::Autolock autoLock(mMuxerLock); + if (mState == INITIALIZED) { + mState = STARTED; + mFileMeta->setInt32(kKeyRealTimeRecording, false); + return mWriter->start(mFileMeta.get()); + } else { + ALOGE("start() is called in invalid state %d", mState); + return INVALID_OPERATION; + } +} + +status_t MediaMuxer::stop() { + Mutex::Autolock autoLock(mMuxerLock); + + if (mState == STARTED) { + mState = STOPPED; + for (size_t i = 0; i < mTrackList.size(); i++) { + if (mTrackList[i]->stop() != OK) { + return INVALID_OPERATION; + } + } + return mWriter->stop(); + } else { + ALOGE("stop() is called in invalid state %d", mState); + return INVALID_OPERATION; + } +} + +status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex, + int64_t timeUs, uint32_t flags) { + Mutex::Autolock autoLock(mMuxerLock); + + if (buffer.get() == NULL) { + ALOGE("WriteSampleData() get an NULL buffer."); + return -EINVAL; + } + + if (mState != STARTED) { + ALOGE("WriteSampleData() is called in invalid state %d", mState); + return INVALID_OPERATION; + } + + if (trackIndex >= mTrackList.size()) { + ALOGE("WriteSampleData() get an invalid 
index %d", trackIndex); + return -EINVAL; + } + + MediaBuffer* mediaBuffer = new MediaBuffer(buffer); + + mediaBuffer->add_ref(); // Released in MediaAdapter::signalBufferReturned(). + mediaBuffer->set_range(buffer->offset(), buffer->size()); + + sp<MetaData> sampleMetaData = mediaBuffer->meta_data(); + sampleMetaData->setInt64(kKeyTime, timeUs); + // Just set the kKeyDecodingTime as the presentation time for now. + sampleMetaData->setInt64(kKeyDecodingTime, timeUs); + + if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) { + sampleMetaData->setInt32(kKeyIsSyncFrame, true); + } + + sp<MediaAdapter> currentTrack = mTrackList[trackIndex]; + // This pushBuffer will wait until the mediaBuffer is consumed. + return currentTrack->pushBuffer(mediaBuffer); +} + +} // namespace android diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp index 404fa94..7bc7da2 100644 --- a/media/libstagefright/NuMediaExtractor.cpp +++ b/media/libstagefright/NuMediaExtractor.cpp @@ -228,6 +228,34 @@ status_t NuMediaExtractor::getTrackFormat( return convertMetaDataToMessage(meta, format); } +status_t NuMediaExtractor::getFileFormat(sp<AMessage> *format) const { + Mutex::Autolock autoLock(mLock); + + *format = NULL; + + if (mImpl == NULL) { + return -EINVAL; + } + + sp<MetaData> meta = mImpl->getMetaData(); + + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + *format = new AMessage(); + (*format)->setString("mime", mime); + + uint32_t type; + const void *pssh; + size_t psshsize; + if (meta->findData(kKeyPssh, &type, &pssh, &psshsize)) { + sp<ABuffer> buf = new ABuffer(psshsize); + memcpy(buf->data(), pssh, psshsize); + (*format)->setBuffer("pssh", buf); + } + + return OK; +} + status_t NuMediaExtractor::selectTrack(size_t index) { Mutex::Autolock autoLock(mLock); diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp index 7cdb793..1822f07 100644 --- a/media/libstagefright/OMXClient.cpp +++ b/media/libstagefright/OMXClient.cpp @@ -32,7 +32,7 @@ struct MuxOMX : public IOMX { MuxOMX(const sp<IOMX> &remoteOMX); virtual ~MuxOMX(); - virtual IBinder *onAsBinder() { return NULL; } + virtual IBinder *onAsBinder() { return mRemoteOMX->asBinder().get(); } virtual bool livesLocally(node_id node, pid_t pid); @@ -83,6 +83,12 @@ struct MuxOMX : public IOMX { node_id node, OMX_U32 port_index, const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer); + virtual status_t createInputSurface( + node_id node, OMX_U32 port_index, + sp<IGraphicBufferProducer> *bufferProducer); + + virtual status_t signalEndOfInputStream(node_id node); + virtual status_t allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data); @@ -274,6 +280,18 @@ status_t MuxOMX::useGraphicBuffer( node, port_index, graphicBuffer, buffer); } +status_t MuxOMX::createInputSurface( + node_id node, OMX_U32 port_index, + sp<IGraphicBufferProducer> *bufferProducer) { + status_t err = getOMX(node)->createInputSurface( + node, port_index, bufferProducer); + return err; +} + +status_t MuxOMX::signalEndOfInputStream(node_id node) { + return getOMX(node)->signalEndOfInputStream(node); +} + status_t MuxOMX::allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data) { diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 70de174..89a51f8 100755..100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -359,12 +359,7 @@ sp<MediaSource> 
OMXCodec::Create( observer->setCodec(codec); err = codec->configureCodec(meta); - if (err == OK) { - if (!strcmp("OMX.Nvidia.mpeg2v.decode", componentName)) { - codec->mFlags |= kOnlySubmitOneInputBufferAtOneTime; - } - return codec; } @@ -522,6 +517,17 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta) { CODEC_LOGE("setAACFormat() failed (err = %d)", err); return err; } + } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_MPEG, mMIME)) { + int32_t numChannels, sampleRate; + if (meta->findInt32(kKeyChannelCount, &numChannels) + && meta->findInt32(kKeySampleRate, &sampleRate)) { + // Since we did not always check for these, leave them optional + // and have the decoder figure it all out. + setRawAudioFormat( + mIsEncoder ? kPortIndexInput : kPortIndexOutput, + sampleRate, + numChannels); + } } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_ALAW, mMIME) || !strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_MLAW, mMIME)) { // These are PCM-like formats with a fixed sample rate but @@ -1213,13 +1219,6 @@ status_t OMXCodec::setVideoOutputFormat( CHECK_EQ(err, (status_t)OK); CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused); - CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar - || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar - || format.eColorFormat == OMX_COLOR_FormatCbYCrY - || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar - || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka); - int32_t colorFormat; if (meta->findInt32(kKeyColorFormat, &colorFormat) && colorFormat != OMX_COLOR_FormatUnused @@ -1340,8 +1339,7 @@ OMXCodec::OMXCodec( mLeftOverBuffer(NULL), mPaused(false), mNativeWindow( - (!strncmp(componentName, "OMX.google.", 11) - || !strcmp(componentName, "OMX.Nvidia.mpeg2v.decode")) + (!strncmp(componentName, "OMX.google.", 11)) ? 
NULL : nativeWindow) { mPortStatus[kPortIndexInput] = ENABLED; mPortStatus[kPortIndexOutput] = ENABLED; @@ -1390,6 +1388,8 @@ void OMXCodec::setComponentRole( "audio_decoder.raw", "audio_encoder.raw" }, { MEDIA_MIMETYPE_AUDIO_FLAC, "audio_decoder.flac", "audio_encoder.flac" }, + { MEDIA_MIMETYPE_AUDIO_MSGSM, + "audio_decoder.gsm", "audio_encoder.gsm" }, }; static const size_t kNumMimeToRole = diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp index 773854f..773854f 100755..100644 --- a/media/libstagefright/SkipCutBuffer.cpp +++ b/media/libstagefright/SkipCutBuffer.cpp diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp index bccffd8..af8186c 100644 --- a/media/libstagefright/StagefrightMediaScanner.cpp +++ b/media/libstagefright/StagefrightMediaScanner.cpp @@ -42,7 +42,7 @@ static bool FileHasAcceptableExtension(const char *extension) { ".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac", ".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota", ".mkv", ".mka", ".webm", ".ts", ".fl", ".flac", ".mxmf", - ".avi", ".mpeg", ".mpg", ".mpga" + ".avi", ".mpeg", ".mpg", ".awb", ".mpga" }; static const size_t kNumValidExtensions = sizeof(kValidExtensions) / sizeof(kValidExtensions[0]); diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index 3c002fc..409038a 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -298,6 +298,10 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer, // wait for a buffer to be queued mFrameAvailableCondition.wait(mMutex); } else if (err == OK) { + err = item.mFence->waitForever("SurfaceMediaSource::read"); + if (err) { + ALOGW("read: failed to wait for buffer fence: %d", err); + } // First time seeing the buffer? 
Added it to the SMS slot if (item.mGraphicBuffer != NULL) { diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp index 348a9d3..7496752 100644 --- a/media/libstagefright/ThrottledSource.cpp +++ b/media/libstagefright/ThrottledSource.cpp @@ -31,10 +31,6 @@ ThrottledSource::ThrottledSource( CHECK(mBandwidthLimitBytesPerSecond > 0); } -status_t ThrottledSource::initCheck() const { - return mSource->initCheck(); -} - ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) { Mutex::Autolock autoLock(mLock); @@ -62,17 +58,9 @@ ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) { if (whenUs > nowUs) { usleep(whenUs - nowUs); } - return n; } -status_t ThrottledSource::getSize(off64_t *size) { - return mSource->getSize(size); -} - -uint32_t ThrottledSource::flags() { - return mSource->flags(); -} } // namespace android diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index 74e9222..b0df379 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -21,7 +21,7 @@ #include "include/ESDS.h" #include <arpa/inet.h> - +#include <cutils/properties.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> @@ -78,6 +78,11 @@ status_t convertMetaDataToMessage( msg->setInt64("durationUs", durationUs); } + int32_t isSync; + if (meta->findInt32(kKeyIsSyncFrame, &isSync) && isSync != 0) { + msg->setInt32("is-sync-frame", 1); + } + if (!strncasecmp("video/", mime, 6)) { int32_t width, height; CHECK(meta->findInt32(kKeyWidth, &width)); @@ -85,6 +90,13 @@ status_t convertMetaDataToMessage( msg->setInt32("width", width); msg->setInt32("height", height); + + int32_t sarWidth, sarHeight; + if (meta->findInt32(kKeySARWidth, &sarWidth) + && meta->findInt32(kKeySARHeight, &sarHeight)) { + msg->setInt32("sar-width", sarWidth); + msg->setInt32("sar-height", sarHeight); + } } else if (!strncasecmp("audio/", mime, 6)) { int32_t numChannels, sampleRate; CHECK(meta->findInt32(kKeyChannelCount, &numChannels)); @@ -363,6 +375,11 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) { meta->setInt64(kKeyDuration, durationUs); } + int32_t isSync; + if (msg->findInt32("is-sync-frame", &isSync) && isSync != 0) { + meta->setInt32(kKeyIsSyncFrame, 1); + } + if (mime.startsWith("video/")) { int32_t width; int32_t height; @@ -372,6 +389,13 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) { } else { ALOGW("did not find width and/or height"); } + + int32_t sarWidth, sarHeight; + if (msg->findInt32("sar-width", &sarWidth) + && msg->findInt32("sar-height", &sarHeight)) { + meta->setInt32(kKeySARWidth, sarWidth); + meta->setInt32(kKeySARHeight, sarHeight); + } } else if (mime.startsWith("audio/")) { int32_t numChannels; if (msg->findInt32("channel-count", &numChannels)) { @@ -431,6 +455,21 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) { #endif } +AString MakeUserAgent() { + AString ua; + ua.append("stagefright/1.2 (Linux;Android "); + +#if (PROPERTY_VALUE_MAX < 8) +#error "PROPERTY_VALUE_MAX must be at least 8" +#endif + + char value[PROPERTY_VALUE_MAX]; + property_get("ro.build.version.release", value, "Unknown"); + ua.append(value); + ua.append(")"); + + return ua; +} } // namespace android diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp index 2a7f628..22af6fb 100644 --- 
a/media/libstagefright/WAVExtractor.cpp +++ b/media/libstagefright/WAVExtractor.cpp @@ -38,6 +38,7 @@ enum { WAVE_FORMAT_PCM = 0x0001, WAVE_FORMAT_ALAW = 0x0006, WAVE_FORMAT_MULAW = 0x0007, + WAVE_FORMAT_MSGSM = 0x0031, WAVE_FORMAT_EXTENSIBLE = 0xFFFE }; @@ -178,6 +179,7 @@ status_t WAVExtractor::init() { if (mWaveFormat != WAVE_FORMAT_PCM && mWaveFormat != WAVE_FORMAT_ALAW && mWaveFormat != WAVE_FORMAT_MULAW + && mWaveFormat != WAVE_FORMAT_MSGSM && mWaveFormat != WAVE_FORMAT_EXTENSIBLE) { return ERROR_UNSUPPORTED; } @@ -216,6 +218,10 @@ status_t WAVExtractor::init() { && mBitsPerSample != 24) { return ERROR_UNSUPPORTED; } + } else if (mWaveFormat == WAVE_FORMAT_MSGSM) { + if (mBitsPerSample != 0) { + return ERROR_UNSUPPORTED; + } } else { CHECK(mWaveFormat == WAVE_FORMAT_MULAW || mWaveFormat == WAVE_FORMAT_ALAW); @@ -283,6 +289,10 @@ status_t WAVExtractor::init() { mTrackMeta->setCString( kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_G711_ALAW); break; + case WAVE_FORMAT_MSGSM: + mTrackMeta->setCString( + kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MSGSM); + break; default: CHECK_EQ(mWaveFormat, (uint16_t)WAVE_FORMAT_MULAW); mTrackMeta->setCString( @@ -294,11 +304,17 @@ status_t WAVExtractor::init() { mTrackMeta->setInt32(kKeyChannelMask, mChannelMask); mTrackMeta->setInt32(kKeySampleRate, mSampleRate); - size_t bytesPerSample = mBitsPerSample >> 3; - - int64_t durationUs = - 1000000LL * (mDataSize / (mNumChannels * bytesPerSample)) - / mSampleRate; + int64_t durationUs = 0; + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + // 65 bytes decode to 320 8kHz samples + durationUs = + 1000000LL * (mDataSize / 65 * 320) / 8000; + } else { + size_t bytesPerSample = mBitsPerSample >> 3; + durationUs = + 1000000LL * (mDataSize / (mNumChannels * bytesPerSample)) + / mSampleRate; + } mTrackMeta->setInt64(kKeyDuration, durationUs); @@ -388,7 +404,16 @@ status_t WAVSource::read( int64_t seekTimeUs; ReadOptions::SeekMode mode; if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { - int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3); + int64_t pos = 0; + + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + // 65 bytes decode to 320 8kHz samples + int64_t samplenumber = (seekTimeUs * mSampleRate) / 1000000; + int64_t framenumber = samplenumber / 320; + pos = framenumber * 65; + } else { + pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3); + } if (pos > mSize) { pos = mSize; } @@ -414,6 +439,15 @@ status_t WAVSource::read( maxBytesToRead = maxBytesAvailable; } + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + // Microsoft packs 2 frames into 65 bytes, rather than using separate 33-byte frames, + // so read multiples of 65, and use smaller buffers to account for ~10:1 expansion ratio + if (maxBytesToRead > 1024) { + maxBytesToRead = 1024; + } + maxBytesToRead = (maxBytesToRead / 65) * 65; + } + ssize_t n = mDataSource->readAt( mCurrentPos, buffer->data(), maxBytesToRead); @@ -470,12 +504,17 @@ status_t WAVSource::read( } } - size_t bytesPerSample = mBitsPerSample >> 3; + int64_t timeStampUs = 0; + + if (mWaveFormat == WAVE_FORMAT_MSGSM) { + timeStampUs = 1000000LL * (mCurrentPos - mOffset) * 320 / 65 / mSampleRate; + } else { + size_t bytesPerSample = mBitsPerSample >> 3; + timeStampUs = 1000000LL * (mCurrentPos - mOffset) + / (mNumChannels * bytesPerSample) / mSampleRate; + } - buffer->meta_data()->setInt64( - kKeyTime, - 1000000LL * (mCurrentPos - mOffset) - / (mNumChannels * bytesPerSample) / mSampleRate); + buffer->meta_data()->setInt64(kKeyTime, timeStampUs); 
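A standalone check of the WAVE_FORMAT_MSGSM arithmetic above; the data-chunk size and seek position are invented for the example:

    // Each 65-byte MS-GSM block decodes to 2 GSM frames = 320 samples at 8000 Hz.
    #include <stdint.h>
    #include <stdio.h>

    int main() {
        const int64_t dataSize = 65000;            // hypothetical data chunk
        const int64_t blocks = dataSize / 65;      // 1000 blocks
        const int64_t samples = blocks * 320;      // 320000 samples
        const int64_t durationUs = 1000000LL * samples / 8000;  // 40000000 us = 40 s

        // Seeking to 10 s: sample 80000 -> block 250 -> byte offset 16250,
        // mirroring pos = (samplenumber / 320) * 65 in WAVSource::read().
        const int64_t seekTimeUs = 10000000LL;
        const int64_t sampleNumber = seekTimeUs * 8000 / 1000000;  // 80000
        const int64_t pos = (sampleNumber / 320) * 65;             // 16250

        printf("duration %lld us, seek offset %lld bytes\n",
               (long long)durationUs, (long long)pos);
        return 0;
    }
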
buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1); mCurrentPos += n; diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp index a141752..b822868 100644 --- a/media/libstagefright/avc_utils.cpp +++ b/media/libstagefright/avc_utils.cpp @@ -22,6 +22,7 @@ #include <media/stagefright/foundation/ABitReader.h> #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/hexdump.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaErrors.h> #include <media/stagefright/MetaData.h> @@ -41,7 +42,9 @@ unsigned parseUE(ABitReader *br) { // Determine video dimensions from the sequence parameterset. void FindAVCDimensions( - const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height) { + const sp<ABuffer> &seqParamSet, + int32_t *width, int32_t *height, + int32_t *sarWidth, int32_t *sarHeight) { ABitReader br(seqParamSet->data() + 1, seqParamSet->size() - 1); unsigned profile_idc = br.getBits(8); @@ -129,6 +132,48 @@ void FindAVCDimensions( *height -= (frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY; } + + if (sarWidth != NULL) { + *sarWidth = 0; + } + + if (sarHeight != NULL) { + *sarHeight = 0; + } + + if (br.getBits(1)) { // vui_parameters_present_flag + unsigned sar_width = 0, sar_height = 0; + + if (br.getBits(1)) { // aspect_ratio_info_present_flag + unsigned aspect_ratio_idc = br.getBits(8); + + if (aspect_ratio_idc == 255 /* extendedSAR */) { + sar_width = br.getBits(16); + sar_height = br.getBits(16); + } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) { + static const int32_t kFixedSARWidth[] = { + 1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160 + }; + + static const int32_t kFixedSARHeight[] = { + 1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99 + }; + + sar_width = kFixedSARWidth[aspect_ratio_idc - 1]; + sar_height = kFixedSARHeight[aspect_ratio_idc - 1]; + } + } + + ALOGV("sample aspect ratio = %u : %u", sar_width, sar_height); + + if (sarWidth != NULL) { + *sarWidth = sar_width; + } + + if (sarHeight != NULL) { + *sarHeight = sar_height; + } + } } status_t getNextNALUnit( @@ -254,7 +299,9 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) { } int32_t width, height; - FindAVCDimensions(seqParamSet, &width, &height); + int32_t sarWidth, sarHeight; + FindAVCDimensions( + seqParamSet, &width, &height, &sarWidth, &sarHeight); size_t stopOffset; sp<ABuffer> picParamSet = FindNAL(data, size, 8, &stopOffset); @@ -301,8 +348,29 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) { meta->setInt32(kKeyWidth, width); meta->setInt32(kKeyHeight, height); - ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)", - width, height, AVCProfileToString(profile), level / 10, level % 10); + if (sarWidth > 1 || sarHeight > 1) { + // We treat 0:0 (unspecified) as 1:1. 
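For readers less familiar with sample aspect ratio, a quick worked example of what the kKeySARWidth/kKeySARHeight metadata set just below means for display (numbers illustrative):

    // A PAL-style stream coded at 704x576 with SAR 16:11:
    //   display width  = 704 * 16 / 11 = 1024
    //   display height = 576
    // i.e. the renderer stretches the 704x576 buffer out to a 16:9
    // (1024x576) picture instead of showing it horizontally squeezed.
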
+ + meta->setInt32(kKeySARWidth, sarWidth); + meta->setInt32(kKeySARHeight, sarHeight); + + ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d) " + "SAR %d : %d", + width, + height, + AVCProfileToString(profile), + level / 10, + level % 10, + sarWidth, + sarHeight); + } else { + ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)", + width, + height, + AVCProfileToString(profile), + level / 10, + level % 10); + } return meta; } diff --git a/media/libstagefright/chromium_http/Android.mk b/media/libstagefright/chromium_http/Android.mk index 75c2bed..e62c0d4 100644 --- a/media/libstagefright/chromium_http/Android.mk +++ b/media/libstagefright/chromium_http/Android.mk @@ -23,6 +23,7 @@ LOCAL_SHARED_LIBRARIES += \ libchromium_net \ libutils \ libcutils \ + liblog \ libstagefright_foundation \ libstagefright \ libdrmframework diff --git a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp index 91ce175..a862d8b 100644 --- a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp +++ b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp @@ -340,5 +340,11 @@ status_t ChromiumHTTPDataSource::reconnectAtOffset(off64_t offset) { return err; } +// static +status_t ChromiumHTTPDataSource::UpdateProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + return SfDelegate::UpdateProxyConfig(host, port, exclusionList); +} + } // namespace android diff --git a/media/libstagefright/chromium_http/chromium_http_stub.cpp b/media/libstagefright/chromium_http/chromium_http_stub.cpp index 560a61f..289f6de 100644 --- a/media/libstagefright/chromium_http/chromium_http_stub.cpp +++ b/media/libstagefright/chromium_http/chromium_http_stub.cpp @@ -26,6 +26,11 @@ HTTPBase *createChromiumHTTPDataSource(uint32_t flags) { return new ChromiumHTTPDataSource(flags); } +status_t UpdateChromiumHTTPDataSourceProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + return ChromiumHTTPDataSource::UpdateProxyConfig(host, port, exclusionList); +} + DataSource *createDataUriSource(const char *uri) { return new DataUriSource(uri); } diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp index 0ecf8cc..553ef14 100644 --- a/media/libstagefright/chromium_http/support.cpp +++ b/media/libstagefright/chromium_http/support.cpp @@ -37,8 +37,8 @@ #include <arpa/inet.h> #include <binder/Parcel.h> #include <cutils/log.h> -#include <cutils/properties.h> #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/Utils.h> #include <string> #include <utils/Errors.h> @@ -127,7 +127,7 @@ IMPLEMENT_META_INTERFACE(AudioService, "android.media.IAudioService"); static Mutex gNetworkThreadLock; static base::Thread *gNetworkThread = NULL; -static scoped_refptr<net::URLRequestContext> gReqContext; +static scoped_refptr<SfRequestContext> gReqContext; static scoped_ptr<net::NetworkChangeNotifier> gNetworkChangeNotifier; bool logMessageHandler( @@ -239,19 +239,7 @@ net::NetLog::LogLevel SfNetLog::GetLogLevel() const { //////////////////////////////////////////////////////////////////////////////// SfRequestContext::SfRequestContext() { - AString ua; - ua.append("stagefright/1.2 (Linux;Android "); - -#if (PROPERTY_VALUE_MAX < 8) -#error "PROPERTY_VALUE_MAX must be at least 8" -#endif - - char value[PROPERTY_VALUE_MAX]; - property_get("ro.build.version.release", value, "Unknown"); - ua.append(value); - ua.append(")"); - - mUserAgent = 
ua.c_str(); + mUserAgent = MakeUserAgent().c_str(); set_net_log(new SfNetLog()); @@ -264,8 +252,10 @@ SfRequestContext::SfRequestContext() { set_ssl_config_service( net::SSLConfigService::CreateSystemSSLConfigService()); + mProxyConfigService = new net::ProxyConfigServiceAndroid; + set_proxy_service(net::ProxyService::CreateWithoutProxyResolver( - new net::ProxyConfigServiceAndroid, net_log())); + mProxyConfigService, net_log())); set_http_transaction_factory(new net::HttpCache( host_resolver(), @@ -286,6 +276,31 @@ const std::string &SfRequestContext::GetUserAgent(const GURL &url) const { return mUserAgent; } +status_t SfRequestContext::updateProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + Mutex::Autolock autoLock(mProxyConfigLock); + + if (host == NULL || *host == '\0') { + MY_LOGV("updateProxyConfig NULL"); + + std::string proxy; + std::string exList; + mProxyConfigService->UpdateProxySettings(proxy, exList); + } else { +#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0 + LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, + "updateProxyConfig %s:%d, exclude '%s'", + host, port, exclusionList); +#endif + + std::string proxy = StringPrintf("%s:%d", host, port).c_str(); + std::string exList = exclusionList; + mProxyConfigService->UpdateProxySettings(proxy, exList); + } + + return OK; +} + //////////////////////////////////////////////////////////////////////////////// SfNetworkLibrary::SfNetworkLibrary() {} @@ -331,6 +346,14 @@ SfDelegate::~SfDelegate() { CHECK(mURLRequest == NULL); } +// static +status_t SfDelegate::UpdateProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + InitializeNetworkThreadIfNecessary(); + + return gReqContext->updateProxyConfig(host, port, exclusionList); +} + void SfDelegate::setOwner(ChromiumHTTPDataSource *owner) { mOwner = owner; } diff --git a/media/libstagefright/chromium_http/support.h b/media/libstagefright/chromium_http/support.h index d2c5bc0..975a1d3 100644 --- a/media/libstagefright/chromium_http/support.h +++ b/media/libstagefright/chromium_http/support.h @@ -27,8 +27,13 @@ #include "net/base/io_buffer.h" #include <utils/KeyedVector.h> +#include <utils/Mutex.h> #include <utils/String8.h> +namespace net { + struct ProxyConfigServiceAndroid; +}; + namespace android { struct SfNetLog : public net::NetLog { @@ -55,8 +60,14 @@ struct SfRequestContext : public net::URLRequestContext { virtual const std::string &GetUserAgent(const GURL &url) const; + status_t updateProxyConfig( + const char *host, int32_t port, const char *exclusionList); + private: + Mutex mProxyConfigLock; + std::string mUserAgent; + net::ProxyConfigServiceAndroid *mProxyConfigService; DISALLOW_EVIL_CONSTRUCTORS(SfRequestContext); }; @@ -120,6 +131,9 @@ struct SfDelegate : public net::URLRequest::Delegate { virtual void OnReadCompleted(net::URLRequest *request, int bytes_read); + static status_t UpdateProxyConfig( + const char *host, int32_t port, const char *exclusionList); + private: typedef Delegate inherited; diff --git a/media/libstagefright/chromium_http_stub.cpp b/media/libstagefright/chromium_http_stub.cpp index cbd8796..ed8a878 100644 --- a/media/libstagefright/chromium_http_stub.cpp +++ b/media/libstagefright/chromium_http_stub.cpp @@ -30,6 +30,9 @@ static Mutex gLibMutex; HTTPBase *(*gLib_createChromiumHTTPDataSource)(uint32_t flags); DataSource *(*gLib_createDataUriSource)(const char *uri); +status_t (*gLib_UpdateChromiumHTTPDataSourceProxyConfig)( + const char *host, int32_t port, const char *exclusionList); + static bool 
load_libstagefright_chromium_http() { Mutex::Autolock autoLock(gLibMutex); void *sym; @@ -59,6 +62,14 @@ static bool load_libstagefright_chromium_http() { } gLib_createDataUriSource = (DataSource *(*)(const char *))sym; + sym = dlsym(gHandle, "UpdateChromiumHTTPDataSourceProxyConfig"); + if (sym == NULL) { + gHandle = NULL; + return false; + } + gLib_UpdateChromiumHTTPDataSourceProxyConfig = + (status_t (*)(const char *, int32_t, const char *))sym; + return true; } @@ -70,6 +81,16 @@ HTTPBase *createChromiumHTTPDataSource(uint32_t flags) { return gLib_createChromiumHTTPDataSource(flags); } +status_t UpdateChromiumHTTPDataSourceProxyConfig( + const char *host, int32_t port, const char *exclusionList) { + if (!load_libstagefright_chromium_http()) { + return INVALID_OPERATION; + } + + return gLib_UpdateChromiumHTTPDataSourceProxyConfig( + host, port, exclusionList); +} + DataSource *createDataUriSource(const char *uri) { if (!load_libstagefright_chromium_http()) { return NULL; diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk index 4dc38a8..ffa64f9 100644 --- a/media/libstagefright/codecs/aacdec/Android.mk +++ b/media/libstagefright/codecs/aacdec/Android.mk @@ -20,7 +20,7 @@ LOCAL_CFLAGS := LOCAL_STATIC_LIBRARIES := libFraunhoferAAC LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils libcutils + libstagefright_omx libstagefright_foundation libutils libcutils liblog LOCAL_MODULE := libstagefright_soft_aacdec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index d88813e..cf50dc9 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -29,6 +29,7 @@ #define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */ #define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */ #define MAX_CHANNEL_COUNT 6 /* maximum number of audio channels that can be decoded */ // names of properties that can be used to override the default DRC settings #define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level" @@ -118,7 +119,7 @@ status_t SoftAAC2::initDecoder() { status = OK; } } - mIsFirst = true; + mDecoderHasData = false; // for streams that contain metadata, use the mobile profile DRC settings unless overridden // by platform properties: @@ -146,6 +147,8 @@ status_t SoftAAC2::initDecoder() { unsigned boost = atoi(value); ALOGV("AAC decoder using AAC_DRC_BOOST_FACTOR of %d", boost); aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, boost); + } else { + aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST); } return status; @@ -327,6 +330,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL); return; } + inQueue.erase(inQueue.begin()); info->mOwnedByUs = false; notifyEmptyBufferDone(header); @@ -358,7 +362,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { inInfo->mOwnedByUs = false; notifyEmptyBufferDone(inHeader); - if (!mIsFirst) { + if (mDecoderHasData) { // flush out the decoder's delayed data by calling DecodeFrame // one more time, with the AACDEC_FLUSH flag set INT_PCM *outBuffer = @@ -370,6 +374,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { outBuffer, outHeader->nAllocLen, 
AACDEC_FLUSH); + mDecoderHasData = false; if (decoderErr != AAC_DEC_OK) { mSignalledError = true; @@ -385,9 +390,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { * sizeof(int16_t) * mStreamInfo->numChannels; } else { - // Since we never discarded frames from the start, we won't have - // to add any padding at the end either. - + // we never submitted any data to the decoder, so there's nothing to flush out outHeader->nFilledLen = 0; } @@ -473,6 +476,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { inBuffer, inBufferLength, bytesValid); + mDecoderHasData = true; decoderErr = aacDecoder_DecodeFrame(mAACDecoder, outBuffer, @@ -484,6 +488,35 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { } } + size_t numOutBytes = + mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels; + + if (decoderErr == AAC_DEC_OK) { + UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; + inHeader->nFilledLen -= inBufferUsedLength; + inHeader->nOffset += inBufferUsedLength; + } else { + ALOGW("AAC decoder returned error %d, substituting silence", + decoderErr); + + memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); + + // Discard input buffer. + inHeader->nFilledLen = 0; + + aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); + + // fall through + } + + if (inHeader->nFilledLen == 0) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } + /* * AAC+/eAAC+ streams can be signalled in two ways: either explicitly * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual @@ -502,15 +535,9 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { if (mStreamInfo->sampleRate != prevSampleRate || mStreamInfo->numChannels != prevNumChannels) { maybeConfigureDownmix(); - ALOGI("Reconfiguring decoder: %d Hz, %d channels", - mStreamInfo->sampleRate, - mStreamInfo->numChannels); - - // We're going to want to revisit this input buffer, but - // may have already advanced the offset. Undo that if - // necessary. - inHeader->nOffset -= adtsHeaderSize; - inHeader->nFilledLen += adtsHeaderSize; + ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels", + prevSampleRate, mStreamInfo->sampleRate, + prevNumChannels, mStreamInfo->numChannels); notify(OMX_EventPortSettingsChanged, 1, 0, NULL); mOutputPortSettingsChange = AWAITING_DISABLED; @@ -523,38 +550,10 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { return; } - size_t numOutBytes = - mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels; - - if (decoderErr == AAC_DEC_OK) { - UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; - inHeader->nFilledLen -= inBufferUsedLength; - inHeader->nOffset += inBufferUsedLength; - } else { - ALOGW("AAC decoder returned error %d, substituting silence", - decoderErr); - - memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes); - - // Discard input buffer. - inHeader->nFilledLen = 0; - - aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); - - // fall through - } - if (decoderErr == AAC_DEC_OK || mNumSamplesOutput > 0) { // We'll only output data if we successfully decoded it or // we've previously decoded valid data, in the latter case // (decode failed) we'll output a silent frame. 
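For scale, with typical AAC-LC values (not taken from this change):

    // frameSize = 1024 samples per channel, stereo output:
    //   numOutBytes = 1024 * sizeof(int16_t) * 2 = 4096 bytes
    // of zeroed PCM substituted for each frame the decoder failed on.
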
- if (mIsFirst) { - mIsFirst = false; - // the first decoded frame should be discarded to account - // for decoder delay - numOutBytes = 0; - } - outHeader->nFilledLen = numOutBytes; outHeader->nFlags = 0; @@ -571,14 +570,6 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) { outHeader = NULL; } - if (inHeader->nFilledLen == 0) { - inInfo->mOwnedByUs = false; - inQueue.erase(inQueue.begin()); - inInfo = NULL; - notifyEmptyBufferDone(inHeader); - inHeader = NULL; - } - if (decoderErr == AAC_DEC_OK) { ++mInputBufferCount; } @@ -589,11 +580,32 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) { if (portIndex == 0) { // Make sure that the next buffer output does not still // depend on fragments from the last one decoded. - aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); - mIsFirst = true; + // drain all existing data + drainDecoder(); } } +void SoftAAC2::drainDecoder() { + // a buffer big enough for 6 channels of decoded HE-AAC + short buf [2048*6]; + aacDecoder_DecodeFrame(mAACDecoder, + buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); + aacDecoder_DecodeFrame(mAACDecoder, + buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR); + aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); + mDecoderHasData = false; +} + +void SoftAAC2::onReset() { + drainDecoder(); + // reset the "configured" state + mInputBufferCount = 0; + mNumSamplesOutput = 0; + // To make the codec behave the same before and after a reset, we need to invalidate the + // streaminfo struct. This does that: + mStreamInfo->sampleRate = 0; +} + void SoftAAC2::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { if (portIndex != 1) { return; diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h index 0353196..2d960ab 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.h +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h @@ -41,6 +41,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { @@ -51,7 +52,7 @@ private: HANDLE_AACDECODER mAACDecoder; CStreamInfo *mStreamInfo; bool mIsADTS; - bool mIsFirst; + bool mDecoderHasData; size_t mInputBufferCount; bool mSignalledError; int64_t mAnchorTimeUs; @@ -67,6 +68,7 @@ private: status_t initDecoder(); bool isConfigured() const; void maybeConfigureDownmix() const; + void drainDecoder(); DISALLOW_EVIL_CONSTRUCTORS(SoftAAC2); }; diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk index 820734d..057c69b 100644 --- a/media/libstagefright/codecs/aacenc/Android.mk +++ b/media/libstagefright/codecs/aacenc/Android.mk @@ -109,7 +109,7 @@ ifeq ($(AAC_LIBRARY), fraunhofer) LOCAL_STATIC_LIBRARIES := libFraunhoferAAC LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils + libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_aacenc LOCAL_MODULE_TAGS := optional @@ -132,7 +132,7 @@ else # visualon libstagefright_aacenc LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_enc_common LOCAL_MODULE := libstagefright_soft_aacenc diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp index 7719435..5749733 100644 --- 
a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp +++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp @@ -481,7 +481,7 @@ void SoftAACEncoder2::onQueueFilled(OMX_U32 portIndex) { void* inBuffer[] = { (unsigned char *)mInputFrame }; INT inBufferIds[] = { IN_AUDIO_DATA }; - INT inBufferSize[] = { numBytesPerInputFrame }; + INT inBufferSize[] = { (INT)numBytesPerInputFrame }; INT inBufferElSize[] = { sizeof(int16_t) }; AACENC_BufDesc inBufDesc; diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk index b48a459..8d6c6f8 100644 --- a/media/libstagefright/codecs/amrnb/dec/Android.mk +++ b/media/libstagefright/codecs/amrnb/dec/Android.mk @@ -72,7 +72,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_amrnbdec libstagefright_amrwbdec LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_amrnb_common LOCAL_MODULE := libstagefright_soft_amrdec diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk index 457656a..f4e467a 100644 --- a/media/libstagefright/codecs/amrnb/enc/Android.mk +++ b/media/libstagefright/codecs/amrnb/enc/Android.mk @@ -92,7 +92,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_amrnbenc LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_amrnb_common LOCAL_MODULE := libstagefright_soft_amrnbenc diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp index 07f8b4f..50b739c 100644 --- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp +++ b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp @@ -257,7 +257,7 @@ OMX_ERRORTYPE SoftAMRNBEncoder::internalSetParameter( } if (pcmParams->nChannels != 1 - || pcmParams->nSamplingRate != kSampleRate) { + || pcmParams->nSamplingRate != (OMX_U32)kSampleRate) { return OMX_ErrorUndefined; } diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk index edfd7b7..c5b8e0c 100644 --- a/media/libstagefright/codecs/amrwbenc/Android.mk +++ b/media/libstagefright/codecs/amrwbenc/Android.mk @@ -130,7 +130,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_amrwbenc LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils \ + libstagefright_omx libstagefright_foundation libutils liblog \ libstagefright_enc_common LOCAL_MODULE := libstagefright_soft_amrwbenc diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk index cffe469..7d17c2a 100644 --- a/media/libstagefright/codecs/avc/enc/Android.mk +++ b/media/libstagefright/codecs/avc/enc/Android.mk @@ -62,6 +62,7 @@ LOCAL_SHARED_LIBRARIES := \ libstagefright_foundation \ libstagefright_omx \ libutils \ + liblog \ libui diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk index 546a357..f01d605 100644 --- a/media/libstagefright/codecs/flac/enc/Android.mk +++ b/media/libstagefright/codecs/flac/enc/Android.mk @@ -10,7 +10,7 @@ LOCAL_C_INCLUDES := \ external/flac/include LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_STATIC_LIBRARIES := \ libFLAC \ diff 
--git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp index 233aed3..e64fe72 100644 --- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp +++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp @@ -109,7 +109,7 @@ void SoftFlacEncoder::initPorts() { def.eDir = OMX_DirInput; def.nBufferCountMin = kNumBuffers;// TODO verify that 1 is enough def.nBufferCountActual = def.nBufferCountMin; - def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2; + def.nBufferSize = kMaxInputBufferSize; def.bEnabled = OMX_TRUE; def.bPopulated = OMX_FALSE; def.eDomain = OMX_PortDomainAudio; @@ -234,6 +234,22 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter( return OMX_ErrorNone; } + case OMX_IndexParamPortDefinition: + { + OMX_PARAM_PORTDEFINITIONTYPE *defParams = + (OMX_PARAM_PORTDEFINITIONTYPE *)params; + + if (defParams->nPortIndex == 0) { + if (defParams->nBufferSize > kMaxInputBufferSize) { + ALOGE("Input buffer size must be at most %zu bytes", + kMaxInputBufferSize); + return OMX_ErrorUnsupportedSetting; + } + } + + // fall through + } + default: ALOGV("SoftFlacEncoder::internalSetParameter(default)"); return SimpleSoftOMXComponent::internalSetParameter(index, params); @@ -273,7 +289,7 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) { return; } - if (inHeader->nFilledLen > kMaxNumSamplesPerFrame * sizeof(FLAC__int32) * 2) { + if (inHeader->nFilledLen > kMaxInputBufferSize) { ALOGE("input buffer too large (%ld).", inHeader->nFilledLen); mSignalledError = true; notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); @@ -290,6 +306,7 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) { const unsigned nbInputSamples = inHeader->nFilledLen / 2; const OMX_S16 * const pcm16 = reinterpret_cast<OMX_S16 *>(inHeader->pBuffer); + CHECK_LE(nbInputSamples, 2 * kMaxNumSamplesPerFrame); for (unsigned i=0 ; i < nbInputSamples ; i++) { mInputBufferPcm32[i] = (FLAC__int32) pcm16[i]; } diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h index 1e0148a..97361fa 100644 --- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h +++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h @@ -52,6 +52,7 @@ private: enum { kNumBuffers = 2, kMaxNumSamplesPerFrame = 1152, + kMaxInputBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2, kMaxOutputBufferSize = 65536, //TODO check if this can be reduced }; diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk index 28be646..4c80da6 100644 --- a/media/libstagefright/codecs/g711/dec/Android.mk +++ b/media/libstagefright/codecs/g711/dec/Android.mk @@ -9,7 +9,7 @@ LOCAL_C_INCLUDES := \ frameworks/native/include/media/openmax LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_g711dec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/gsm/Android.mk b/media/libstagefright/codecs/gsm/Android.mk new file mode 100644 index 0000000..2e43120 --- /dev/null +++ b/media/libstagefright/codecs/gsm/Android.mk @@ -0,0 +1,4 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk new file mode 100644 index 0000000..71613d2 
--- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/Android.mk @@ -0,0 +1,21 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + SoftGSM.cpp + +LOCAL_C_INCLUDES := \ + frameworks/av/media/libstagefright/include \ + frameworks/native/include/media/openmax \ + external/libgsm/inc + +LOCAL_SHARED_LIBRARIES := \ + libstagefright libstagefright_omx libstagefright_foundation libutils liblog + +LOCAL_STATIC_LIBRARIES := \ + libgsm + +LOCAL_MODULE := libstagefright_soft_gsmdec +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 diff --git a/media/libstagefright/codecs/gsm/dec/NOTICE b/media/libstagefright/codecs/gsm/dec/NOTICE new file mode 100644 index 0000000..c5b1efa --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp new file mode 100644 index 0000000..00e0c85 --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp @@ -0,0 +1,269 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SoftGSM" +#include <utils/Log.h> + +#include "SoftGSM.h" + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaDefs.h> + +namespace android { + +template<class T> +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 0; + params->nVersion.s.nRevision = 0; + params->nVersion.s.nStep = 0; +} + +SoftGSM::SoftGSM( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SimpleSoftOMXComponent(name, callbacks, appData, component), + mSignalledError(false) { + + CHECK(!strcmp(name, "OMX.google.gsm.decoder")); + + mGsm = gsm_create(); + CHECK(mGsm); + int msopt = 1; + gsm_option(mGsm, GSM_OPT_WAV49, &msopt); + + initPorts(); +} + +SoftGSM::~SoftGSM() { + gsm_destroy(mGsm); +} + +void SoftGSM::initPorts() { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + + def.nPortIndex = 0; + def.eDir = OMX_DirInput; + def.nBufferCountMin = kNumBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.nBufferSize = sizeof(gsm_frame); + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainAudio; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 1; + + def.format.audio.cMIMEType = + const_cast<char *>(MEDIA_MIMETYPE_AUDIO_MSGSM); + + def.format.audio.pNativeRender = NULL; + def.format.audio.bFlagErrorConcealment = OMX_FALSE; + def.format.audio.eEncoding = OMX_AUDIO_CodingGSMFR; + + addPort(def); + + def.nPortIndex = 1; + def.eDir = OMX_DirOutput; + def.nBufferCountMin = kNumBuffers; + def.nBufferCountActual = def.nBufferCountMin; + def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t); + def.bEnabled = OMX_TRUE; + def.bPopulated = OMX_FALSE; + def.eDomain = OMX_PortDomainAudio; + def.bBuffersContiguous = OMX_FALSE; + def.nBufferAlignment = 2; + + def.format.audio.cMIMEType = const_cast<char *>("audio/raw"); + def.format.audio.pNativeRender = NULL; + def.format.audio.bFlagErrorConcealment = OMX_FALSE; + def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; + + addPort(def); +} + +OMX_ERRORTYPE SoftGSM::internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params) { + switch (index) { + case OMX_IndexParamAudioPcm: + { + OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = + (OMX_AUDIO_PARAM_PCMMODETYPE *)params; + + if (pcmParams->nPortIndex > 1) { + return OMX_ErrorUndefined; + } + + pcmParams->eNumData = OMX_NumericalDataSigned; + pcmParams->eEndian = OMX_EndianBig; + pcmParams->bInterleaved = OMX_TRUE; + pcmParams->nBitPerSample = 16; + pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear; + pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF; + pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF; + + pcmParams->nChannels = 1; + pcmParams->nSamplingRate = 8000; + + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalGetParameter(index, params); + } +} + +OMX_ERRORTYPE SoftGSM::internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params) { + switch (index) { + case OMX_IndexParamAudioPcm: + { + OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = + (OMX_AUDIO_PARAM_PCMMODETYPE *)params; + + if (pcmParams->nPortIndex != 0 && pcmParams->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + if (pcmParams->nChannels != 1) { + return OMX_ErrorUndefined; + } + + if (pcmParams->nSamplingRate != 8000) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + case OMX_IndexParamStandardComponentRole: + { + const 
OMX_PARAM_COMPONENTROLETYPE *roleParams = + (const OMX_PARAM_COMPONENTROLETYPE *)params; + + if (strncmp((const char *)roleParams->cRole, + "audio_decoder.gsm", + OMX_MAX_STRINGNAME_SIZE - 1)) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalSetParameter(index, params); + } +} + +void SoftGSM::onQueueFilled(OMX_U32 portIndex) { + if (mSignalledError) { + return; + } + + List<BufferInfo *> &inQueue = getPortQueue(0); + List<BufferInfo *> &outQueue = getPortQueue(1); + + while (!inQueue.empty() && !outQueue.empty()) { + BufferInfo *inInfo = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; + + BufferInfo *outInfo = *outQueue.begin(); + OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; + + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); + + outHeader->nFilledLen = 0; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outQueue.erase(outQueue.begin()); + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + return; + } + + if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) { + ALOGE("input buffer too large (%ld).", inHeader->nFilledLen); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + mSignalledError = true; + } + + if(((inHeader->nFilledLen / 65) * 65) != inHeader->nFilledLen) { + ALOGE("input buffer not multiple of 65 (%ld).", inHeader->nFilledLen); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + mSignalledError = true; + } + + uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset; + + int n = mSignalledError ? 0 : DecodeGSM(mGsm, + reinterpret_cast<int16_t *>(outHeader->pBuffer), inputptr, inHeader->nFilledLen); + + outHeader->nTimeStamp = inHeader->nTimeStamp; + outHeader->nOffset = 0; + outHeader->nFilledLen = n * sizeof(int16_t); + outHeader->nFlags = 0; + + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + } +} + + +// static +int SoftGSM::DecodeGSM(gsm handle, + int16_t *out, uint8_t *in, size_t inSize) { + + int ret = 0; + while (inSize > 0) { + gsm_decode(handle, in, out); + in += 33; + inSize -= 33; + out += 160; + ret += 160; + gsm_decode(handle, in, out); + in += 32; + inSize -= 32; + out += 160; + ret += 160; + } + return ret; +} + + +} // namespace android + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component) { + return new android::SoftGSM(name, callbacks, appData, component); +} + diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h new file mode 100644 index 0000000..8ab6116 --- /dev/null +++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SOFT_GSM_H_ + +#define SOFT_GSM_H_ + +#include "SimpleSoftOMXComponent.h" + +extern "C" { +#include "gsm.h" +} + +namespace android { + +struct SoftGSM : public SimpleSoftOMXComponent { + SoftGSM(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + +protected: + virtual ~SoftGSM(); + + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params); + + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params); + + virtual void onQueueFilled(OMX_U32 portIndex); + +private: + enum { + kNumBuffers = 4, + kMaxNumSamplesPerFrame = 16384, + }; + + bool mSignalledError; + gsm mGsm; + + void initPorts(); + + static int DecodeGSM(gsm handle, int16_t *out, uint8_t *in, size_t inSize); + + DISALLOW_EVIL_CONSTRUCTORS(SoftGSM); +}; + +} // namespace android + +#endif // SOFT_GSM_H_ + diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk index a6b1edc..a3d5779 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk +++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk @@ -67,7 +67,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_m4vh263dec LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_mpeg4dec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp index d527fde..020cc0a 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp @@ -326,7 +326,7 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { OMX_BUFFERHEADERTYPE *outHeader = port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) { inQueue.erase(inQueue.begin()); inInfo->mOwnedByUs = false; notifyEmptyBufferDone(inHeader); @@ -445,6 +445,11 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { inHeader->nOffset += bufferSize; inHeader->nFilledLen = 0; + if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + } else { + outHeader->nFlags = 0; + } if (inHeader->nFilledLen == 0) { inInfo->mOwnedByUs = false; @@ -458,7 +463,6 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) { outHeader->nOffset = 0; outHeader->nFilledLen = (mWidth * mHeight * 3) / 2; - outHeader->nFlags = 0; List<BufferInfo *>::iterator it = outQueue.begin(); while ((*it)->mHeader != outHeader) { diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk index 865cc9c..83a2dd2 100644 --- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk +++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk @@ -65,6 +65,7 @@ LOCAL_SHARED_LIBRARIES := \ libstagefright_foundation \ libstagefright_omx \ libutils \ + liblog \ libui diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk index ec8d7ec..135c715 100644 --- a/media/libstagefright/codecs/mp3dec/Android.mk +++ b/media/libstagefright/codecs/mp3dec/Android.mk @@ -70,7 +70,7 @@ LOCAL_C_INCLUDES := \ $(LOCAL_PATH)/include 
LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_STATIC_LIBRARIES := \ libstagefright_mp3dec diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index fb1135c..9f25536 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -166,6 +166,21 @@ OMX_ERRORTYPE SoftMP3::internalSetParameter( return OMX_ErrorNone; } + case OMX_IndexParamAudioPcm: + { + const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams = + (const OMX_AUDIO_PARAM_PCMMODETYPE *)params; + + if (pcmParams->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + mNumChannels = pcmParams->nChannels; + mSamplingRate = pcmParams->nSamplingRate; + + return OMX_ErrorNone; + } + default: return SimpleSoftOMXComponent::internalSetParameter(index, params); } @@ -343,6 +358,11 @@ void SoftMP3::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { } } +void SoftMP3::onReset() { + pvmp3_InitDecoder(mConfig, mDecoderBuf); + mIsFirst = true; +} + } // namespace android android::SoftOMXComponent *createSoftOMXComponent( diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h index 3a05466..4af91ea 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.h +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h @@ -42,6 +42,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk index 0082d7c..7f2c46d 100644 --- a/media/libstagefright/codecs/on2/dec/Android.mk +++ b/media/libstagefright/codecs/on2/dec/Android.mk @@ -15,7 +15,7 @@ LOCAL_STATIC_LIBRARIES := \ libvpx LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils + libstagefright libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_vpxdec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index bf9ab3a..a400b4c 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -66,7 +66,7 @@ void SoftVPX::initPorts() { def.eDir = OMX_DirInput; def.nBufferCountMin = kNumBuffers; def.nBufferCountActual = def.nBufferCountMin; - def.nBufferSize = 256 * 1024; + def.nBufferSize = 768 * 1024; def.bEnabled = OMX_TRUE; def.bPopulated = OMX_FALSE; def.eDomain = OMX_PortDomainVideo; diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk new file mode 100644 index 0000000..a92d376 --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/Android.mk @@ -0,0 +1,24 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + SoftVPXEncoder.cpp + +LOCAL_C_INCLUDES := \ + $(TOP)/external/libvpx/libvpx \ + $(TOP)/external/openssl/include \ + $(TOP)/external/libvpx/libvpx/vpx_codec \ + $(TOP)/external/libvpx/libvpx/vpx_ports \ + frameworks/av/media/libstagefright/include \ + frameworks/native/include/media/openmax \ + +LOCAL_STATIC_LIBRARIES := \ + libvpx + +LOCAL_SHARED_LIBRARIES := \ + libstagefright libstagefright_omx libstagefright_foundation libutils 
liblog \ + +LOCAL_MODULE := libstagefright_soft_vpxenc +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 diff --git a/media/libstagefright/codecs/on2/enc/NOTICE b/media/libstagefright/codecs/on2/enc/NOTICE new file mode 100644 index 0000000..faed58a --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2013, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp new file mode 100644 index 0000000..e25637a --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -0,0 +1,686 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "SoftVPXEncoder" +#include "SoftVPXEncoder.h" + +#include <utils/Log.h> + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaDefs.h> + +namespace android { + + +template<class T> +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + // OMX IL 1.1.2 + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 1; + params->nVersion.s.nRevision = 2; + params->nVersion.s.nStep = 0; +} + + +static int GetCPUCoreCount() { + int cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
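// Descriptive note, not part of this change: sysconf() can return -1 when the
// queried name is unsupported or the value is indeterminate, so the
// CHECK_GE(cpuCoreCount, 1) below fails loudly instead of configuring the
// encoder with a nonsensical thread count; the result is used as
// mCodecConfiguration->g_threads in initEncoder().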
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK_GE(cpuCoreCount, 1); + return cpuCoreCount; +} + + +// This color conversion utility is copied from SoftMPEG4Encoder.cpp +inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv, + uint8_t* outyuv, + int32_t width, + int32_t height) { + int32_t outYsize = width * height; + uint32_t *outy = (uint32_t *) outyuv; + uint16_t *outcb = (uint16_t *) (outyuv + outYsize); + uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2)); + + /* Y copying */ + memcpy(outy, inyuv, outYsize); + + /* U & V copying */ + uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize); + for (int32_t i = height >> 1; i > 0; --i) { + for (int32_t j = width >> 2; j > 0; --j) { + uint32_t temp = *inyuv_4++; + uint32_t tempU = temp & 0xFF; + tempU = tempU | ((temp >> 8) & 0xFF00); + + uint32_t tempV = (temp >> 8) & 0xFF; + tempV = tempV | ((temp >> 16) & 0xFF00); + + // Flip U and V + *outcb++ = tempV; + *outcr++ = tempU; + } + } +} + + +SoftVPXEncoder::SoftVPXEncoder(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SimpleSoftOMXComponent(name, callbacks, appData, component), + mCodecContext(NULL), + mCodecConfiguration(NULL), + mCodecInterface(NULL), + mWidth(176), + mHeight(144), + mBitrate(192000), // in bps + mBitrateControlMode(VPX_VBR), // variable bitrate + mFrameDurationUs(33333), // Defaults to 30 fps + mDCTPartitions(0), + mErrorResilience(OMX_FALSE), + mColorFormat(OMX_COLOR_FormatYUV420Planar), + mLevel(OMX_VIDEO_VP8Level_Version0), + mConversionBuffer(NULL) { + + initPorts(); +} + + +SoftVPXEncoder::~SoftVPXEncoder() { + releaseEncoder(); +} + + +void SoftVPXEncoder::initPorts() { + OMX_PARAM_PORTDEFINITIONTYPE inputPort; + OMX_PARAM_PORTDEFINITIONTYPE outputPort; + + InitOMXParams(&inputPort); + InitOMXParams(&outputPort); + + inputPort.nBufferCountMin = kNumBuffers; + inputPort.nBufferCountActual = inputPort.nBufferCountMin; + inputPort.bEnabled = OMX_TRUE; + inputPort.bPopulated = OMX_FALSE; + inputPort.eDomain = OMX_PortDomainVideo; + inputPort.bBuffersContiguous = OMX_FALSE; + inputPort.format.video.pNativeRender = NULL; + inputPort.format.video.nFrameWidth = mWidth; + inputPort.format.video.nFrameHeight = mHeight; + inputPort.format.video.nStride = inputPort.format.video.nFrameWidth; + inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight; + inputPort.format.video.nBitrate = 0; + // frameRate is reciprocal of frameDuration, which is + // in microseconds. It is also in Q16 format. 
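// Illustrative sketch, not part of this change: the Q16 fixed-point
// conversion used here (and reversed in internalSetPortParams()) is
//   xFramerate      = (1000000 / frameDurationUs) << 16;  // fps -> Q16
//   frameDurationUs = 1000000 / (xFramerate >> 16);       // Q16 -> us
// e.g. the 33333 us default gives 30 fps, i.e. 30 << 16 == 0x001E0000;
// integer division means any fractional frame rate is discarded.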
+ inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16; + inputPort.format.video.bFlagErrorConcealment = OMX_FALSE; + inputPort.nPortIndex = kInputPortIndex; + inputPort.eDir = OMX_DirInput; + inputPort.nBufferAlignment = kInputBufferAlignment; + inputPort.format.video.cMIMEType = + const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW); + inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused; + inputPort.format.video.eColorFormat = mColorFormat; + inputPort.format.video.pNativeWindow = NULL; + inputPort.nBufferSize = + (inputPort.format.video.nStride * + inputPort.format.video.nSliceHeight * 3) / 2; + + addPort(inputPort); + + outputPort.nBufferCountMin = kNumBuffers; + outputPort.nBufferCountActual = outputPort.nBufferCountMin; + outputPort.bEnabled = OMX_TRUE; + outputPort.bPopulated = OMX_FALSE; + outputPort.eDomain = OMX_PortDomainVideo; + outputPort.bBuffersContiguous = OMX_FALSE; + outputPort.format.video.pNativeRender = NULL; + outputPort.format.video.nFrameWidth = mWidth; + outputPort.format.video.nFrameHeight = mHeight; + outputPort.format.video.nStride = outputPort.format.video.nFrameWidth; + outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight; + outputPort.format.video.nBitrate = mBitrate; + outputPort.format.video.xFramerate = 0; + outputPort.format.video.bFlagErrorConcealment = OMX_FALSE; + outputPort.nPortIndex = kOutputPortIndex; + outputPort.eDir = OMX_DirOutput; + outputPort.nBufferAlignment = kOutputBufferAlignment; + outputPort.format.video.cMIMEType = + const_cast<char *>(MEDIA_MIMETYPE_VIDEO_VPX); + outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX; + outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused; + outputPort.format.video.pNativeWindow = NULL; + outputPort.nBufferSize = 256 * 1024; // arbitrary + + addPort(outputPort); +} + + +status_t SoftVPXEncoder::initEncoder() { + vpx_codec_err_t codec_return; + + mCodecContext = new vpx_codec_ctx_t; + mCodecConfiguration = new vpx_codec_enc_cfg_t; + mCodecInterface = vpx_codec_vp8_cx(); + + if (mCodecInterface == NULL) { + return UNKNOWN_ERROR; + } + + codec_return = vpx_codec_enc_config_default(mCodecInterface, + mCodecConfiguration, + 0); // Codec specific flags + + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error populating default configuration for vpx encoder."); + return UNKNOWN_ERROR; + } + + mCodecConfiguration->g_w = mWidth; + mCodecConfiguration->g_h = mHeight; + mCodecConfiguration->g_threads = GetCPUCoreCount(); + mCodecConfiguration->g_error_resilient = mErrorResilience; + + switch (mLevel) { + case OMX_VIDEO_VP8Level_Version0: + mCodecConfiguration->g_profile = 0; + break; + + case OMX_VIDEO_VP8Level_Version1: + mCodecConfiguration->g_profile = 1; + break; + + case OMX_VIDEO_VP8Level_Version2: + mCodecConfiguration->g_profile = 2; + break; + + case OMX_VIDEO_VP8Level_Version3: + mCodecConfiguration->g_profile = 3; + break; + + default: + mCodecConfiguration->g_profile = 0; + } + + // OMX timebase unit is microsecond + // g_timebase is in seconds (i.e. 
1/1000000 seconds) + mCodecConfiguration->g_timebase.num = 1; + mCodecConfiguration->g_timebase.den = 1000000; + // rc_target_bitrate is in kbps, mBitrate in bps + mCodecConfiguration->rc_target_bitrate = mBitrate/1000; + mCodecConfiguration->rc_end_usage = mBitrateControlMode; + + codec_return = vpx_codec_enc_init(mCodecContext, + mCodecInterface, + mCodecConfiguration, + 0); // flags + + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error initializing vpx encoder"); + return UNKNOWN_ERROR; + } + + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_TOKEN_PARTITIONS, + mDCTPartitions); + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error setting dct partitions for vpx encoder."); + return UNKNOWN_ERROR; + } + + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + if (mConversionBuffer == NULL) { + mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2); + if (mConversionBuffer == NULL) { + ALOGE("Allocating conversion buffer failed."); + return UNKNOWN_ERROR; + } + } + } + return OK; +} + + +status_t SoftVPXEncoder::releaseEncoder() { + if (mCodecContext != NULL) { + vpx_codec_destroy(mCodecContext); + delete mCodecContext; + mCodecContext = NULL; + } + + if (mCodecConfiguration != NULL) { + delete mCodecConfiguration; + mCodecConfiguration = NULL; + } + + if (mConversionBuffer != NULL) { + delete mConversionBuffer; + mConversionBuffer = NULL; + } + + // this one is not allocated by us + mCodecInterface = NULL; + + return OK; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index, + OMX_PTR param) { + // can include extension index OMX_INDEXEXTTYPE + const int32_t indexFull = index; + + switch (indexFull) { + case OMX_IndexParamVideoPortFormat: { + OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams = + (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param; + + if (formatParams->nPortIndex == kInputPortIndex) { + if (formatParams->nIndex >= kNumberOfSupportedColorFormats) { + return OMX_ErrorNoMore; + } + + // Color formats, in order of preference + if (formatParams->nIndex == 0) { + formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar; + } else if (formatParams->nIndex == 1) { + formatParams->eColorFormat = + OMX_COLOR_FormatYUV420SemiPlanar; + } else { + formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque; + } + + formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused; + // Converting from microseconds + // Also converting to Q16 format + formatParams->xFramerate = (1000000/mFrameDurationUs) << 16; + return OMX_ErrorNone; + } else if (formatParams->nPortIndex == kOutputPortIndex) { + formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX; + formatParams->eColorFormat = OMX_COLOR_FormatUnused; + formatParams->xFramerate = 0; + return OMX_ErrorNone; + } else { + return OMX_ErrorBadPortIndex; + } + } + + case OMX_IndexParamVideoBitrate: { + OMX_VIDEO_PARAM_BITRATETYPE *bitrate = + (OMX_VIDEO_PARAM_BITRATETYPE *)param; + + if (bitrate->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + bitrate->nTargetBitrate = mBitrate; + + if (mBitrateControlMode == VPX_VBR) { + bitrate->eControlRate = OMX_Video_ControlRateVariable; + } else if (mBitrateControlMode == VPX_CBR) { + bitrate->eControlRate = OMX_Video_ControlRateConstant; + } else { + return OMX_ErrorUnsupportedSetting; + } + return OMX_ErrorNone; + } + + // VP8 specific parameters that use extension headers + case OMX_IndexParamVideoVp8: { + OMX_VIDEO_PARAM_VP8TYPE *vp8Params = + (OMX_VIDEO_PARAM_VP8TYPE *)param; + + if (vp8Params->nPortIndex != kOutputPortIndex) { + return 
OMX_ErrorUnsupportedIndex; + } + + vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain; + vp8Params->eLevel = mLevel; + vp8Params->nDCTPartitions = mDCTPartitions; + vp8Params->bErrorResilientMode = mErrorResilience; + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoProfileLevelQuerySupported: { + OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = + (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; + + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + switch (profileAndLevel->nProfileIndex) { + case 0: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0; + break; + + case 1: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1; + break; + + case 2: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2; + break; + + case 3: + profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3; + break; + + default: + return OMX_ErrorNoMore; + } + + profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoProfileLevelCurrent: { + OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel = + (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param; + + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + profileAndLevel->eLevel = mLevel; + profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain; + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::internalGetParameter(index, param); + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index, + const OMX_PTR param) { + // can include extension index OMX_INDEXEXTTYPE + const int32_t indexFull = index; + + switch (indexFull) { + case OMX_IndexParamStandardComponentRole: + return internalSetRoleParams( + (const OMX_PARAM_COMPONENTROLETYPE *)param); + + case OMX_IndexParamVideoBitrate: + return internalSetBitrateParams( + (const OMX_VIDEO_PARAM_BITRATETYPE *)param); + + case OMX_IndexParamPortDefinition: + return internalSetPortParams( + (const OMX_PARAM_PORTDEFINITIONTYPE *)param); + + case OMX_IndexParamVideoPortFormat: + return internalSetFormatParams( + (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param); + + case OMX_IndexParamVideoVp8: + return internalSetVp8Params( + (const OMX_VIDEO_PARAM_VP8TYPE *)param); + + case OMX_IndexParamVideoProfileLevelCurrent: + return internalSetProfileLevel( + (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param); + + default: + return SimpleSoftOMXComponent::internalSetParameter(index, param); + } +} + +OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel( + const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) { + if (profileAndLevel->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) { + return OMX_ErrorBadParameter; + } + + if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 || + profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) { + mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel; + } else { + return OMX_ErrorBadParameter; + } + + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params( + const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) { + if (vp8Params->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) { + return OMX_ErrorBadParameter; + } + + if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 || + vp8Params->eLevel == 
OMX_VIDEO_VP8Level_Version1 || + vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 || + vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) { + mLevel = vp8Params->eLevel; + } else { + return OMX_ErrorBadParameter; + } + + if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) { + mDCTPartitions = vp8Params->nDCTPartitions; + } else { + return OMX_ErrorBadParameter; + } + + mErrorResilience = vp8Params->bErrorResilientMode; + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams( + const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) { + if (format->nPortIndex == kInputPortIndex) { + if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar || + format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || + format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) { + mColorFormat = format->eColorFormat; + return OMX_ErrorNone; + } else { + ALOGE("Unsupported color format %i", format->eColorFormat); + return OMX_ErrorUnsupportedSetting; + } + } else if (format->nPortIndex == kOutputPortIndex) { + if (format->eCompressionFormat == OMX_VIDEO_CodingVPX) { + return OMX_ErrorNone; + } else { + return OMX_ErrorUnsupportedSetting; + } + } else { + return OMX_ErrorBadPortIndex; + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams( + const OMX_PARAM_COMPONENTROLETYPE* role) { + const char* roleText = (const char*)role->cRole; + const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1; + + if (strncmp(roleText, "video_encoder.vpx", roleTextMaxSize)) { + ALOGE("Unsupported component role"); + return OMX_ErrorBadParameter; + } + + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams( + const OMX_PARAM_PORTDEFINITIONTYPE* port) { + if (port->nPortIndex == kInputPortIndex) { + mWidth = port->format.video.nFrameWidth; + mHeight = port->format.video.nFrameHeight; + + // xFramerate comes in Q16 format, in frames per second unit + const uint32_t framerate = port->format.video.xFramerate >> 16; + // frame duration is in microseconds + mFrameDurationUs = (1000000/framerate); + + if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar || + port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || + port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) { + mColorFormat = port->format.video.eColorFormat; + } else { + return OMX_ErrorUnsupportedSetting; + } + + return OMX_ErrorNone; + } else if (port->nPortIndex == kOutputPortIndex) { + mBitrate = port->format.video.nBitrate; + return OMX_ErrorNone; + } else { + return OMX_ErrorBadPortIndex; + } +} + + +OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) { + if (bitrate->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + mBitrate = bitrate->nTargetBitrate; + + if (bitrate->eControlRate == OMX_Video_ControlRateVariable) { + mBitrateControlMode = VPX_VBR; + } else if (bitrate->eControlRate == OMX_Video_ControlRateConstant) { + mBitrateControlMode = VPX_CBR; + } else { + return OMX_ErrorUnsupportedSetting; + } + + return OMX_ErrorNone; +} + + +void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) { + // Initialize encoder if not already + if (mCodecContext == NULL) { + if (OK != initEncoder()) { + ALOGE("Failed to initialize encoder"); + notify(OMX_EventError, + OMX_ErrorUndefined, + 0, // Extra notification data + NULL); // Notification data pointer + return; + } + } + + vpx_codec_err_t codec_return; + List<BufferInfo *> &inputBufferInfoQueue = getPortQueue(kInputPortIndex); + List<BufferInfo *> 
&outputBufferInfoQueue = getPortQueue(kOutputPortIndex); + + while (!inputBufferInfoQueue.empty() && !outputBufferInfoQueue.empty()) { + BufferInfo *inputBufferInfo = *inputBufferInfoQueue.begin(); + OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader; + + BufferInfo *outputBufferInfo = *outputBufferInfoQueue.begin(); + OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader; + + if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); + inputBufferInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inputBufferHeader); + + outputBufferHeader->nFilledLen = 0; + outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); + outputBufferInfo->mOwnedByUs = false; + notifyFillBufferDone(outputBufferHeader); + return; + } + + uint8_t* source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset; + + // NOTE: As much as nothing is known about color format + // when it is denoted as AndroidOpaque, it is at least + // assumed to be planar. + if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + ConvertSemiPlanarToPlanar(source, mConversionBuffer, mWidth, mHeight); + source = mConversionBuffer; + } + vpx_image_t raw_frame; + vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight, + kInputBufferAlignment, source); + codec_return = vpx_codec_encode(mCodecContext, + &raw_frame, + inputBufferHeader->nTimeStamp, // in timebase units + mFrameDurationUs, // frame duration in timebase units + 0, // frame flags + VPX_DL_REALTIME); // encoding deadline + if (codec_return != VPX_CODEC_OK) { + ALOGE("vpx encoder failed to encode frame"); + notify(OMX_EventError, + OMX_ErrorUndefined, + 0, // Extra notification data + NULL); // Notification data pointer + return; + } + + vpx_codec_iter_t encoded_packet_iterator = NULL; + const vpx_codec_cx_pkt_t* encoded_packet; + + while ((encoded_packet = vpx_codec_get_cx_data( + mCodecContext, &encoded_packet_iterator))) { + if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) { + outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts; + outputBufferHeader->nFlags = 0; + outputBufferHeader->nOffset = 0; + outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz; + memcpy(outputBufferHeader->pBuffer, + encoded_packet->data.frame.buf, + encoded_packet->data.frame.sz); + outputBufferInfo->mOwnedByUs = false; + outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); + notifyFillBufferDone(outputBufferHeader); + } + } + + inputBufferInfo->mOwnedByUs = false; + inputBufferInfoQueue.erase(inputBufferInfoQueue.begin()); + notifyEmptyBufferDone(inputBufferHeader); + } +} +} // namespace android + + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component) { + return new android::SoftVPXEncoder(name, callbacks, appData, component); +} diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h new file mode 100644 index 0000000..3bc05c0 --- /dev/null +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h @@ -0,0 +1,203 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SOFT_VPX_ENCODER_H_ + +#define SOFT_VPX_ENCODER_H_ + +#include "SimpleSoftOMXComponent.h" + +#include <OMX_VideoExt.h> +#include <OMX_IndexExt.h> + +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_codec.h" +#include "vpx/vp8cx.h" + +namespace android { + +// Exposes a vpx encoder as an OMX Component +// +// Boilerplate for callback bindings are taken care +// by the base class SimpleSoftOMXComponent and its +// parent SoftOMXComponent. +// +// Only following encoder settings are available +// - target bitrate +// - rate control (constant / variable) +// - frame rate +// - error resilience +// - token partitioning +// - reconstruction & loop filters (g_profile) +// +// Only following color formats are recognized +// - YUV420Planar +// - YUV420SemiPlanar +// - AndroidOpaque +// +// Following settings are not configurable by the client +// - encoding deadline is realtime +// - multithreaded encoding utilizes a number of threads equal +// to online cpu's available +// - the algorithm interface for encoder is vp8 +// - fractional bits of frame rate is discarded +// - OMX timestamps are in microseconds, therefore +// encoder timebase is fixed to 1/1000000 + +class SoftVPXEncoder : public SimpleSoftOMXComponent { + public: + SoftVPXEncoder(const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + + protected: + virtual ~SoftVPXEncoder(); + + // Returns current values for requested OMX + // parameters + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR param); + + // Validates, extracts and stores relevant OMX + // parameters + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR param); + + // OMX callback when buffers available + // Note that both an input and output buffer + // is expected to be available to carry out + // encoding of the frame + virtual void onQueueFilled(OMX_U32 portIndex); + + private: + // number of buffers allocated per port + static const uint32_t kNumBuffers = 4; + + // OMX port indexes that refer to input and + // output ports respectively + static const uint32_t kInputPortIndex = 0; + static const uint32_t kOutputPortIndex = 1; + + // Byte-alignment required for buffers + static const uint32_t kInputBufferAlignment = 1; + static const uint32_t kOutputBufferAlignment = 2; + + // Max value supported for DCT partitions + static const uint32_t kMaxDCTPartitions = 3; + + // Number of supported input color formats + static const uint32_t kNumberOfSupportedColorFormats = 3; + + // vpx specific opaque data structure that + // stores encoder state + vpx_codec_ctx_t* mCodecContext; + + // vpx specific data structure that + // stores encoder configuration + vpx_codec_enc_cfg_t* mCodecConfiguration; + + // vpx specific read-only data structure + // that specifies algorithm interface (e.g. vp8) + vpx_codec_iface_t* mCodecInterface; + + // Width of the input frames + int32_t mWidth; + + // Height of the input frames + int32_t mHeight; + + // Target bitrate set for the encoder, in bits per second. 
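// Illustrative note, not part of this change: OMX expresses bitrate in bits
// per second while libvpx's rc_target_bitrate is in kilobits per second,
// hence the conversion in initEncoder():
//   mCodecConfiguration->rc_target_bitrate = mBitrate / 1000;
// so the 192000 bps default maps to rc_target_bitrate == 192.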
+ int32_t mBitrate; + + // Bitrate control mode, either constant or variable + vpx_rc_mode mBitrateControlMode; + + // Frame duration is the reciprocal of the frame rate, denoted + // in microseconds + uint64_t mFrameDurationUs; + + // vp8 specific configuration parameter + // that enables token partitioning of + // the stream into substreams + int32_t mDCTPartitions; + + // Parameter that denotes whether error resilience + // is enabled in the encoder + OMX_BOOL mErrorResilience; + + // Color format for the input port + OMX_COLOR_FORMATTYPE mColorFormat; + + // Encoder profile corresponding to OMX level parameter + // + // The inconsistency in the naming is caused by + // the OMX spec referring to vpx profiles (g_profile) + // as "levels" while using the name "profile" for + // something else. + OMX_VIDEO_VP8LEVELTYPE mLevel; + + // A conversion buffer is needed to convert semi-planar + // yuv420 to the planar format. + // It is only allocated if the input format is + // indeed YUV420SemiPlanar. + uint8_t* mConversionBuffer; + + // Initializes input and output OMX ports with sensible + // default values. + void initPorts(); + + // Initializes vpx encoder with available settings. + status_t initEncoder(); + + // Releases the vpx encoder instance, with its associated + // data structures. + // + // Unless called earlier, this is handled by the + // dtor. + status_t releaseEncoder(); + + // Handles port changes with respect to color formats + OMX_ERRORTYPE internalSetFormatParams( + const OMX_VIDEO_PARAM_PORTFORMATTYPE* format); + + // Verifies that the component role being set on this OMX component is + // strictly video_encoder.vpx + OMX_ERRORTYPE internalSetRoleParams( + const OMX_PARAM_COMPONENTROLETYPE* role); + + // Updates bitrate to reflect port settings. + OMX_ERRORTYPE internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE* bitrate); + + // Handles port definition changes. + OMX_ERRORTYPE internalSetPortParams( + const OMX_PARAM_PORTDEFINITIONTYPE* port); + + // Handles vp8 specific parameters.
+ OMX_ERRORTYPE internalSetVp8Params( + const OMX_VIDEO_PARAM_VP8TYPE* vp8Params); + + // Updates encoder profile + OMX_ERRORTYPE internalSetProfileLevel( + const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel); + + DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder); +}; + +} // namespace android + +#endif // SOFT_VPX_ENCODER_H_ diff --git a/media/libstagefright/codecs/on2/h264dec/Android.mk b/media/libstagefright/codecs/on2/h264dec/Android.mk index 772fd60..2539f98 100644 --- a/media/libstagefright/codecs/on2/h264dec/Android.mk +++ b/media/libstagefright/codecs/on2/h264dec/Android.mk @@ -97,7 +97,7 @@ ifeq ($(ARCH_ARM_HAVE_NEON),true) endif LOCAL_SHARED_LIBRARIES := \ - libstagefright libstagefright_omx libstagefright_foundation libutils \ + libstagefright libstagefright_omx libstagefright_foundation libutils liblog \ LOCAL_MODULE := libstagefright_soft_h264dec @@ -124,4 +124,3 @@ LOCAL_MODULE_TAGS := debug LOCAL_MODULE := decoder include $(BUILD_EXECUTABLE) - diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp index 6c3f834..6e36651 100644 --- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp +++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp @@ -311,18 +311,14 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) { BufferInfo *inInfo = *inQueue.begin(); OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; ++mPicId; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { - inQueue.erase(inQueue.begin()); - inInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inHeader); - mEOSStatus = INPUT_EOS_SEEN; - continue; - } OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE; memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE)); header->nTimeStamp = inHeader->nTimeStamp; header->nFlags = inHeader->nFlags; + if (header->nFlags & OMX_BUFFERFLAG_EOS) { + mEOSStatus = INPUT_EOS_SEEN; + } mPicToHeaderMap.add(mPicId, header); inQueue.erase(inQueue.begin()); diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c index 53b2fd8..cc838fd 100755 --- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c +++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c @@ -220,7 +220,7 @@ u32 h264bsdNextMbAddress(u32 *pSliceGroupMap, u32 picSizeInMbs, u32 currMbAddr) /* Variables */ - u32 i, sliceGroup, tmp; + u32 i, sliceGroup; /* Code */ @@ -231,11 +231,9 @@ u32 h264bsdNextMbAddress(u32 *pSliceGroupMap, u32 picSizeInMbs, u32 currMbAddr) sliceGroup = pSliceGroupMap[currMbAddr]; i = currMbAddr + 1; - tmp = pSliceGroupMap[i]; - while ((i < picSizeInMbs) && (tmp != sliceGroup)) + while ((i < picSizeInMbs) && (pSliceGroupMap[i] != sliceGroup)) { i++; - tmp = pSliceGroupMap[i]; } if (i == picSizeInMbs) diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk index 285c747..fe90a03 100644 --- a/media/libstagefright/codecs/raw/Android.mk +++ b/media/libstagefright/codecs/raw/Android.mk @@ -9,7 +9,7 @@ LOCAL_C_INCLUDES := \ frameworks/native/include/media/openmax LOCAL_SHARED_LIBRARIES := \ - libstagefright_omx libstagefright_foundation libutils + libstagefright_omx libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_rawdec LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk index 395dd6b..2232353 100644 --- a/media/libstagefright/codecs/vorbis/dec/Android.mk +++ 
b/media/libstagefright/codecs/vorbis/dec/Android.mk @@ -11,10 +11,9 @@ LOCAL_C_INCLUDES := \ LOCAL_SHARED_LIBRARIES := \ libvorbisidec libstagefright libstagefright_omx \ - libstagefright_foundation libutils + libstagefright_foundation libutils liblog LOCAL_MODULE := libstagefright_soft_vorbisdec LOCAL_MODULE_TAGS := optional include $(BUILD_SHARED_LIBRARY) - diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp index fab0b0c..4115324 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp @@ -410,6 +410,22 @@ void SoftVorbis::onPortFlushCompleted(OMX_U32 portIndex) { } } +void SoftVorbis::onReset() { + mInputBufferCount = 0; + mNumFramesOutput = 0; + if (mState != NULL) { + vorbis_dsp_clear(mState); + delete mState; + mState = NULL; + } + + if (mVi != NULL) { + vorbis_info_clear(mVi); + delete mVi; + mVi = NULL; + } +} + void SoftVorbis::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { if (portIndex != 1) { return; diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h index e252f55..cb628a0 100644 --- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h +++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h @@ -43,6 +43,7 @@ protected: virtual void onQueueFilled(OMX_U32 portIndex); virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); private: enum { diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp index 2704a37..77f21b7 100644 --- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp +++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp @@ -24,7 +24,7 @@ #include <media/stagefright/MetaData.h> #include <system/window.h> #include <ui/GraphicBufferMapper.h> -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> namespace android { diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp index dff931d..ad10d2b 100644 --- a/media/libstagefright/foundation/ALooperRoster.cpp +++ b/media/libstagefright/foundation/ALooperRoster.cpp @@ -82,7 +82,8 @@ status_t ALooperRoster::postMessage_l( ssize_t index = mHandlers.indexOfKey(msg->target()); if (index < 0) { - ALOGW("failed to post message. Target handler not registered."); + ALOGW("failed to post message '%s'. 
Target handler not registered.", + msg->debugString().c_str()); return -ENOENT; } diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk index b7577d6..d65e213 100644 --- a/media/libstagefright/foundation/Android.mk +++ b/media/libstagefright/foundation/Android.mk @@ -20,6 +20,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_SHARED_LIBRARIES := \ libbinder \ libutils \ + liblog LOCAL_CFLAGS += -Wno-multichar diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index 733753b..505bdb3 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -40,10 +40,13 @@ namespace android { -LiveSession::LiveSession(uint32_t flags, bool uidValid, uid_t uid) - : mFlags(flags), +LiveSession::LiveSession( + const sp<AMessage> ¬ify, uint32_t flags, bool uidValid, uid_t uid) + : mNotify(notify), + mFlags(flags), mUIDValid(uidValid), mUID(uid), + mInPreparationPhase(true), mDataSource(new LiveDataSource), mHTTPDataSource( HTTPBase::Create( @@ -179,7 +182,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { if (playlist == NULL) { ALOGE("unable to fetch master playlist '%s'.", url.c_str()); - mDataSource->queueEOS(ERROR_IO); + signalEOS(ERROR_IO); return; } @@ -207,7 +210,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { void LiveSession::onDisconnect() { ALOGI("onDisconnect"); - mDataSource->queueEOS(ERROR_END_OF_STREAM); + signalEOS(ERROR_END_OF_STREAM); Mutex::Autolock autoLock(mLock); mDisconnectPending = false; @@ -561,7 +564,8 @@ rinse_repeat: // unchanged from the last time we tried. } else { ALOGE("failed to load playlist at url '%s'", url.c_str()); - mDataSource->queueEOS(ERROR_IO); + signalEOS(ERROR_IO); + return; } } else { @@ -627,22 +631,20 @@ rinse_repeat: if (index < mPlaylist->size()) { int32_t newSeqNumber = firstSeqNumberInPlaylist + index; - if (newSeqNumber != mSeqNumber) { - ALOGI("seeking to seq no %d", newSeqNumber); + ALOGI("seeking to seq no %d", newSeqNumber); - mSeqNumber = newSeqNumber; + mSeqNumber = newSeqNumber; - mDataSource->reset(); + mDataSource->reset(); - // reseting the data source will have had the - // side effect of discarding any previously queued - // bandwidth change discontinuity. - // Therefore we'll need to treat these seek - // discontinuities as involving a bandwidth change - // even if they aren't directly. - seekDiscontinuity = true; - bandwidthChanged = true; - } + // reseting the data source will have had the + // side effect of discarding any previously queued + // bandwidth change discontinuity. + // Therefore we'll need to treat these seek + // discontinuities as involving a bandwidth change + // even if they aren't directly. 
+ seekDiscontinuity = true; + bandwidthChanged = true; } } @@ -704,7 +706,7 @@ rinse_repeat: mSeqNumber, firstSeqNumberInPlaylist, firstSeqNumberInPlaylist + mPlaylist->size() - 1); - mDataSource->queueEOS(ERROR_END_OF_STREAM); + signalEOS(ERROR_END_OF_STREAM); return; } } @@ -737,7 +739,7 @@ rinse_repeat: status_t err = fetchFile(uri.c_str(), &buffer, range_offset, range_length); if (err != OK) { ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str()); - mDataSource->queueEOS(err); + signalEOS(err); return; } @@ -748,7 +750,7 @@ rinse_repeat: if (err != OK) { ALOGE("decryptBuffer failed w/ error %d", err); - mDataSource->queueEOS(err); + signalEOS(err); return; } @@ -760,7 +762,7 @@ rinse_repeat: mBandwidthItems.removeAt(bandwidthIndex); if (mBandwidthItems.isEmpty()) { - mDataSource->queueEOS(ERROR_UNSUPPORTED); + signalEOS(ERROR_UNSUPPORTED); return; } @@ -824,11 +826,42 @@ rinse_repeat: postMonitorQueue(); } +void LiveSession::signalEOS(status_t err) { + if (mInPreparationPhase && mNotify != NULL) { + sp<AMessage> notify = mNotify->dup(); + + notify->setInt32( + "what", + err == ERROR_END_OF_STREAM + ? kWhatPrepared : kWhatPreparationFailed); + + if (err != ERROR_END_OF_STREAM) { + notify->setInt32("err", err); + } + + notify->post(); + + mInPreparationPhase = false; + } + + mDataSource->queueEOS(err); +} + void LiveSession::onMonitorQueue() { if (mSeekTimeUs >= 0 || mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) { onDownloadNext(); } else { + if (mInPreparationPhase) { + if (mNotify != NULL) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatPrepared); + notify->post(); + } + + mInPreparationPhase = false; + } + postMonitorQueue(1000000ll); } } diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index 44e03dc..68bbca2 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -163,9 +163,6 @@ status_t M3UParser::parse(const void *_data, size_t size) { while (offsetLF < size && data[offsetLF] != '\n') { ++offsetLF; } - if (offsetLF >= size) { - break; - } AString line; if (offsetLF > offset && data[offsetLF - 1] == '\r') { diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk index ff35d4a..80a1a3a 100644 --- a/media/libstagefright/id3/Android.mk +++ b/media/libstagefright/id3/Android.mk @@ -16,7 +16,7 @@ LOCAL_SRC_FILES := \ testid3.cpp LOCAL_SHARED_LIBRARIES := \ - libstagefright libutils libbinder libstagefright_foundation + libstagefright libutils liblog libbinder libstagefright_foundation LOCAL_STATIC_LIBRARIES := \ libstagefright_id3 diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 1422687..2306f31 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -36,7 +36,7 @@ struct MediaBuffer; struct MediaExtractor; struct MediaSource; struct NuCachedSource2; -struct ISurfaceTexture; +struct IGraphicBufferProducer; class DrmManagerClinet; class DecryptHandle; @@ -81,7 +81,7 @@ struct AwesomePlayer { bool isPlaying() const; - status_t setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture); + status_t setSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer); void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink); status_t setLooping(bool shouldLoop); diff --git a/media/libstagefright/include/ChromiumHTTPDataSource.h 
b/media/libstagefright/include/ChromiumHTTPDataSource.h index 82e08fd..785f939 100644 --- a/media/libstagefright/include/ChromiumHTTPDataSource.h +++ b/media/libstagefright/include/ChromiumHTTPDataSource.h @@ -53,6 +53,9 @@ struct ChromiumHTTPDataSource : public HTTPBase { virtual status_t reconnectAtOffset(off64_t offset); + static status_t UpdateProxyConfig( + const char *host, int32_t port, const char *exclusionList); + protected: virtual ~ChromiumHTTPDataSource(); diff --git a/media/libstagefright/include/FragmentedMP4Extractor.h b/media/libstagefright/include/FragmentedMP4Extractor.h deleted file mode 100644 index 763cd3a..0000000 --- a/media/libstagefright/include/FragmentedMP4Extractor.h +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef FRAGMENTED_MP4_EXTRACTOR_H_ - -#define FRAGMENTED_MP4_EXTRACTOR_H_ - -#include "include/FragmentedMP4Parser.h" - -#include <media/stagefright/MediaExtractor.h> -#include <utils/Vector.h> -#include <utils/String8.h> - -namespace android { - -struct AMessage; -class DataSource; -class SampleTable; -class String8; - -class FragmentedMP4Extractor : public MediaExtractor { -public: - // Extractor assumes ownership of "source". 
- FragmentedMP4Extractor(const sp<DataSource> &source); - - virtual size_t countTracks(); - virtual sp<MediaSource> getTrack(size_t index); - virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags); - virtual sp<MetaData> getMetaData(); - virtual uint32_t flags() const; - -protected: - virtual ~FragmentedMP4Extractor(); - -private: - sp<ALooper> mLooper; - sp<FragmentedMP4Parser> mParser; - sp<DataSource> mDataSource; - status_t mInitCheck; - size_t mAudioTrackIndex; - size_t mTrackCount; - - sp<MetaData> mFileMetaData; - - Vector<uint32_t> mPath; - - FragmentedMP4Extractor(const FragmentedMP4Extractor &); - FragmentedMP4Extractor &operator=(const FragmentedMP4Extractor &); -}; - -bool SniffFragmentedMP4( - const sp<DataSource> &source, String8 *mimeType, float *confidence, - sp<AMessage> *); - -} // namespace android - -#endif // MPEG4_EXTRACTOR_H_ diff --git a/media/libstagefright/include/FragmentedMP4Parser.h b/media/libstagefright/include/FragmentedMP4Parser.h index 0edafb9..dbe02b8 100644 --- a/media/libstagefright/include/FragmentedMP4Parser.h +++ b/media/libstagefright/include/FragmentedMP4Parser.h @@ -263,7 +263,7 @@ private: void copyBuffer( sp<ABuffer> *dst, - size_t offset, uint64_t size, size_t extra = 0) const; + size_t offset, uint64_t size) const; DISALLOW_EVIL_CONSTRUCTORS(FragmentedMP4Parser); }; diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h index b8e10f7..c2dc351 100644 --- a/media/libstagefright/include/HTTPBase.h +++ b/media/libstagefright/include/HTTPBase.h @@ -48,6 +48,9 @@ struct HTTPBase : public DataSource { virtual status_t setBandwidthStatCollectFreq(int32_t freqMs); + static status_t UpdateProxyConfig( + const char *host, int32_t port, const char *exclusionList); + void setUID(uid_t uid); bool getUID(uid_t *uid) const; diff --git a/media/libstagefright/include/LiveSession.h b/media/libstagefright/include/LiveSession.h index f329cc9..db44a33 100644 --- a/media/libstagefright/include/LiveSession.h +++ b/media/libstagefright/include/LiveSession.h @@ -35,7 +35,9 @@ struct LiveSession : public AHandler { // Don't log any URLs. kFlagIncognito = 1, }; - LiveSession(uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); + LiveSession( + const sp<AMessage> ¬ify, + uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); sp<DataSource> getDataSource(); @@ -53,6 +55,12 @@ struct LiveSession : public AHandler { bool isSeekable() const; bool hasDynamicDuration() const; + // Posted notification's "what" field will carry one of the following: + enum { + kWhatPrepared, + kWhatPreparationFailed, + }; + protected: virtual ~LiveSession(); @@ -76,10 +84,13 @@ private: unsigned long mBandwidth; }; + sp<AMessage> mNotify; uint32_t mFlags; bool mUIDValid; uid_t mUID; + bool mInPreparationPhase; + sp<LiveDataSource> mDataSource; sp<HTTPBase> mHTTPDataSource; @@ -144,6 +155,8 @@ private: // This is computed by summing the durations of all segments before it. 
int64_t getSegmentStartTimeUs(int32_t seqNumber) const; + void signalEOS(status_t err); + DISALLOW_EVIL_CONSTRUCTORS(LiveSession); }; diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h index 5c549e0..35eff96 100644 --- a/media/libstagefright/include/MPEG4Extractor.h +++ b/media/libstagefright/include/MPEG4Extractor.h @@ -18,7 +18,12 @@ #define MPEG4_EXTRACTOR_H_ +#include <arpa/inet.h> + +#include <media/stagefright/DataSource.h> #include <media/stagefright/MediaExtractor.h> +#include <media/stagefright/Utils.h> +#include <utils/List.h> #include <utils/Vector.h> #include <utils/String8.h> @@ -29,6 +34,11 @@ class DataSource; class SampleTable; class String8; +struct SidxEntry { + size_t mSize; + uint32_t mDurationUs; +}; + class MPEG4Extractor : public MediaExtractor { public: // Extractor assumes ownership of "source". @@ -39,6 +49,7 @@ public: virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags); virtual sp<MetaData> getMetaData(); + virtual uint32_t flags() const; // for DRM virtual char* getDrmTrackInfo(size_t trackID, int *len); @@ -47,6 +58,12 @@ protected: virtual ~MPEG4Extractor(); private: + + struct PsshInfo { + uint8_t uuid[16]; + uint32_t datalen; + uint8_t *data; + }; struct Track { Track *next; sp<MetaData> meta; @@ -56,6 +73,12 @@ private: bool skipTrack; }; + Vector<SidxEntry> mSidxEntries; + uint64_t mSidxDuration; + off64_t mMoofOffset; + + Vector<PsshInfo> mPssh; + sp<DataSource> mDataSource; status_t mInitCheck; bool mHasVideo; @@ -93,6 +116,8 @@ private: status_t parseTrackHeader(off64_t data_offset, off64_t data_size); + status_t parseSegmentIndex(off64_t data_offset, size_t data_size); + Track *findTrackByMimePrefix(const char *mimePrefix); MPEG4Extractor(const MPEG4Extractor &); diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h index 2c87b34..24b8d98 100644 --- a/media/libstagefright/include/OMX.h +++ b/media/libstagefright/include/OMX.h @@ -79,6 +79,12 @@ public: node_id node, OMX_U32 port_index, const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer); + virtual status_t createInputSurface( + node_id node, OMX_U32 port_index, + sp<IGraphicBufferProducer> *bufferProducer); + + virtual status_t signalEndOfInputStream(node_id node); + virtual status_t allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data); diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h index 47ca579..67aba6b 100644 --- a/media/libstagefright/include/OMXNodeInstance.h +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -27,6 +27,7 @@ namespace android { class IOMXObserver; struct OMXMaster; +struct GraphicBufferSource; struct OMXNodeInstance { OMXNodeInstance( @@ -65,6 +66,11 @@ struct OMXNodeInstance { OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer, OMX::buffer_id *buffer); + status_t createInputSurface( + OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer); + + status_t signalEndOfInputStream(); + status_t allocateBuffer( OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer, void **buffer_data); @@ -82,12 +88,18 @@ struct OMXNodeInstance { OMX_U32 rangeOffset, OMX_U32 rangeLength, OMX_U32 flags, OMX_TICKS timestamp); + status_t emptyDirectBuffer( + OMX_BUFFERHEADERTYPE *header, + OMX_U32 rangeOffset, OMX_U32 rangeLength, + OMX_U32 flags, OMX_TICKS timestamp); + status_t getExtensionIndex( const char *parameterName, OMX_INDEXTYPE *index); void onMessage(const 
omx_message &msg); void onObserverDied(OMXMaster *master); void onGetHandleFailed(); + void onEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2); static OMX_CALLBACKTYPE kCallbacks; @@ -100,6 +112,13 @@ private: sp<IOMXObserver> mObserver; bool mDying; + // Lock only covers mGraphicBufferSource. We can't always use mLock + // because of rare instances where we'd end up locking it recursively. + Mutex mGraphicBufferSourceLock; + // Access this through getGraphicBufferSource(). + sp<GraphicBufferSource> mGraphicBufferSource; + + struct ActiveBuffer { OMX_U32 mPortIndex; OMX::buffer_id mID; @@ -132,6 +151,11 @@ private: OMX_IN OMX_PTR pAppData, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); + status_t storeMetaDataInBuffers_l(OMX_U32 portIndex, OMX_BOOL enable); + + sp<GraphicBufferSource> getGraphicBufferSource(); + void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource); + OMXNodeInstance(const OMXNodeInstance &); OMXNodeInstance &operator=(const OMXNodeInstance &); }; diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h new file mode 100644 index 0000000..ca59dc0 --- /dev/null +++ b/media/libstagefright/include/SDPLoader.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDP_LOADER_H_ + +#define SDP_LOADER_H_ + +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/foundation/AHandler.h> +#include <utils/String8.h> + +namespace android { + +struct HTTPBase; + +struct SDPLoader : public AHandler { + enum Flags { + // Don't log any URLs. 
+ kFlagIncognito = 1, + }; + enum { + kWhatSDPLoaded = 'sdpl' + }; + SDPLoader(const sp<AMessage> ¬ify, uint32_t flags = 0, bool uidValid = false, uid_t uid = 0); + + void load(const char* url, const KeyedVector<String8, String8> *headers); + + void cancel(); + +protected: + virtual ~SDPLoader() {} + + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + enum { + kWhatLoad = 'load', + }; + + void onLoad(const sp<AMessage> &msg); + + sp<AMessage> mNotify; + const char* mUrl; + uint32_t mFlags; + bool mUIDValid; + uid_t mUID; + sp<ALooper> mNetLooper; + bool mCancelled; + + sp<HTTPBase> mHTTPDataSource; + + DISALLOW_EVIL_CONSTRUCTORS(SDPLoader); +}; + +} // namespace android + +#endif // SDP_LOADER_H_ diff --git a/media/libstagefright/include/SimpleSoftOMXComponent.h b/media/libstagefright/include/SimpleSoftOMXComponent.h index 50cd275..f8c61eb 100644 --- a/media/libstagefright/include/SimpleSoftOMXComponent.h +++ b/media/libstagefright/include/SimpleSoftOMXComponent.h @@ -71,6 +71,7 @@ protected: virtual void onPortFlushCompleted(OMX_U32 portIndex); virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled); + virtual void onReset(); PortInfo *editPortInfo(OMX_U32 portIndex); diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h index 7fe7c06..673268b 100644 --- a/media/libstagefright/include/ThrottledSource.h +++ b/media/libstagefright/include/ThrottledSource.h @@ -28,18 +28,44 @@ struct ThrottledSource : public DataSource { const sp<DataSource> &source, int32_t bandwidthLimitBytesPerSecond); - virtual status_t initCheck() const; - + // implementation of readAt() that sleeps to achieve the desired max throughput virtual ssize_t readAt(off64_t offset, void *data, size_t size); - virtual status_t getSize(off64_t *size); - virtual uint32_t flags(); + // returns an empty string to prevent callers from using the Uri to construct a new datasource + virtual String8 getUri() { + return String8(); + } + + // following methods all call through to the wrapped DataSource's methods + + status_t initCheck() const { + return mSource->initCheck(); + } + + virtual status_t getSize(off64_t *size) { + return mSource->getSize(size); + } + + virtual uint32_t flags() { + return mSource->flags(); + } + + virtual status_t reconnectAtOffset(off64_t offset) { + return mSource->reconnectAtOffset(offset); + } + + virtual sp<DecryptHandle> DrmInitialization(const char *mime = NULL) { + return mSource->DrmInitialization(mime); + } + + virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client) { + mSource->getDrmInfo(handle, client); + }; virtual String8 getMIMEType() const { return mSource->getMIMEType(); } - private: Mutex mLock; diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h index e418822..d517320 100644 --- a/media/libstagefright/include/avc_utils.h +++ b/media/libstagefright/include/avc_utils.h @@ -36,8 +36,11 @@ enum { kAVCProfileCAVLC444Intra = 0x2c }; +// Optionally returns sample aspect ratio as well. 
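+// (sarWidth and sarHeight default to NULL, so callers that do not need the
+// sample aspect ratio can simply omit them.)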
void FindAVCDimensions( - const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height); + const sp<ABuffer> &seqParamSet, + int32_t *width, int32_t *height, + int32_t *sarWidth = NULL, int32_t *sarHeight = NULL); unsigned parseUE(ABitReader *br); diff --git a/media/libstagefright/include/chromium_http_stub.h b/media/libstagefright/include/chromium_http_stub.h index 869d4ac..e0651a4 100644 --- a/media/libstagefright/include/chromium_http_stub.h +++ b/media/libstagefright/include/chromium_http_stub.h @@ -23,6 +23,10 @@ namespace android { extern "C" { HTTPBase *createChromiumHTTPDataSource(uint32_t flags); + +status_t UpdateChromiumHTTPDataSourceProxyConfig( + const char *host, int32_t port, const char *exclusionList); + DataSource *createDataUriSource(const char *uri); } } diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp index 8f7d12b..b304749 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.cpp +++ b/media/libstagefright/matroska/MatroskaExtractor.cpp @@ -263,8 +263,8 @@ void BlockIterator::advance_l() { mCluster, nextCluster, pos, len); ALOGV("ParseNext returned %ld", res); - if (res > 0) { - // EOF + if (res != 0) { + // EOF or error mCluster = NULL; break; @@ -758,31 +758,69 @@ static void addESDSFromCodecPrivate( esds = NULL; } -void addVorbisCodecInfo( +status_t addVorbisCodecInfo( const sp<MetaData> &meta, const void *_codecPrivate, size_t codecPrivateSize) { - // printf("vorbis private data follows:\n"); // hexdump(_codecPrivate, codecPrivateSize); - CHECK(codecPrivateSize >= 3); + if (codecPrivateSize < 1) { + return ERROR_MALFORMED; + } const uint8_t *codecPrivate = (const uint8_t *)_codecPrivate; - CHECK(codecPrivate[0] == 0x02); - size_t len1 = codecPrivate[1]; - size_t len2 = codecPrivate[2]; + if (codecPrivate[0] != 0x02) { + return ERROR_MALFORMED; + } - CHECK(codecPrivateSize > 3 + len1 + len2); + // codecInfo starts with two lengths, len1 and len2, that are + // "Xiph-style-lacing encoded"... 
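For reference, Xiph-style lacing encodes each length as a run of 0xff bytes followed by a single terminating byte, the length being the sum of all those bytes; that is what the two length-decoding loops in the added code below implement. A minimal standalone sketch of that decode step (a hypothetical helper, not part of the patch):

    #include <stddef.h>
    #include <stdint.h>

    // Decodes one Xiph-style laced length starting at data[*offset].
    // Returns false if the buffer ends before the terminating byte.
    static bool decodeXiphLacedLength(
            const uint8_t *data, size_t size, size_t *offset, size_t *length) {
        size_t len = 0;
        // Sum the leading 0xff bytes.
        while (*offset < size && data[*offset] == 0xff) {
            len += 0xff;
            ++(*offset);
        }
        if (*offset >= size) {
            return false;  // ran off the end before the terminator
        }
        // Add the terminating byte (< 0xff) and advance past it.
        len += data[(*offset)++];
        *length = len;
        return true;
    }
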
- CHECK(codecPrivate[3] == 0x01); - meta->setData(kKeyVorbisInfo, 0, &codecPrivate[3], len1); + size_t offset = 1; + size_t len1 = 0; + while (offset < codecPrivateSize && codecPrivate[offset] == 0xff) { + len1 += 0xff; + ++offset; + } + if (offset >= codecPrivateSize) { + return ERROR_MALFORMED; + } + len1 += codecPrivate[offset++]; - CHECK(codecPrivate[len1 + 3] == 0x03); + size_t len2 = 0; + while (offset < codecPrivateSize && codecPrivate[offset] == 0xff) { + len2 += 0xff; + ++offset; + } + if (offset >= codecPrivateSize) { + return ERROR_MALFORMED; + } + len2 += codecPrivate[offset++]; + + if (codecPrivateSize < offset + len1 + len2) { + return ERROR_MALFORMED; + } + + if (codecPrivate[offset] != 0x01) { + return ERROR_MALFORMED; + } + meta->setData(kKeyVorbisInfo, 0, &codecPrivate[offset], len1); + + offset += len1; + if (codecPrivate[offset] != 0x03) { + return ERROR_MALFORMED; + } + + offset += len2; + if (codecPrivate[offset] != 0x05) { + return ERROR_MALFORMED; + } - CHECK(codecPrivate[len1 + len2 + 3] == 0x05); meta->setData( - kKeyVorbisBooks, 0, &codecPrivate[len1 + len2 + 3], - codecPrivateSize - len1 - len2 - 3); + kKeyVorbisBooks, 0, &codecPrivate[offset], + codecPrivateSize - offset); + + return OK; } void MatroskaExtractor::addTracks() { @@ -809,6 +847,8 @@ void MatroskaExtractor::addTracks() { sp<MetaData> meta = new MetaData; + status_t err = OK; + switch (track->GetType()) { case VIDEO_TRACK: { @@ -855,7 +895,8 @@ void MatroskaExtractor::addTracks() { } else if (!strcmp("A_VORBIS", codecID)) { meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS); - addVorbisCodecInfo(meta, codecPrivate, codecPrivateSize); + err = addVorbisCodecInfo( + meta, codecPrivate, codecPrivateSize); } else if (!strcmp("A_MPEG/L3", codecID)) { meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG); } else { @@ -872,6 +913,11 @@ void MatroskaExtractor::addTracks() { continue; } + if (err != OK) { + ALOGE("skipping track, codec specific data was malformed."); + continue; + } + long long durationNs = mSegment->GetDuration(); meta->setInt64(kKeyDuration, (durationNs + 500) / 1000); diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp index 451c837..0102656 100644 --- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp +++ b/media/libstagefright/mp4/FragmentedMP4Parser.cpp @@ -18,6 +18,7 @@ #define LOG_TAG "FragmentedMP4Parser" #include <utils/Log.h> +#include "include/avc_utils.h" #include "include/ESDS.h" #include "include/FragmentedMP4Parser.h" #include "TrackFragment.h" @@ -323,8 +324,7 @@ status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) { off_t totalOffset = mFirstMoofOffset; for (int i = 0; i < numSidxEntries; i++) { const SidxEntry *se = &info->mSidx[i]; - totalTime += se->mDurationUs; - if (totalTime > position) { + if (totalTime + se->mDurationUs > position) { mBuffer->setRange(0,0); mBufferPos = totalOffset; if (mFinalResult == ERROR_END_OF_STREAM) { @@ -333,9 +333,10 @@ status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) { resumeIfNecessary(); } info->mFragments.clear(); - info->mDecodingTime = position * info->mMediaTimeScale / 1000000ll; + info->mDecodingTime = totalTime * info->mMediaTimeScale / 1000000ll; return OK; } + totalTime += se->mDurationUs; totalOffset += se->mSize; } } @@ -965,6 +966,10 @@ status_t FragmentedMP4Parser::makeAccessUnit( sample.mSize); (*accessUnit)->meta()->setInt64("timeUs", presentationTimeUs); + if (IsIDR(*accessUnit)) { + 
(*accessUnit)->meta()->setInt32("is-sync-frame", 1); + } + return OK; } @@ -1007,6 +1012,9 @@ status_t FragmentedMP4Parser::makeAccessUnit( "timeUs", presentationTimeUs); } } + if (IsIDR(*accessUnit)) { + (*accessUnit)->meta()->setInt32("is-sync-frame", 1); + } return OK; } @@ -1975,8 +1983,8 @@ status_t FragmentedMP4Parser::parseTrackFragmentRun( } void FragmentedMP4Parser::copyBuffer( - sp<ABuffer> *dst, size_t offset, uint64_t size, size_t extra) const { - sp<ABuffer> buf = new ABuffer(size + extra); + sp<ABuffer> *dst, size_t offset, uint64_t size) const { + sp<ABuffer> buf = new ABuffer(size); memcpy(buf->data(), mBuffer->data() + offset, size); *dst = buf; diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index 4f6c4b2..9850a46 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -452,6 +452,10 @@ int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) { timeUs += mParser->mAbsoluteTimeAnchorUs; } + if (mParser->mTimeOffsetValid) { + timeUs += mParser->mTimeOffsetUs; + } + return timeUs; } @@ -534,6 +538,16 @@ status_t ATSParser::Stream::parse( mBuffer->setRange(0, 0); mExpectedContinuityCounter = -1; +#if 0 + // Uncomment this if you'd rather see no corruption whatsoever on + // screen and suspend updates until we come across another IDR frame. + + if (mStreamType == STREAMTYPE_H264) { + ALOGI("clearing video queue"); + mQueue->clear(true /* clearFormat */); + } +#endif + return OK; } @@ -920,6 +934,8 @@ sp<MediaSource> ATSParser::Stream::getSource(SourceType type) { ATSParser::ATSParser(uint32_t flags) : mFlags(flags), mAbsoluteTimeAnchorUs(-1ll), + mTimeOffsetValid(false), + mTimeOffsetUs(0ll), mNumTSPacketsParsed(0), mNumPCRs(0) { mPSISections.add(0 /* PID */, new PSISection); @@ -950,6 +966,13 @@ void ATSParser::signalDiscontinuity( CHECK(mPrograms.empty()); mAbsoluteTimeAnchorUs = timeUs; return; + } else if (type == DISCONTINUITY_TIME_OFFSET) { + int64_t offset; + CHECK(extra->findInt64("offset", &offset)); + + mTimeOffsetValid = true; + mTimeOffsetUs = offset; + return; } for (size_t i = 0; i < mPrograms.size(); ++i) { @@ -1036,7 +1059,7 @@ status_t ATSParser::parsePID( ssize_t sectionIndex = mPSISections.indexOfKey(PID); if (sectionIndex >= 0) { - const sp<PSISection> §ion = mPSISections.valueAt(sectionIndex); + sp<PSISection> section = mPSISections.valueAt(sectionIndex); if (payload_unit_start_indicator) { CHECK(section->isEmpty()); @@ -1045,7 +1068,6 @@ status_t ATSParser::parsePID( br->skipBits(skip * 8); } - CHECK((br->numBitsLeft() % 8) == 0); status_t err = section->append(br->data(), br->numBitsLeft() / 8); @@ -1080,10 +1102,13 @@ status_t ATSParser::parsePID( if (!handled) { mPSISections.removeItem(PID); + section.clear(); } } - section->clear(); + if (section != NULL) { + section->clear(); + } return OK; } diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h index 46edc45..a10edc9 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.h +++ b/media/libstagefright/mpeg2ts/ATSParser.h @@ -39,6 +39,7 @@ struct ATSParser : public RefBase { DISCONTINUITY_AUDIO_FORMAT = 2, DISCONTINUITY_VIDEO_FORMAT = 4, DISCONTINUITY_ABSOLUTE_TIME = 8, + DISCONTINUITY_TIME_OFFSET = 16, DISCONTINUITY_SEEK = DISCONTINUITY_TIME, @@ -106,6 +107,9 @@ private: int64_t mAbsoluteTimeAnchorUs; + bool mTimeOffsetValid; + int64_t mTimeOffsetUs; + size_t mNumTSPacketsParsed; void parseProgramAssociationTable(ABitReader *br); diff --git 
a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp index a605a05..3de3a61 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp @@ -28,9 +28,12 @@ namespace android { +const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs + AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta) : mIsAudio(false), mFormat(meta), + mLastQueuedTimeUs(0), mEOSResult(OK) { const char *mime; CHECK(meta->findCString(kKeyMIMEType, &mime)); @@ -141,9 +144,8 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) { return; } - int64_t timeUs; - CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); - ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", timeUs, timeUs / 1E6); + CHECK(buffer->meta()->findInt64("timeUs", &mLastQueuedTimeUs)); + ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); Mutex::Autolock autoLock(mLock); mBuffers.push_back(buffer); @@ -171,6 +173,7 @@ void AnotherPacketSource::queueDiscontinuity( } mEOSResult = OK; + mLastQueuedTimeUs = 0; sp<ABuffer> buffer = new ABuffer(0); buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type)); @@ -247,4 +250,15 @@ status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) { return OK; } +bool AnotherPacketSource::isFinished(int64_t duration) const { + if (duration > 0) { + int64_t diff = duration - mLastQueuedTimeUs; + if (diff < kNearEOSMarkUs && diff > -kNearEOSMarkUs) { + ALOGV("Detecting EOS due to near end"); + return true; + } + } + return (mEOSResult != OK); +} + } // namespace android diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h index d685b98..1db4068 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h @@ -58,6 +58,8 @@ struct AnotherPacketSource : public MediaSource { status_t dequeueAccessUnit(sp<ABuffer> *buffer); + bool isFinished(int64_t duration) const; + protected: virtual ~AnotherPacketSource(); @@ -67,6 +69,7 @@ private: bool mIsAudio; sp<MetaData> mFormat; + int64_t mLastQueuedTimeUs; List<sp<ABuffer> > mBuffers; status_t mEOSResult; diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp index 82fb637..9f3b19c 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -147,9 +147,9 @@ status_t ElementaryStreamQueue::appendData( } if (startOffset > 0) { - ALOGI("found something resembling an H.264/MPEG syncword at " - "offset %ld", - startOffset); + ALOGI("found something resembling an H.264/MPEG syncword " + "at offset %d", + startOffset); } data = &ptr[startOffset]; @@ -180,9 +180,9 @@ status_t ElementaryStreamQueue::appendData( } if (startOffset > 0) { - ALOGI("found something resembling an H.264/MPEG syncword at " - "offset %ld", - startOffset); + ALOGI("found something resembling an H.264/MPEG syncword " + "at offset %d", + startOffset); } data = &ptr[startOffset]; @@ -213,8 +213,9 @@ status_t ElementaryStreamQueue::appendData( } if (startOffset > 0) { - ALOGI("found something resembling an AAC syncword at offset %ld", - startOffset); + ALOGI("found something resembling an AAC syncword at " + "offset %d", + startOffset); } data = &ptr[startOffset]; @@ -241,8 +242,8 @@ status_t ElementaryStreamQueue::appendData( if (startOffset > 0) { ALOGI("found something resembling an MPEG audio " - "syncword at 
offset %ld", - startOffset); + "syncword at offset %d", + startOffset); } data = &ptr[startOffset]; @@ -394,10 +395,30 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitPCMAudio() { } sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { - int64_t timeUs; + if (mBuffer->size() == 0) { + return NULL; + } + + CHECK(!mRangeInfos.empty()); + const RangeInfo &info = *mRangeInfos.begin(); + if (mBuffer->size() < info.mLength) { + return NULL; + } + + CHECK_GE(info.mTimestampUs, 0ll); + + // The idea here is consume all AAC frames starting at offsets before + // info.mLength so we can assign a meaningful timestamp without + // having to interpolate. + // The final AAC frame may well extend into the next RangeInfo but + // that's ok. size_t offset = 0; - while (offset + 7 <= mBuffer->size()) { + while (offset < info.mLength) { + if (offset + 7 > mBuffer->size()) { + return NULL; + } + ABitReader bits(mBuffer->data() + offset, mBuffer->size() - offset); // adts_fixed_header @@ -450,24 +471,15 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { } if (offset + aac_frame_length > mBuffer->size()) { - break; + return NULL; } size_t headerSize = protection_absent ? 7 : 9; - int64_t tmpUs = fetchTimestamp(aac_frame_length); - CHECK_GE(tmpUs, 0ll); - - if (offset == 0) { - timeUs = tmpUs; - } - offset += aac_frame_length; } - if (offset == 0) { - return NULL; - } + int64_t timeUs = fetchTimestamp(offset); sp<ABuffer> accessUnit = new ABuffer(offset); memcpy(accessUnit->data(), mBuffer->data(), offset); @@ -492,7 +504,6 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { if (first) { timeUs = info->mTimestampUs; - first = false; } if (info->mLength > size) { @@ -509,6 +520,8 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { mRangeInfos.erase(mRangeInfos.begin()); info = NULL; } + + first = false; } if (timeUs == 0ll) { @@ -536,7 +549,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() { size_t nalSize; bool foundSlice = false; while ((err = getNextNALUnit(&data, &size, &nalStart, &nalSize)) == OK) { - CHECK_GT(nalSize, 0u); + if (nalSize == 0) continue; unsigned nalType = nalStart[0] & 0x1f; bool flush = false; diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk index d7fbbbe..a8b4939 100644 --- a/media/libstagefright/omx/Android.mk +++ b/media/libstagefright/omx/Android.mk @@ -2,6 +2,7 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ + GraphicBufferSource.cpp \ OMX.cpp \ OMXMaster.cpp \ OMXNodeInstance.cpp \ @@ -18,7 +19,9 @@ LOCAL_SHARED_LIBRARIES := \ libbinder \ libmedia \ libutils \ + liblog \ libui \ + libgui \ libcutils \ libstagefright_foundation \ libdl diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp new file mode 100644 index 0000000..ef27879 --- /dev/null +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -0,0 +1,467 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "GraphicBufferSource" +//#define LOG_NDEBUG 0 +#include <utils/Log.h> + +#include <GraphicBufferSource.h> + +#include <OMX_Core.h> +#include <media/stagefright/foundation/ADebug.h> + +#include <MetadataBufferType.h> +#include <ui/GraphicBuffer.h> + +namespace android { + +static const bool EXTRA_CHECK = true; + + +GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, + uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount) : + mInitCheck(UNKNOWN_ERROR), + mNodeInstance(nodeInstance), + mExecuting(false), + mNumFramesAvailable(0), + mEndOfStream(false), + mEndOfStreamSent(false) { + + ALOGV("GraphicBufferSource w=%u h=%u c=%u", + bufferWidth, bufferHeight, bufferCount); + + if (bufferWidth == 0 || bufferHeight == 0) { + ALOGE("Invalid dimensions %ux%u", bufferWidth, bufferHeight); + mInitCheck = BAD_VALUE; + return; + } + + String8 name("GraphicBufferSource"); + + mBufferQueue = new BufferQueue(true); + mBufferQueue->setConsumerName(name); + mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight); + mBufferQueue->setSynchronousMode(true); + mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER | + GRALLOC_USAGE_HW_TEXTURE); + + mInitCheck = mBufferQueue->setMaxAcquiredBufferCount(bufferCount); + if (mInitCheck != NO_ERROR) { + ALOGE("Unable to set BQ max acquired buffer count to %u: %d", + bufferCount, mInitCheck); + return; + } + + // Note that we can't create an sp<...>(this) in a ctor that will not keep a + // reference once the ctor ends, as that would cause the refcount of 'this' + // dropping to 0 at the end of the ctor. Since all we need is a wp<...> + // that's what we create. + wp<BufferQueue::ConsumerListener> listener; + listener = static_cast<BufferQueue::ConsumerListener*>(this); + + sp<BufferQueue::ConsumerListener> proxy; + proxy = new BufferQueue::ProxyConsumerListener(listener); + + mInitCheck = mBufferQueue->consumerConnect(proxy); + if (mInitCheck != NO_ERROR) { + ALOGE("Error connecting to BufferQueue: %s (%d)", + strerror(-mInitCheck), mInitCheck); + return; + } + + CHECK(mInitCheck == NO_ERROR); +} + +GraphicBufferSource::~GraphicBufferSource() { + ALOGV("~GraphicBufferSource"); + if (mBufferQueue != NULL) { + status_t err = mBufferQueue->consumerDisconnect(); + if (err != NO_ERROR) { + ALOGW("consumerDisconnect failed: %d", err); + } + } +} + +void GraphicBufferSource::omxExecuting() { + Mutex::Autolock autoLock(mMutex); + ALOGV("--> executing; avail=%d, codec vec size=%zd", + mNumFramesAvailable, mCodecBuffers.size()); + CHECK(!mExecuting); + mExecuting = true; + + // Start by loading up as many buffers as possible. We want to do this, + // rather than just submit the first buffer, to avoid a degenerate case: + // if all BQ buffers arrive before we start executing, and we only submit + // one here, the other BQ buffers will just sit until we get notified + // that the codec buffer has been released. We'd then acquire and + // submit a single additional buffer, repeatedly, never using more than + // one codec buffer simultaneously. (We could instead try to submit + // all BQ buffers whenever any codec buffer is freed, but if we get the + // initial conditions right that will never be useful.) 
+ while (mNumFramesAvailable) { + if (!fillCodecBuffer_l()) { + ALOGV("stop load with frames available (codecAvail=%d)", + isCodecBufferAvailable_l()); + break; + } + } + + ALOGV("done loading initial frames, avail=%d", mNumFramesAvailable); + + // If EOS has already been signaled, and there are no more frames to + // submit, try to send EOS now as well. + if (mEndOfStream && mNumFramesAvailable == 0) { + submitEndOfInputStream_l(); + } +} + +void GraphicBufferSource::omxLoaded(){ + Mutex::Autolock autoLock(mMutex); + ALOGV("--> loaded"); + CHECK(mExecuting); + + ALOGV("Dropped down to loaded, avail=%d eos=%d eosSent=%d", + mNumFramesAvailable, mEndOfStream, mEndOfStreamSent); + + // Codec is no longer executing. Discard all codec-related state. + mCodecBuffers.clear(); + // TODO: scan mCodecBuffers to verify that all mGraphicBuffer entries + // are null; complain if not + + mExecuting = false; +} + +void GraphicBufferSource::addCodecBuffer(OMX_BUFFERHEADERTYPE* header) { + Mutex::Autolock autoLock(mMutex); + + if (mExecuting) { + // This should never happen -- buffers can only be allocated when + // transitioning from "loaded" to "idle". + ALOGE("addCodecBuffer: buffer added while executing"); + return; + } + + ALOGV("addCodecBuffer h=%p size=%lu p=%p", + header, header->nAllocLen, header->pBuffer); + CodecBuffer codecBuffer; + codecBuffer.mHeader = header; + mCodecBuffers.add(codecBuffer); +} + +void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { + Mutex::Autolock autoLock(mMutex); + + CHECK(mExecuting); // could this happen if app stop()s early? + + int cbi = findMatchingCodecBuffer_l(header); + if (cbi < 0) { + // This should never happen. + ALOGE("codecBufferEmptied: buffer not recognized (h=%p)", header); + return; + } + + ALOGV("codecBufferEmptied h=%p size=%lu filled=%lu p=%p", + header, header->nAllocLen, header->nFilledLen, + header->pBuffer); + CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi)); + + // header->nFilledLen may not be the original value, so we can't compare + // that to zero to see of this was the EOS buffer. Instead we just + // see if the GraphicBuffer reference was null, which should only ever + // happen for EOS. + if (codecBuffer.mGraphicBuffer == NULL) { + CHECK(mEndOfStream && mEndOfStreamSent); + // No GraphicBuffer to deal with, no additional input or output is + // expected, so just return. + return; + } + + if (EXTRA_CHECK) { + // Pull the graphic buffer handle back out of the buffer, and confirm + // that it matches expectations. + OMX_U8* data = header->pBuffer; + buffer_handle_t bufferHandle; + memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t)); + if (bufferHandle != codecBuffer.mGraphicBuffer->handle) { + // should never happen + ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p", + bufferHandle, codecBuffer.mGraphicBuffer->handle); + CHECK(!"codecBufferEmptied: mismatched buffer"); + } + } + + // Find matching entry in our cached copy of the BufferQueue slots. + // If we find a match, release that slot. If we don't, the BufferQueue + // has dropped that GraphicBuffer, and there's nothing for us to release. + // + // (We could store "id" in CodecBuffer and avoid the slot search.) 
+ int id; + for (id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) { + if (mBufferSlot[id] == NULL) { + continue; + } + + if (mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) { + ALOGV("cbi %d matches bq slot %d, handle=%p", + cbi, id, mBufferSlot[id]->handle); + + mBufferQueue->releaseBuffer(id, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, + Fence::NO_FENCE); + break; + } + } + if (id == BufferQueue::NUM_BUFFER_SLOTS) { + ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d", + cbi); + } + + // Mark the codec buffer as available by clearing the GraphicBuffer ref. + codecBuffer.mGraphicBuffer = NULL; + + if (mNumFramesAvailable) { + // Fill this codec buffer. + CHECK(!mEndOfStreamSent); + ALOGV("buffer freed, %d frames avail (eos=%d)", + mNumFramesAvailable, mEndOfStream); + fillCodecBuffer_l(); + } else if (mEndOfStream) { + // No frames available, but EOS is pending, so use this buffer to + // send that. + ALOGV("buffer freed, EOS pending"); + submitEndOfInputStream_l(); + } + return; +} + +bool GraphicBufferSource::fillCodecBuffer_l() { + CHECK(mExecuting && mNumFramesAvailable > 0); + + int cbi = findAvailableCodecBuffer_l(); + if (cbi < 0) { + // No buffers available, bail. + ALOGV("fillCodecBuffer_l: no codec buffers, avail now %d", + mNumFramesAvailable); + return false; + } + + ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d", + mNumFramesAvailable); + BufferQueue::BufferItem item; + status_t err = mBufferQueue->acquireBuffer(&item); + if (err == BufferQueue::NO_BUFFER_AVAILABLE) { + // shouldn't happen + ALOGW("fillCodecBuffer_l: frame was not available"); + return false; + } else if (err != OK) { + // now what? fake end-of-stream? + ALOGW("fillCodecBuffer_l: acquireBuffer returned err=%d", err); + return false; + } + + mNumFramesAvailable--; + + // Wait for it to become available. + err = item.mFence->waitForever("GraphicBufferSource::fillCodecBuffer_l"); + if (err != OK) { + ALOGW("failed to wait for buffer fence: %d", err); + // keep going + } + + // If this is the first time we're seeing this buffer, add it to our + // slot table. + if (item.mGraphicBuffer != NULL) { + ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf); + mBufferSlot[item.mBuf] = item.mGraphicBuffer; + } + + err = submitBuffer_l(mBufferSlot[item.mBuf], item.mTimestamp / 1000, cbi); + if (err != OK) { + ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf); + mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, + EGL_NO_SYNC_KHR, Fence::NO_FENCE); + } else { + ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); + } + + return true; +} + +status_t GraphicBufferSource::signalEndOfInputStream() { + Mutex::Autolock autoLock(mMutex); + ALOGV("signalEndOfInputStream: exec=%d avail=%d eos=%d", + mExecuting, mNumFramesAvailable, mEndOfStream); + + if (mEndOfStream) { + ALOGE("EOS was already signaled"); + return INVALID_OPERATION; + } + + // Set the end-of-stream flag. If no frames are pending from the + // BufferQueue, and a codec buffer is available, and we're executing, + // we initiate the EOS from here. Otherwise, we'll let + // codecBufferEmptied() (or omxExecuting) do it. + // + // Note: if there are no pending frames and all codec buffers are + // available, we *must* submit the EOS from here or we'll just + // stall since no future events are expected. 
+ mEndOfStream = true; + + if (mExecuting && mNumFramesAvailable == 0) { + submitEndOfInputStream_l(); + } + + return OK; +} + +status_t GraphicBufferSource::submitBuffer_l(sp<GraphicBuffer>& graphicBuffer, + int64_t timestampUsec, int cbi) { + ALOGV("submitBuffer_l cbi=%d", cbi); + CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi)); + codecBuffer.mGraphicBuffer = graphicBuffer; + + OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader; + CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t)); + OMX_U8* data = header->pBuffer; + const OMX_U32 type = kMetadataBufferTypeGrallocSource; + buffer_handle_t handle = codecBuffer.mGraphicBuffer->handle; + memcpy(data, &type, 4); + memcpy(data + 4, &handle, sizeof(buffer_handle_t)); + + status_t err = mNodeInstance->emptyDirectBuffer(header, 0, + 4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME, + timestampUsec); + if (err != OK) { + ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err); + codecBuffer.mGraphicBuffer = NULL; + return err; + } + + ALOGV("emptyDirectBuffer succeeded, h=%p p=%p bufhandle=%p", + header, header->pBuffer, handle); + return OK; +} + +void GraphicBufferSource::submitEndOfInputStream_l() { + CHECK(mEndOfStream); + if (mEndOfStreamSent) { + ALOGV("EOS already sent"); + return; + } + + int cbi = findAvailableCodecBuffer_l(); + if (cbi < 0) { + ALOGV("submitEndOfInputStream_l: no codec buffers available"); + return; + } + + // We reject any additional incoming graphic buffers, so there's no need + // to stick a placeholder into codecBuffer.mGraphicBuffer to mark it as + // in-use. + CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi)); + + OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader; + if (EXTRA_CHECK) { + // Guard against implementations that don't check nFilledLen. + size_t fillLen = 4 + sizeof(buffer_handle_t); + CHECK(header->nAllocLen >= fillLen); + OMX_U8* data = header->pBuffer; + memset(data, 0xcd, fillLen); + } + + uint64_t timestamp = 0; // does this matter? + + status_t err = mNodeInstance->emptyDirectBuffer(header, /*offset*/ 0, + /*length*/ 0, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS, + timestamp); + if (err != OK) { + ALOGW("emptyDirectBuffer EOS failed: 0x%x", err); + } else { + ALOGV("submitEndOfInputStream_l: buffer submitted, header=%p cbi=%d", + header, cbi); + mEndOfStreamSent = true; + } +} + +int GraphicBufferSource::findAvailableCodecBuffer_l() { + CHECK(mCodecBuffers.size() > 0); + + for (int i = (int)mCodecBuffers.size() - 1; i>= 0; --i) { + if (mCodecBuffers[i].mGraphicBuffer == NULL) { + return i; + } + } + return -1; +} + +int GraphicBufferSource::findMatchingCodecBuffer_l( + const OMX_BUFFERHEADERTYPE* header) { + for (int i = (int)mCodecBuffers.size() - 1; i>= 0; --i) { + if (mCodecBuffers[i].mHeader == header) { + return i; + } + } + return -1; +} + +// BufferQueue::ConsumerListener callback +void GraphicBufferSource::onFrameAvailable() { + Mutex::Autolock autoLock(mMutex); + + ALOGV("onFrameAvailable exec=%d avail=%d", + mExecuting, mNumFramesAvailable); + + if (mEndOfStream) { + // This should only be possible if a new buffer was queued after + // EOS was signaled, i.e. the app is misbehaving. 
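+ // Acquire and immediately release the incoming frame so its
+ // BufferQueue slot is returned and the queue does not fill up.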
+ ALOGW("onFrameAvailable: EOS is set, ignoring frame"); + + BufferQueue::BufferItem item; + status_t err = mBufferQueue->acquireBuffer(&item); + if (err == OK) { + mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY, + EGL_NO_SYNC_KHR, item.mFence); + } + return; + } + + mNumFramesAvailable++; + + if (mExecuting) { + fillCodecBuffer_l(); + } +} + +// BufferQueue::ConsumerListener callback +void GraphicBufferSource::onBuffersReleased() { + Mutex::Autolock lock(mMutex); + + uint32_t slotMask; + if (mBufferQueue->getReleasedBuffers(&slotMask) != NO_ERROR) { + ALOGW("onBuffersReleased: unable to get released buffer set"); + slotMask = 0xffffffff; + } + + ALOGV("onBuffersReleased: 0x%08x", slotMask); + + for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) { + if ((slotMask & 0x01) != 0) { + mBufferSlot[i] = NULL; + } + slotMask >>= 1; + } +} + +} // namespace android diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h new file mode 100644 index 0000000..562d342 --- /dev/null +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -0,0 +1,177 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GRAPHIC_BUFFER_SOURCE_H_ + +#define GRAPHIC_BUFFER_SOURCE_H_ + +#include <gui/IGraphicBufferProducer.h> +#include <gui/BufferQueue.h> +#include <utils/RefBase.h> + +#include <OMX_Core.h> +#include "../include/OMXNodeInstance.h" +#include <media/stagefright/foundation/ABase.h> + +namespace android { + +/* + * This class is used to feed OMX codecs from a Surface via BufferQueue. + * + * Instances of the class don't run on a dedicated thread. Instead, + * various events trigger data movement: + * + * - Availability of a new frame of data from the BufferQueue (notified + * via the onFrameAvailable callback). + * - The return of a codec buffer (via OnEmptyBufferDone). + * - Application signaling end-of-stream. + * - Transition to or from "executing" state. + * + * Frames of data (and, perhaps, the end-of-stream indication) can arrive + * before the codec is in the "executing" state, so we need to queue + * things up until we're ready to go. + */ +class GraphicBufferSource : public BufferQueue::ConsumerListener { +public: + GraphicBufferSource(OMXNodeInstance* nodeInstance, + uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount); + virtual ~GraphicBufferSource(); + + // We can't throw an exception if the constructor fails, so we just set + // this and require that the caller test the value. + status_t initCheck() const { + return mInitCheck; + } + + // Returns the handle to the producer side of the BufferQueue. Buffers + // queued on this will be received by GraphicBufferSource. + sp<IGraphicBufferProducer> getIGraphicBufferProducer() const { + return mBufferQueue; + } + + // This is called when OMX transitions to OMX_StateExecuting, which means + // we can start handing it buffers. 
If we already have buffers of data + // sitting in the BufferQueue, this will send them to the codec. + void omxExecuting(); + + // This is called when OMX transitions to OMX_StateLoaded, indicating that + // we are shutting down. + void omxLoaded(); + + // A "codec buffer", i.e. a buffer that can be used to pass data into + // the encoder, has been allocated. (This call does not call back into + // OMXNodeInstance.) + void addCodecBuffer(OMX_BUFFERHEADERTYPE* header); + + // Called from OnEmptyBufferDone. If we have a BQ buffer available, + // fill it with a new frame of data; otherwise, just mark it as available. + void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header); + + // This is called after the last input frame has been submitted. We + // need to submit an empty buffer with the EOS flag set. If we don't + // have a codec buffer ready, we just set the mEndOfStream flag. + status_t signalEndOfInputStream(); + +protected: + // BufferQueue::ConsumerListener interface, called when a new frame of + // data is available. If we're executing and a codec buffer is + // available, we acquire the buffer, copy the GraphicBuffer reference + // into the codec buffer, and call Empty[This]Buffer. If we're not yet + // executing or there's no codec buffer available, we just increment + // mNumFramesAvailable and return. + virtual void onFrameAvailable(); + + // BufferQueue::ConsumerListener interface, called when the client has + // released one or more GraphicBuffers. We clear out the appropriate + // set of mBufferSlot entries. + virtual void onBuffersReleased(); + +private: + // Keep track of codec input buffers. They may either be available + // (mGraphicBuffer == NULL) or in use by the codec. + struct CodecBuffer { + OMX_BUFFERHEADERTYPE* mHeader; + sp<GraphicBuffer> mGraphicBuffer; + }; + + // Returns the index of an available codec buffer. If none are + // available, returns -1. Mutex must be held by caller. + int findAvailableCodecBuffer_l(); + + // Returns true if a codec buffer is available. + bool isCodecBufferAvailable_l() { + return findAvailableCodecBuffer_l() >= 0; + } + + // Finds the mCodecBuffers entry that matches. Returns -1 if not found. + int findMatchingCodecBuffer_l(const OMX_BUFFERHEADERTYPE* header); + + // Fills a codec buffer with a frame from the BufferQueue. This must + // only be called when we know that a frame of data is ready (i.e. we're + // in the onFrameAvailable callback, or if we're in codecBufferEmptied + // and mNumFramesAvailable is nonzero). Returns without doing anything if + // we don't have a codec buffer available. + // + // Returns true if we successfully filled a codec buffer with a BQ buffer. + bool fillCodecBuffer_l(); + + // Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer + // reference into the codec buffer, and submits the data to the codec. + status_t submitBuffer_l(sp<GraphicBuffer>& graphicBuffer, + int64_t timestampUsec, int cbi); + + // Submits an empty buffer, with the EOS flag set. Returns without + // doing anything if we don't have a codec buffer available. + void submitEndOfInputStream_l(); + + // Lock, covers all member variables. + mutable Mutex mMutex; + + // Used to report constructor failure. + status_t mInitCheck; + + // Pointer back to the object that contains us. We send buffers here. + OMXNodeInstance* mNodeInstance; + + // Set by omxExecuting() / omxIdling(). + bool mExecuting; + + // We consume graphic buffers from this. 
+ sp<BufferQueue> mBufferQueue; + + // Number of frames pending in BufferQueue that haven't yet been + // forwarded to the codec. + size_t mNumFramesAvailable; + + // Set to true if we want to send end-of-stream after we run out of + // frames in BufferQueue. + bool mEndOfStream; + bool mEndOfStreamSent; + + // Cache of GraphicBuffers from the buffer queue. When the codec + // is done processing a GraphicBuffer, we can use this to map back + // to a slot number. + sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS]; + + // Tracks codec buffers. + Vector<CodecBuffer> mCodecBuffers; + + DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource); +}; + +} // namespace android + +#endif // GRAPHIC_BUFFER_SOURCE_H_ diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index 29bc733..3987ead 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -345,6 +345,17 @@ status_t OMX::useGraphicBuffer( port_index, graphicBuffer, buffer); } +status_t OMX::createInputSurface( + node_id node, OMX_U32 port_index, + sp<IGraphicBufferProducer> *bufferProducer) { + return findInstance(node)->createInputSurface( + port_index, bufferProducer); +} + +status_t OMX::signalEndOfInputStream(node_id node) { + return findInstance(node)->signalEndOfInputStream(); +} + status_t OMX::allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data) { @@ -393,6 +404,9 @@ OMX_ERRORTYPE OMX::OnEvent( OMX_IN OMX_PTR pEventData) { ALOGV("OnEvent(%d, %ld, %ld)", eEvent, nData1, nData2); + // Forward to OMXNodeInstance. + findInstance(node)->onEvent(eEvent, nData1, nData2); + omx_message msg; msg.type = omx_message::EVENT; msg.node = node; diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index bff3def..a9eb94f 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -20,14 +20,18 @@ #include "../include/OMXNodeInstance.h" #include "OMXMaster.h" +#include "GraphicBufferSource.h" #include <OMX_Component.h> #include <binder/IMemory.h> +#include <gui/BufferQueue.h> #include <HardwareAPI.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/MediaErrors.h> +static const OMX_U32 kPortIndexInput = 0; + namespace android { struct BufferMeta { @@ -100,6 +104,17 @@ void OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) { mHandle = handle; } +sp<GraphicBufferSource> OMXNodeInstance::getGraphicBufferSource() { + Mutex::Autolock autoLock(mGraphicBufferSourceLock); + return mGraphicBufferSource; +} + +void OMXNodeInstance::setGraphicBufferSource( + const sp<GraphicBufferSource>& bufferSource) { + Mutex::Autolock autoLock(mGraphicBufferSourceLock); + mGraphicBufferSource = bufferSource; +} + OMX *OMXNodeInstance::owner() { return mOwner; } @@ -277,15 +292,16 @@ status_t OMXNodeInstance::getState(OMX_STATETYPE* state) { status_t OMXNodeInstance::enableGraphicBuffers( OMX_U32 portIndex, OMX_BOOL enable) { Mutex::Autolock autoLock(mLock); + OMX_STRING name = const_cast<OMX_STRING>( + "OMX.google.android.index.enableAndroidNativeBuffers"); OMX_INDEXTYPE index; - OMX_ERRORTYPE err = OMX_GetExtensionIndex( - mHandle, - const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"), - &index); + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex failed"); + if (enable) { + ALOGE("OMX_GetExtensionIndex %s failed", 
name); + } return StatusFromOMXError(err); } @@ -316,14 +332,12 @@ status_t OMXNodeInstance::getGraphicBufferUsage( Mutex::Autolock autoLock(mLock); OMX_INDEXTYPE index; - OMX_ERRORTYPE err = OMX_GetExtensionIndex( - mHandle, - const_cast<OMX_STRING>( - "OMX.google.android.index.getAndroidNativeBufferUsage"), - &index); + OMX_STRING name = const_cast<OMX_STRING>( + "OMX.google.android.index.getAndroidNativeBufferUsage"); + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex failed"); + ALOGE("OMX_GetExtensionIndex %s failed", name); return StatusFromOMXError(err); } @@ -354,7 +368,12 @@ status_t OMXNodeInstance::storeMetaDataInBuffers( OMX_U32 portIndex, OMX_BOOL enable) { Mutex::Autolock autolock(mLock); + return storeMetaDataInBuffers_l(portIndex, enable); +} +status_t OMXNodeInstance::storeMetaDataInBuffers_l( + OMX_U32 portIndex, + OMX_BOOL enable) { OMX_INDEXTYPE index; OMX_STRING name = const_cast<OMX_STRING>( "OMX.google.android.index.storeMetaDataInBuffers"); @@ -362,6 +381,7 @@ status_t OMXNodeInstance::storeMetaDataInBuffers( OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { ALOGE("OMX_GetExtensionIndex %s failed", name); + return StatusFromOMXError(err); } @@ -411,6 +431,11 @@ status_t OMXNodeInstance::useBuffer( addActiveBuffer(portIndex, *buffer); + sp<GraphicBufferSource> bufferSource(getGraphicBufferSource()); + if (bufferSource != NULL && portIndex == kPortIndexInput) { + bufferSource->addCodecBuffer(header); + } + return OK; } @@ -482,13 +507,12 @@ status_t OMXNodeInstance::useGraphicBuffer( return useGraphicBuffer2_l(portIndex, graphicBuffer, buffer); } - OMX_ERRORTYPE err = OMX_GetExtensionIndex( - mHandle, - const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"), - &index); + OMX_STRING name = const_cast<OMX_STRING>( + "OMX.google.android.index.useAndroidNativeBuffer"); + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); if (err != OMX_ErrorNone) { - ALOGE("OMX_GetExtensionIndex failed"); + ALOGE("OMX_GetExtensionIndex %s failed", name); return StatusFromOMXError(err); } @@ -530,6 +554,65 @@ status_t OMXNodeInstance::useGraphicBuffer( return OK; } +status_t OMXNodeInstance::createInputSurface( + OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer) { + Mutex::Autolock autolock(mLock); + status_t err; + + const sp<GraphicBufferSource>& surfaceCheck = getGraphicBufferSource(); + if (surfaceCheck != NULL) { + return ALREADY_EXISTS; + } + + // Input buffers will hold meta-data (gralloc references). + err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE); + if (err != OK) { + return err; + } + + // Retrieve the width and height of the graphic buffer, set when the + // codec was configured. 
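The createInputSurface() code that follows fills in the OMX_PARAM_PORTDEFINITIONTYPE size/version header by hand before calling OMX_GetParameter(). Elsewhere in stagefright this bookkeeping is usually wrapped in a small template helper; here is a self-contained sketch of that pattern, with mock structs standing in for the OMX headers (the real types come from OMX_Types.h and OMX_Index.h).

    #include <cstdint>
    #include <cstring>

    // Mock structs so the sketch compiles on its own.
    struct MockOmxVersion { struct { uint8_t nVersionMajor, nVersionMinor, nRevision, nStep; } s; };
    struct MockPortDefinition { uint32_t nSize; MockOmxVersion nVersion; uint32_t nPortIndex; };

    // Zero the struct, then stamp size and version -- exactly what the
    // hand-written block below does for OMX_PARAM_PORTDEFINITIONTYPE.
    template <typename T>
    static void InitParamsSketch(T *params) {
        std::memset(params, 0, sizeof(T));
        params->nSize = sizeof(T);
        params->nVersion.s.nVersionMajor = 1;
        params->nVersion.s.nVersionMinor = 0;
        params->nVersion.s.nRevision = 0;
        params->nVersion.s.nStep = 0;
    }

    // Usage: MockPortDefinition def; InitParamsSketch(&def); def.nPortIndex = portIndex;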
+ OMX_PARAM_PORTDEFINITIONTYPE def; + def.nSize = sizeof(def); + def.nVersion.s.nVersionMajor = 1; + def.nVersion.s.nVersionMinor = 0; + def.nVersion.s.nRevision = 0; + def.nVersion.s.nStep = 0; + def.nPortIndex = portIndex; + OMX_ERRORTYPE oerr = OMX_GetParameter( + mHandle, OMX_IndexParamPortDefinition, &def); + CHECK(oerr == OMX_ErrorNone); + + if (def.format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque) { + ALOGE("createInputSurface requires AndroidOpaque color format"); + return INVALID_OPERATION; + } + + GraphicBufferSource* bufferSource = new GraphicBufferSource( + this, def.format.video.nFrameWidth, def.format.video.nFrameHeight, + def.nBufferCountActual); + if ((err = bufferSource->initCheck()) != OK) { + delete bufferSource; + return err; + } + setGraphicBufferSource(bufferSource); + + *bufferProducer = bufferSource->getIGraphicBufferProducer(); + return OK; +} + +status_t OMXNodeInstance::signalEndOfInputStream() { + // For non-Surface input, the MediaCodec should convert the call to a + // pair of requests (dequeue input buffer, queue input buffer with EOS + // flag set). Seems easier than doing the equivalent from here. + sp<GraphicBufferSource> bufferSource(getGraphicBufferSource()); + if (bufferSource == NULL) { + ALOGW("signalEndOfInputStream can only be used with Surface input"); + return INVALID_OPERATION; + }; + return bufferSource->signalEndOfInputStream(); +} + status_t OMXNodeInstance::allocateBuffer( OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer, void **buffer_data) { @@ -560,6 +643,11 @@ status_t OMXNodeInstance::allocateBuffer( addActiveBuffer(portIndex, *buffer); + sp<GraphicBufferSource> bufferSource(getGraphicBufferSource()); + if (bufferSource != NULL && portIndex == kPortIndexInput) { + bufferSource->addCodecBuffer(header); + } + return OK; } @@ -592,6 +680,11 @@ status_t OMXNodeInstance::allocateBufferWithBackup( addActiveBuffer(portIndex, *buffer); + sp<GraphicBufferSource> bufferSource(getGraphicBufferSource()); + if (bufferSource != NULL && portIndex == kPortIndexInput) { + bufferSource->addCodecBuffer(header); + } + return OK; } @@ -646,6 +739,26 @@ status_t OMXNodeInstance::emptyBuffer( return StatusFromOMXError(err); } +// like emptyBuffer, but the data is already in header->pBuffer +status_t OMXNodeInstance::emptyDirectBuffer( + OMX_BUFFERHEADERTYPE *header, + OMX_U32 rangeOffset, OMX_U32 rangeLength, + OMX_U32 flags, OMX_TICKS timestamp) { + Mutex::Autolock autoLock(mLock); + + header->nFilledLen = rangeLength; + header->nOffset = rangeOffset; + header->nFlags = flags; + header->nTimeStamp = timestamp; + + OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header); + if (err != OMX_ErrorNone) { + ALOGW("emptyDirectBuffer failed, OMX err=0x%x", err); + } + + return StatusFromOMXError(err); +} + status_t OMXNodeInstance::getExtensionIndex( const char *parameterName, OMX_INDEXTYPE *index) { Mutex::Autolock autoLock(mLock); @@ -666,6 +779,23 @@ void OMXNodeInstance::onMessage(const omx_message &msg) { static_cast<BufferMeta *>(buffer->pAppPrivate); buffer_meta->CopyFromOMX(buffer); + } else if (msg.type == omx_message::EMPTY_BUFFER_DONE) { + const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource()); + + if (bufferSource != NULL) { + // This is one of the buffers used exclusively by + // GraphicBufferSource. + // Don't dispatch a message back to ACodec, since it doesn't + // know that anyone asked to have the buffer emptied and will + // be very confused. 
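The comment above describes the routing rule for EMPTY_BUFFER_DONE once an input surface exists. A minimal sketch of that dispatch decision follows, using toy stand-ins for OMX_BUFFERHEADERTYPE, GraphicBufferSource and the codec observer; the patched implementation continues right after it.

    #include <cstdio>

    // Toy stand-ins; only the shape of the decision matters here.
    struct MockHeader {};
    struct MockSurfaceSource {
        void codecBufferEmptied(MockHeader *) { std::puts("refill from BufferQueue or send EOS"); }
    };
    struct MockObserver {
        void onEmptyBufferDone(MockHeader *) { std::puts("forward to the codec client (ACodec)"); }
    };

    // When an input surface exists, input-buffer completions are consumed by the
    // buffer source and never forwarded to the client, which did not request them.
    static void dispatchEmptyBufferDone(MockSurfaceSource *source,
                                        MockObserver *observer, MockHeader *header) {
        if (source != nullptr) {
            source->codecBufferEmptied(header);
            return;
        }
        observer->onEmptyBufferDone(header);
    }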
+ + OMX_BUFFERHEADERTYPE *buffer = + static_cast<OMX_BUFFERHEADERTYPE *>( + msg.u.buffer_data.buffer); + + bufferSource->codecBufferEmptied(buffer); + return; + } } mObserver->onMessage(msg); @@ -682,6 +812,25 @@ void OMXNodeInstance::onGetHandleFailed() { delete this; } +// OMXNodeInstance::OnEvent calls OMX::OnEvent, which then calls here. +// Don't try to acquire mLock here -- in rare circumstances this will hang. +void OMXNodeInstance::onEvent( + OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) { + const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource()); + + if (bufferSource != NULL && event == OMX_EventCmdComplete && + arg1 == OMX_CommandStateSet) { + if (arg2 == OMX_StateExecuting) { + bufferSource->omxExecuting(); + } else if (arg2 == OMX_StateLoaded) { + // Must be shutting down -- won't have a GraphicBufferSource + // on the way up. + bufferSource->omxLoaded(); + setGraphicBufferSource(NULL); + } + } +} + // static OMX_ERRORTYPE OMXNodeInstance::OnEvent( OMX_IN OMX_HANDLETYPE hComponent, diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp index c79e01f..4999663 100644 --- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp +++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp @@ -450,6 +450,10 @@ void SimpleSoftOMXComponent::onChangeState(OMX_STATETYPE state) { checkTransitions(); } +void SimpleSoftOMXComponent::onReset() { + // no-op +} + void SimpleSoftOMXComponent::onPortEnable(OMX_U32 portIndex, bool enable) { CHECK_LT(portIndex, mPorts.size()); @@ -581,6 +585,10 @@ void SimpleSoftOMXComponent::checkTransitions() { if (transitionComplete) { mState = mTargetState; + if (mState == OMX_StateLoaded) { + onReset(); + } + notify(OMX_EventCmdComplete, OMX_CommandStateSet, mState, NULL); } } diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp index 3747b3b..b3fe98e 100644 --- a/media/libstagefright/omx/SoftOMXPlugin.cpp +++ b/media/libstagefright/omx/SoftOMXPlugin.cpp @@ -51,8 +51,10 @@ static const struct { { "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" }, { "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" }, { "OMX.google.vpx.decoder", "vpxdec", "video_decoder.vpx" }, + { "OMX.google.vpx.encoder", "vpxenc", "video_encoder.vpx" }, { "OMX.google.raw.decoder", "rawdec", "audio_decoder.raw" }, { "OMX.google.flac.encoder", "flacenc", "audio_encoder.flac" }, + { "OMX.google.gsm.decoder", "gsmdec", "audio_decoder.gsm" }, }; static const size_t kNumComponents = diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk index 04441ca..1061c39 100644 --- a/media/libstagefright/omx/tests/Android.mk +++ b/media/libstagefright/omx/tests/Android.mk @@ -5,7 +5,7 @@ LOCAL_SRC_FILES = \ OMXHarness.cpp \ LOCAL_SHARED_LIBRARIES := \ - libstagefright libbinder libmedia libutils libstagefright_foundation + libstagefright libbinder libmedia libutils liblog libstagefright_foundation LOCAL_C_INCLUDES := \ $(TOP)/frameworks/av/media/libstagefright \ diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp index 161bd4f..3068541 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.cpp +++ b/media/libstagefright/rtsp/ARTSPConnection.cpp @@ -20,13 +20,12 @@ #include "ARTSPConnection.h" -#include <cutils/properties.h> - #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> #include 
<media/stagefright/foundation/AMessage.h> #include <media/stagefright/foundation/base64.h> #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/Utils.h> #include <arpa/inet.h> #include <fcntl.h> @@ -41,6 +40,10 @@ namespace android { // static const int64_t ARTSPConnection::kSelectTimeoutUs = 1000ll; +// static +const AString ARTSPConnection::sUserAgent = + StringPrintf("User-Agent: %s\r\n", MakeUserAgent().c_str()); + ARTSPConnection::ARTSPConnection(bool uidValid, uid_t uid) : mUIDValid(uidValid), mUID(uid), @@ -50,7 +53,6 @@ ARTSPConnection::ARTSPConnection(bool uidValid, uid_t uid) mConnectionID(0), mNextCSeq(0), mReceiveResponseEventPending(false) { - MakeUserAgent(&mUserAgent); } ARTSPConnection::~ARTSPConnection() { @@ -1032,27 +1034,12 @@ void ARTSPConnection::addAuthentication(AString *request) { #endif } -// static -void ARTSPConnection::MakeUserAgent(AString *userAgent) { - userAgent->clear(); - userAgent->setTo("User-Agent: stagefright/1.1 (Linux;Android "); - -#if (PROPERTY_VALUE_MAX < 8) -#error "PROPERTY_VALUE_MAX must be at least 8" -#endif - - char value[PROPERTY_VALUE_MAX]; - property_get("ro.build.version.release", value, "Unknown"); - userAgent->append(value); - userAgent->append(")\r\n"); -} - void ARTSPConnection::addUserAgent(AString *request) const { // Find the boundary between headers and the body. ssize_t i = request->find("\r\n\r\n"); CHECK_GE(i, 0); - request->insert(mUserAgent, i + 2); + request->insert(sUserAgent, i + 2); } } // namespace android diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h index 68f2d59..1fe9c99 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.h +++ b/media/libstagefright/rtsp/ARTSPConnection.h @@ -74,6 +74,8 @@ private: static const int64_t kSelectTimeoutUs; + static const AString sUserAgent; + bool mUIDValid; uid_t mUID; State mState; @@ -89,8 +91,6 @@ private: sp<AMessage> mObserveBinaryMessage; - AString mUserAgent; - void performDisconnect(); void onConnect(const sp<AMessage> &msg); @@ -122,8 +122,6 @@ private: static bool ParseSingleUnsignedLong( const char *from, unsigned long *x); - static void MakeUserAgent(AString *userAgent); - DISALLOW_EVIL_CONSTRUCTORS(ARTSPConnection); }; diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk index 49e2daf..9e2724d 100644 --- a/media/libstagefright/rtsp/Android.mk +++ b/media/libstagefright/rtsp/Android.mk @@ -17,6 +17,7 @@ LOCAL_SRC_FILES:= \ ARTPWriter.cpp \ ARTSPConnection.cpp \ ASessionDescription.cpp \ + SDPLoader.cpp \ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/media/libstagefright/include \ diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index b2f0e5e..e067e20 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -28,13 +28,13 @@ #include "ASessionDescription.h" #include <ctype.h> -#include <cutils/properties.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/ALooper.h> #include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/Utils.h> #include <arpa/inet.h> #include <sys/socket.h> @@ -52,20 +52,9 @@ static int64_t kStartupTimeoutUs = 10000000ll; static int64_t kDefaultKeepAliveTimeoutUs = 60000000ll; -namespace android { - -static void MakeUserAgentString(AString *s) { - s->setTo("stagefright/1.1 (Linux;Android "); - -#if 
(PROPERTY_VALUE_MAX < 8) -#error "PROPERTY_VALUE_MAX must be at least 8" -#endif +static int64_t kPauseDelayUs = 3000000ll; - char value[PROPERTY_VALUE_MAX]; - property_get("ro.build.version.release", value, "Unknown"); - s->append(value); - s->append(")"); -} +namespace android { static bool GetAttribute(const char *s, const char *key, AString *value) { value->clear(); @@ -129,13 +118,16 @@ struct MyHandler : public AHandler { mNumAccessUnitsReceived(0), mCheckPending(false), mCheckGeneration(0), + mCheckTimeoutGeneration(0), mTryTCPInterleaving(false), mTryFakeRTCP(false), mReceivedFirstRTCPPacket(false), mReceivedFirstRTPPacket(false), - mSeekable(false), + mSeekable(true), mKeepAliveTimeoutUs(kDefaultKeepAliveTimeoutUs), - mKeepAliveGeneration(0) { + mKeepAliveGeneration(0), + mPausing(false), + mPauseGeneration(0) { mNetLooper->setName("rtsp net"); mNetLooper->start(false /* runOnCallingThread */, false /* canCallJava */, @@ -173,6 +165,39 @@ struct MyHandler : public AHandler { mConn->connect(mOriginalSessionURL.c_str(), reply); } + void loadSDP(const sp<ASessionDescription>& desc) { + looper()->registerHandler(mConn); + (1 ? mNetLooper : looper())->registerHandler(mRTPConn); + + sp<AMessage> notify = new AMessage('biny', id()); + mConn->observeBinaryData(notify); + + sp<AMessage> reply = new AMessage('sdpl', id()); + reply->setObject("description", desc); + mConn->connect(mOriginalSessionURL.c_str(), reply); + } + + AString getControlURL(sp<ASessionDescription> desc) { + AString sessionLevelControlURL; + if (mSessionDesc->findAttribute( + 0, + "a=control", + &sessionLevelControlURL)) { + if (sessionLevelControlURL.compare("*") == 0) { + return mBaseURL; + } else { + AString controlURL; + CHECK(MakeURL( + mBaseURL.c_str(), + sessionLevelControlURL.c_str(), + &controlURL)); + return controlURL; + } + } else { + return mSessionURL; + } + } + void disconnect() { (new AMessage('abor', id()))->post(); } @@ -180,6 +205,24 @@ struct MyHandler : public AHandler { void seek(int64_t timeUs) { sp<AMessage> msg = new AMessage('seek', id()); msg->setInt64("time", timeUs); + mPauseGeneration++; + msg->post(); + } + + bool isSeekable() const { + return mSeekable; + } + + void pause() { + sp<AMessage> msg = new AMessage('paus', id()); + mPauseGeneration++; + msg->setInt32("pausecheck", mPauseGeneration); + msg->post(kPauseDelayUs); + } + + void resume() { + sp<AMessage> msg = new AMessage('resu', id()); + mPauseGeneration++; msg->post(); } @@ -223,8 +266,7 @@ struct MyHandler : public AHandler { data[offset++] = 6; // TOOL - AString tool; - MakeUserAgentString(&tool); + AString tool = MakeUserAgent(); data[offset++] = tool.size(); @@ -348,6 +390,39 @@ struct MyHandler : public AHandler { return true; } + static bool isLiveStream(const sp<ASessionDescription> &desc) { + AString attrLiveStream; + if (desc->findAttribute(0, "a=LiveStream", &attrLiveStream)) { + ssize_t semicolonPos = attrLiveStream.find(";", 2); + + const char* liveStreamValue; + if (semicolonPos < 0) { + liveStreamValue = attrLiveStream.c_str(); + } else { + AString valString; + valString.setTo(attrLiveStream, + semicolonPos + 1, + attrLiveStream.size() - semicolonPos - 1); + liveStreamValue = valString.c_str(); + } + + uint32_t value = strtoul(liveStreamValue, NULL, 10); + if (value == 1) { + ALOGV("found live stream"); + return true; + } + } else { + // It is a live stream if no duration is returned + int64_t durationUs; + if (!desc->getDurationUs(&durationUs)) { + ALOGV("No duration found, assume live stream"); + return true; + } + 
} + + return false; + } + virtual void onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case 'conn': @@ -448,6 +523,8 @@ struct MyHandler : public AHandler { } } + mSeekable = !isLiveStream(mSessionDesc); + if (!mBaseURL.startsWith("rtsp://")) { // Some misbehaving servers specify a relative // URL in one of the locations above, combine @@ -467,6 +544,8 @@ struct MyHandler : public AHandler { mBaseURL = tmp; } + mControlURL = getControlURL(mSessionDesc); + if (mSessionDesc->countTracks() < 2) { // There's no actual tracks in this session. // The first "track" is merely session meta @@ -489,6 +568,51 @@ struct MyHandler : public AHandler { break; } + case 'sdpl': + { + int32_t result; + CHECK(msg->findInt32("result", &result)); + + ALOGI("SDP connection request completed with result %d (%s)", + result, strerror(-result)); + + if (result == OK) { + sp<RefBase> obj; + CHECK(msg->findObject("description", &obj)); + mSessionDesc = + static_cast<ASessionDescription *>(obj.get()); + + if (!mSessionDesc->isValid()) { + ALOGE("Failed to parse session description."); + result = ERROR_MALFORMED; + } else { + mBaseURL = mSessionURL; + + mSeekable = !isLiveStream(mSessionDesc); + + mControlURL = getControlURL(mSessionDesc); + + if (mSessionDesc->countTracks() < 2) { + // There's no actual tracks in this session. + // The first "track" is merely session meta + // data. + + ALOGW("Session doesn't contain any playable " + "tracks. Aborting."); + result = ERROR_UNSUPPORTED; + } else { + setupTrack(1); + } + } + } + + if (result != OK) { + sp<AMessage> reply = new AMessage('disc', id()); + mConn->disconnect(reply); + } + break; + } + case 'setu': { size_t index; @@ -606,7 +730,7 @@ struct MyHandler : public AHandler { postKeepAlive(); AString request = "PLAY "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -644,6 +768,8 @@ struct MyHandler : public AHandler { parsePlayResponse(response); sp<AMessage> timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); timeout->post(kStartupTimeoutUs); } } @@ -733,7 +859,8 @@ struct MyHandler : public AHandler { mNumAccessUnitsReceived = 0; mReceivedFirstRTCPPacket = false; mReceivedFirstRTPPacket = false; - mSeekable = false; + mPausing = false; + mSeekable = true; sp<AMessage> reply = new AMessage('tear', id()); @@ -854,9 +981,16 @@ struct MyHandler : public AHandler { int32_t eos; if (msg->findInt32("eos", &eos)) { ALOGI("received BYE on track index %d", trackIndex); -#if 0 - track->mPacketSource->signalEOS(ERROR_END_OF_STREAM); -#endif + if (!mAllTracksHaveTime && dataReceivedOnAllChannels()) { + ALOGI("No time established => fake existing data"); + + track->mEOSReceived = true; + mTryFakeRTCP = true; + mReceivedFirstRTCPPacket = true; + fakeTimestamps(); + } else { + postQueueEOS(trackIndex, ERROR_END_OF_STREAM); + } return; } @@ -884,6 +1018,115 @@ struct MyHandler : public AHandler { break; } + case 'paus': + { + int32_t generation; + CHECK(msg->findInt32("pausecheck", &generation)); + if (generation != mPauseGeneration) { + ALOGV("Ignoring outdated pause message."); + break; + } + + if (!mSeekable) { + ALOGW("This is a live stream, ignoring pause request."); + break; + } + mCheckPending = true; + ++mCheckGeneration; + mPausing = true; + + AString request = "PAUSE "; + request.append(mControlURL); + request.append(" RTSP/1.0\r\n"); + + request.append("Session: "); + request.append(mSessionID); + 
request.append("\r\n"); + + request.append("\r\n"); + + sp<AMessage> reply = new AMessage('pau2', id()); + mConn->sendRequest(request.c_str(), reply); + break; + } + + case 'pau2': + { + int32_t result; + CHECK(msg->findInt32("result", &result)); + mCheckTimeoutGeneration++; + + ALOGI("PAUSE completed with result %d (%s)", + result, strerror(-result)); + break; + } + + case 'resu': + { + if (mPausing && mSeekPending) { + // If seeking, Play will be sent from see1 instead + break; + } + + if (!mPausing) { + // Dont send PLAY if we have not paused + break; + } + AString request = "PLAY "; + request.append(mControlURL); + request.append(" RTSP/1.0\r\n"); + + request.append("Session: "); + request.append(mSessionID); + request.append("\r\n"); + + request.append("\r\n"); + + sp<AMessage> reply = new AMessage('res2', id()); + mConn->sendRequest(request.c_str(), reply); + break; + } + + case 'res2': + { + int32_t result; + CHECK(msg->findInt32("result", &result)); + + ALOGI("PLAY completed with result %d (%s)", + result, strerror(-result)); + + mCheckPending = false; + postAccessUnitTimeoutCheck(); + + if (result == OK) { + sp<RefBase> obj; + CHECK(msg->findObject("response", &obj)); + sp<ARTSPResponse> response = + static_cast<ARTSPResponse *>(obj.get()); + + if (response->mStatusCode != 200) { + result = UNKNOWN_ERROR; + } else { + parsePlayResponse(response); + + // Post new timeout in order to make sure to use + // fake timestamps if no new Sender Reports arrive + sp<AMessage> timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); + timeout->post(kStartupTimeoutUs); + } + } + + if (result != OK) { + ALOGE("resume failed, aborting."); + (new AMessage('abor', id()))->post(); + } + + mPausing = false; + break; + } + case 'seek': { if (!mSeekable) { @@ -905,8 +1148,17 @@ struct MyHandler : public AHandler { mCheckPending = true; ++mCheckGeneration; + sp<AMessage> reply = new AMessage('see1', id()); + reply->setInt64("time", timeUs); + + if (mPausing) { + // PAUSE already sent + ALOGI("Pause already sent"); + reply->post(); + break; + } AString request = "PAUSE "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -915,8 +1167,6 @@ struct MyHandler : public AHandler { request.append("\r\n"); - sp<AMessage> reply = new AMessage('see1', id()); - reply->setInt64("time", timeUs); mConn->sendRequest(request.c_str(), reply); break; } @@ -928,6 +1178,7 @@ struct MyHandler : public AHandler { TrackInfo *info = &mTracks.editItemAt(i); postQueueSeekDiscontinuity(i); + info->mEOSReceived = false; info->mRTPAnchor = 0; info->mNTPAnchorUs = -1; @@ -936,11 +1187,18 @@ struct MyHandler : public AHandler { mAllTracksHaveTime = false; mNTPAnchorUs = -1; + // Start new timeoutgeneration to avoid getting timeout + // before PLAY response arrive + sp<AMessage> timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); + timeout->post(kStartupTimeoutUs); + int64_t timeUs; CHECK(msg->findInt64("time", &timeUs)); AString request = "PLAY "; - request.append(mSessionURL); + request.append(mControlURL); request.append(" RTSP/1.0\r\n"); request.append("Session: "); @@ -960,7 +1218,10 @@ struct MyHandler : public AHandler { case 'see2': { - CHECK(mSeekPending); + if (mTracks.size() == 0) { + // We have already hit abor, break + break; + } int32_t result; CHECK(msg->findInt32("result", &result)); @@ -982,6 +1243,13 
@@ struct MyHandler : public AHandler { } else { parsePlayResponse(response); + // Post new timeout in order to make sure to use + // fake timestamps if no new Sender Reports arrive + sp<AMessage> timeout = new AMessage('tiou', id()); + mCheckTimeoutGeneration++; + timeout->setInt32("tioucheck", mCheckTimeoutGeneration); + timeout->post(kStartupTimeoutUs); + ssize_t i = response->mHeaders.indexOfKey("rtp-info"); CHECK_GE(i, 0); @@ -996,6 +1264,7 @@ struct MyHandler : public AHandler { (new AMessage('abor', id()))->post(); } + mPausing = false; mSeekPending = false; sp<AMessage> msg = mNotify->dup(); @@ -1018,8 +1287,17 @@ struct MyHandler : public AHandler { case 'tiou': { + int32_t timeoutGenerationCheck; + CHECK(msg->findInt32("tioucheck", &timeoutGenerationCheck)); + if (timeoutGenerationCheck != mCheckTimeoutGeneration) { + // This is an outdated message. Ignore. + // This typically happens if a lot of seeks are + // performed, since new timeout messages now are + // posted at seek as well. + break; + } if (!mReceivedFirstRTCPPacket) { - if (mReceivedFirstRTPPacket && !mTryFakeRTCP) { + if (dataReceivedOnAllChannels() && !mTryFakeRTCP) { ALOGW("We received RTP packets but no RTCP packets, " "using fake timestamps."); @@ -1093,7 +1371,6 @@ struct MyHandler : public AHandler { } void parsePlayResponse(const sp<ARTSPResponse> &response) { - mSeekable = false; if (mTracks.size() == 0) { ALOGV("parsePlayResponse: late packets ignored."); return; @@ -1168,8 +1445,6 @@ struct MyHandler : public AHandler { ++n; } - - mSeekable = true; } sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale) { @@ -1199,6 +1474,7 @@ private: uint32_t mRTPAnchor; int64_t mNTPAnchorUs; int32_t mTimeScale; + bool mEOSReceived; uint32_t mNormalPlayTimeRTP; int64_t mNormalPlayTimeUs; @@ -1221,6 +1497,7 @@ private: AString mSessionURL; AString mSessionHost; AString mBaseURL; + AString mControlURL; AString mSessionID; bool mSetupTracksSuccessful; bool mSeekPending; @@ -1234,6 +1511,7 @@ private: int64_t mNumAccessUnitsReceived; bool mCheckPending; int32_t mCheckGeneration; + int32_t mCheckTimeoutGeneration; bool mTryTCPInterleaving; bool mTryFakeRTCP; bool mReceivedFirstRTCPPacket; @@ -1241,6 +1519,8 @@ private: bool mSeekable; int64_t mKeepAliveTimeoutUs; int32_t mKeepAliveGeneration; + bool mPausing; + int32_t mPauseGeneration; Vector<TrackInfo> mTracks; @@ -1287,6 +1567,7 @@ private: formatDesc.c_str(), ×cale, &numChannels); info->mTimeScale = timescale; + info->mEOSReceived = false; ALOGV("track #%d URL=%s", mTracks.size(), trackURL.c_str()); @@ -1379,6 +1660,17 @@ private: } } + bool dataReceivedOnAllChannels() { + TrackInfo *track; + for (size_t i = 0; i < mTracks.size(); ++i) { + track = &mTracks.editItemAt(i); + if (track->mPackets.empty()) { + return false; + } + } + return true; + } + void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) { ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx", trackIndex, rtpTime, ntpTime); @@ -1409,6 +1701,27 @@ private: ALOGI("Time now established for all tracks."); } } + if (mAllTracksHaveTime && dataReceivedOnAllChannels()) { + // Time is now established, lets start timestamping immediately + for (size_t i = 0; i < mTracks.size(); ++i) { + TrackInfo *trackInfo = &mTracks.editItemAt(i); + while (!trackInfo->mPackets.empty()) { + sp<ABuffer> accessUnit = *trackInfo->mPackets.begin(); + trackInfo->mPackets.erase(trackInfo->mPackets.begin()); + + if (addMediaTimestamp(i, trackInfo, accessUnit)) { + postQueueAccessUnit(i, accessUnit); 
+ } + } + } + for (size_t i = 0; i < mTracks.size(); ++i) { + TrackInfo *trackInfo = &mTracks.editItemAt(i); + if (trackInfo->mEOSReceived) { + postQueueEOS(i, ERROR_END_OF_STREAM); + trackInfo->mEOSReceived = false; + } + } + } } void onAccessUnitComplete( @@ -1453,6 +1766,11 @@ private: if (addMediaTimestamp(trackIndex, track, accessUnit)) { postQueueAccessUnit(trackIndex, accessUnit); } + + if (track->mEOSReceived) { + postQueueEOS(trackIndex, ERROR_END_OF_STREAM); + track->mEOSReceived = false; + } } bool addMediaTimestamp( diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp new file mode 100644 index 0000000..ed3fa7e --- /dev/null +++ b/media/libstagefright/rtsp/SDPLoader.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SDPLoader" +#include <utils/Log.h> + +#include "SDPLoader.h" + +#include "ASessionDescription.h" +#include "HTTPBase.h" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> + +#define DEFAULT_SDP_SIZE 100000 + +namespace android { + +SDPLoader::SDPLoader(const sp<AMessage> ¬ify, uint32_t flags, bool uidValid, uid_t uid) + : mNotify(notify), + mFlags(flags), + mUIDValid(uidValid), + mUID(uid), + mNetLooper(new ALooper), + mCancelled(false), + mHTTPDataSource( + HTTPBase::Create( + (mFlags & kFlagIncognito) + ? 
HTTPBase::kFlagIncognito + : 0)) { + if (mUIDValid) { + mHTTPDataSource->setUID(mUID); + } + + mNetLooper->setName("sdp net"); + mNetLooper->start(false /* runOnCallingThread */, + false /* canCallJava */, + PRIORITY_HIGHEST); +} + +void SDPLoader::load(const char *url, const KeyedVector<String8, String8> *headers) { + mNetLooper->registerHandler(this); + + sp<AMessage> msg = new AMessage(kWhatLoad, id()); + msg->setString("url", url); + + if (headers != NULL) { + msg->setPointer( + "headers", + new KeyedVector<String8, String8>(*headers)); + } + + msg->post(); +} + +void SDPLoader::cancel() { + mCancelled = true; + sp<HTTPBase> HTTPDataSource = mHTTPDataSource; + HTTPDataSource->disconnect(); +} + +void SDPLoader::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatLoad: + onLoad(msg); + break; + + default: + TRESPASS(); + break; + } +} + +void SDPLoader::onLoad(const sp<AMessage> &msg) { + status_t err = OK; + sp<ASessionDescription> desc = NULL; + AString url; + CHECK(msg->findString("url", &url)); + + KeyedVector<String8, String8> *headers = NULL; + msg->findPointer("headers", (void **)&headers); + + if (!(mFlags & kFlagIncognito)) { + ALOGI("onLoad '%s'", url.c_str()); + } else { + ALOGI("onLoad <URL suppressed>"); + } + + if (!mCancelled) { + err = mHTTPDataSource->connect(url.c_str(), headers); + + if (err != OK) { + ALOGE("connect() returned %d", err); + } + } + + if (headers != NULL) { + delete headers; + headers = NULL; + } + + off64_t sdpSize; + if (err == OK && !mCancelled) { + err = mHTTPDataSource->getSize(&sdpSize); + + if (err != OK) { + //We did not get the size of the sdp file, default to a large value + sdpSize = DEFAULT_SDP_SIZE; + err = OK; + } + } + + sp<ABuffer> buffer = new ABuffer(sdpSize); + + if (err == OK && !mCancelled) { + ssize_t readSize = mHTTPDataSource->readAt(0, buffer->data(), sdpSize); + + if (readSize < 0) { + ALOGE("Failed to read SDP, error code = %ld", readSize); + err = UNKNOWN_ERROR; + } else { + desc = new ASessionDescription; + + if (desc == NULL || !desc->setTo(buffer->data(), (size_t)readSize)) { + err = UNKNOWN_ERROR; + ALOGE("Failed to parse SDP"); + } + } + } + + mHTTPDataSource.clear(); + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatSDPLoaded); + notify->setInt32("result", err); + notify->setObject("description", desc); + notify->post(); +} + +} // namespace android diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk index 57fff0b..06ce16b 100644 --- a/media/libstagefright/tests/Android.mk +++ b/media/libstagefright/tests/Android.mk @@ -26,6 +26,7 @@ LOCAL_SHARED_LIBRARIES := \ libsync \ libui \ libutils \ + liblog LOCAL_STATIC_LIBRARIES := \ libgtest \ diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp index a61d6a2..a5459fe 100644 --- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp +++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp @@ -27,7 +27,7 @@ #include <media/mediarecorder.h> #include <ui/GraphicBuffer.h> -#include <gui/SurfaceTextureClient.h> +#include <gui/Surface.h> #include <gui/ISurfaceComposer.h> #include <gui/Surface.h> #include <gui/SurfaceComposerClient.h> @@ -107,9 +107,9 @@ protected: window.get(), NULL); } else { ALOGV("No actual display. 
Choosing EGLSurface based on SurfaceMediaSource"); - sp<ISurfaceTexture> sms = (new SurfaceMediaSource( + sp<IGraphicBufferProducer> sms = (new SurfaceMediaSource( getSurfaceWidth(), getSurfaceHeight()))->getBufferQueue(); - sp<SurfaceTextureClient> stc = new SurfaceTextureClient(sms); + sp<Surface> stc = new Surface(sms); sp<ANativeWindow> window = stc; mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig, @@ -361,7 +361,7 @@ protected: mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); // Manual cast is required to avoid constructor ambiguity - mSTC = new SurfaceTextureClient(static_cast<sp<ISurfaceTexture> >( mSMS->getBufferQueue())); + mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue())); mANW = mSTC; } @@ -375,7 +375,7 @@ protected: const int mYuvTexHeight; sp<SurfaceMediaSource> mSMS; - sp<SurfaceTextureClient> mSTC; + sp<Surface> mSTC; sp<ANativeWindow> mANW; }; @@ -396,7 +396,7 @@ protected: ALOGV("SMS-GLTest::SetUp()"); android::ProcessState::self()->startThreadPool(); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new SurfaceTextureClient(static_cast<sp<ISurfaceTexture> >( mSMS->getBufferQueue())); + mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue())); mANW = mSTC; // Doing the setup related to the GL Side @@ -416,7 +416,7 @@ protected: const int mYuvTexHeight; sp<SurfaceMediaSource> mSMS; - sp<SurfaceTextureClient> mSTC; + sp<Surface> mSTC; sp<ANativeWindow> mANW; }; @@ -482,8 +482,8 @@ sp<MediaRecorder> SurfaceMediaSourceGLTest::setUpMediaRecorder(int fd, int video // query the mediarecorder for a surfacemeidasource and create an egl surface with that void SurfaceMediaSourceGLTest::setUpEGLSurfaceFromMediaRecorder(sp<MediaRecorder>& mr) { - sp<ISurfaceTexture> iST = mr->querySurfaceMediaSourceFromMediaServer(); - mSTC = new SurfaceTextureClient(iST); + sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer(); + mSTC = new Surface(iST); mANW = mSTC; if (mEglSurface != EGL_NO_SURFACE) { @@ -749,8 +749,8 @@ TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaS mYuvTexHeight, 30); // get the reference to the surfacemediasource living in // mediaserver that is created by stagefrightrecorder - sp<ISurfaceTexture> iST = mr->querySurfaceMediaSourceFromMediaServer(); - mSTC = new SurfaceTextureClient(iST); + sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer(); + mSTC = new Surface(iST); mANW = mSTC; ASSERT_EQ(NO_ERROR, native_window_api_connect(mANW.get(), NATIVE_WINDOW_API_CPU)); ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(), @@ -781,7 +781,7 @@ TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) { ALOGV("Verify creating a surface w/ right config + dummy writer*********"); mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight); - mSTC = new SurfaceTextureClient(static_cast<sp<ISurfaceTexture> >( mSMS->getBufferQueue())); + mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue())); mANW = mSTC; DummyRecorder writer(mSMS); diff --git a/media/libstagefright/timedtext/TimedTextSRTSource.cpp b/media/libstagefright/timedtext/TimedTextSRTSource.cpp index eac23ba..2ac1e72 100644 --- a/media/libstagefright/timedtext/TimedTextSRTSource.cpp +++ b/media/libstagefright/timedtext/TimedTextSRTSource.cpp @@ -36,6 +36,9 @@ TimedTextSRTSource::TimedTextSRTSource(const sp<DataSource>& dataSource) : mSource(dataSource), mMetaData(new MetaData), mIndex(0) 
{ + // TODO: Need to detect the language, because SRT doesn't give language + // information explicitly. + mMetaData->setCString(kKeyMediaLanguage, "und"); } TimedTextSRTSource::~TimedTextSRTSource() { @@ -46,14 +49,10 @@ status_t TimedTextSRTSource::start() { if (err != OK) { reset(); } - // TODO: Need to detect the language, because SRT doesn't give language - // information explicitly. - mMetaData->setCString(kKeyMediaLanguage, ""); return err; } void TimedTextSRTSource::reset() { - mMetaData->clear(); mTextVector.clear(); mIndex = 0; } diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp index 62a6e7f..938d601 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.cpp +++ b/media/libstagefright/wifi-display/ANetworkSession.cpp @@ -23,9 +23,11 @@ #include <arpa/inet.h> #include <fcntl.h> +#include <linux/tcp.h> #include <net/if.h> #include <netdb.h> #include <netinet/in.h> +#include <sys/ioctl.h> #include <sys/socket.h> #include <media/stagefright/foundation/ABuffer.h> @@ -37,6 +39,7 @@ namespace android { static const size_t kMaxUDPSize = 1500; +static const int32_t kMaxUDPRetries = 200; struct ANetworkSession::NetworkThread : public Thread { NetworkThread(ANetworkSession *session); @@ -79,7 +82,8 @@ struct ANetworkSession::Session : public RefBase { status_t readMore(); status_t writeMore(); - status_t sendRequest(const void *data, ssize_t size); + status_t sendRequest( + const void *data, ssize_t size, bool timeValid, int64_t timeUs); void setIsRTSPConnection(bool yesno); @@ -87,24 +91,34 @@ protected: virtual ~Session(); private: + enum { + FRAGMENT_FLAG_TIME_VALID = 1, + }; + struct Fragment { + uint32_t mFlags; + int64_t mTimeUs; + sp<ABuffer> mBuffer; + }; + int32_t mSessionID; State mState; bool mIsRTSPConnection; int mSocket; sp<AMessage> mNotify; bool mSawReceiveFailure, mSawSendFailure; + int32_t mUDPRetries; - // for TCP / stream data - AString mOutBuffer; - - // for UDP / datagrams - List<sp<ABuffer> > mOutDatagrams; + List<Fragment> mOutFragments; AString mInBuffer; + int64_t mLastStallReportUs; + void notifyError(bool send, status_t err, const char *detail); void notify(NotificationReason reason); + void dumpFragmentStats(const Fragment &frag); + DISALLOW_EVIL_CONSTRUCTORS(Session); }; //////////////////////////////////////////////////////////////////////////////// @@ -135,7 +149,9 @@ ANetworkSession::Session::Session( mSocket(s), mNotify(notify), mSawReceiveFailure(false), - mSawSendFailure(false) { + mSawSendFailure(false), + mUDPRetries(kMaxUDPRetries), + mLastStallReportUs(-1ll) { if (mState == CONNECTED) { struct sockaddr_in localAddr; socklen_t localAddrLen = sizeof(localAddr); @@ -216,8 +232,8 @@ bool ANetworkSession::Session::wantsToRead() { bool ANetworkSession::Session::wantsToWrite() { return !mSawSendFailure && (mState == CONNECTING - || (mState == CONNECTED && !mOutBuffer.empty()) - || (mState == DATAGRAM && !mOutDatagrams.empty())); + || (mState == CONNECTED && !mOutFragments.empty()) + || (mState == DATAGRAM && !mOutFragments.empty())); } status_t ANetworkSession::Session::readMore() { @@ -273,8 +289,17 @@ status_t ANetworkSession::Session::readMore() { } if (err != OK) { - notifyError(false /* send */, err, "Recvfrom failed."); - mSawReceiveFailure = true; + if (!mUDPRetries) { + notifyError(false /* send */, err, "Recvfrom failed."); + mSawReceiveFailure = true; + } else { + mUDPRetries--; + ALOGE("Recvfrom failed, %d/%d retries left", + mUDPRetries, kMaxUDPRetries); + err = OK; 
+ } + } else { + mUDPRetries = kMaxUDPRetries; } return err; @@ -314,6 +339,9 @@ status_t ANetworkSession::Session::readMore() { sp<ABuffer> packet = new ABuffer(packetSize); memcpy(packet->data(), mInBuffer.c_str() + 2, packetSize); + int64_t nowUs = ALooper::GetNowUs(); + packet->meta()->setInt64("arrivalTimeUs", nowUs); + sp<AMessage> notify = mNotify->dup(); notify->setInt32("sessionID", mSessionID); notify->setInt32("reason", kWhatDatagram); @@ -399,31 +427,41 @@ status_t ANetworkSession::Session::readMore() { return err; } -status_t ANetworkSession::Session::writeMore() { - if (mState == DATAGRAM) { - CHECK(!mOutDatagrams.empty()); +void ANetworkSession::Session::dumpFragmentStats(const Fragment &frag) { +#if 0 + int64_t nowUs = ALooper::GetNowUs(); + int64_t delayMs = (nowUs - frag.mTimeUs) / 1000ll; - status_t err; - do { - const sp<ABuffer> &datagram = *mOutDatagrams.begin(); + static const int64_t kMinDelayMs = 0; + static const int64_t kMaxDelayMs = 300; - uint8_t *data = datagram->data(); - if (data[0] == 0x80 && (data[1] & 0x7f) == 33) { - int64_t nowUs = ALooper::GetNowUs(); + const char *kPattern = "########################################"; + size_t kPatternSize = strlen(kPattern); - uint32_t prevRtpTime = U32_AT(&data[4]); + int n = (kPatternSize * (delayMs - kMinDelayMs)) + / (kMaxDelayMs - kMinDelayMs); - // 90kHz time scale - uint32_t rtpTime = (nowUs * 9ll) / 100ll; - int32_t diffTime = (int32_t)rtpTime - (int32_t)prevRtpTime; + if (n < 0) { + n = 0; + } else if ((size_t)n > kPatternSize) { + n = kPatternSize; + } - ALOGV("correcting rtpTime by %.0f ms", diffTime / 90.0); + ALOGI("[%lld]: (%4lld ms) %s\n", + frag.mTimeUs / 1000, + delayMs, + kPattern + kPatternSize - n); +#endif +} - data[4] = rtpTime >> 24; - data[5] = (rtpTime >> 16) & 0xff; - data[6] = (rtpTime >> 8) & 0xff; - data[7] = rtpTime & 0xff; - } +status_t ANetworkSession::Session::writeMore() { + if (mState == DATAGRAM) { + CHECK(!mOutFragments.empty()); + + status_t err; + do { + const Fragment &frag = *mOutFragments.begin(); + const sp<ABuffer> &datagram = frag.mBuffer; int n; do { @@ -433,24 +471,37 @@ status_t ANetworkSession::Session::writeMore() { err = OK; if (n > 0) { - mOutDatagrams.erase(mOutDatagrams.begin()); + if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { + dumpFragmentStats(frag); + } + + mOutFragments.erase(mOutFragments.begin()); } else if (n < 0) { err = -errno; } else if (n == 0) { err = -ECONNRESET; } - } while (err == OK && !mOutDatagrams.empty()); + } while (err == OK && !mOutFragments.empty()); if (err == -EAGAIN) { - if (!mOutDatagrams.empty()) { - ALOGI("%d datagrams remain queued.", mOutDatagrams.size()); + if (!mOutFragments.empty()) { + ALOGI("%d datagrams remain queued.", mOutFragments.size()); } err = OK; } if (err != OK) { - notifyError(true /* send */, err, "Send datagram failed."); - mSawSendFailure = true; + if (!mUDPRetries) { + notifyError(true /* send */, err, "Send datagram failed."); + mSawSendFailure = true; + } else { + mUDPRetries--; + ALOGE("Send datagram failed, %d/%d retries left", + mUDPRetries, kMaxUDPRetries); + err = OK; + } + } else { + mUDPRetries = kMaxUDPRetries; } return err; @@ -476,23 +527,37 @@ status_t ANetworkSession::Session::writeMore() { } CHECK_EQ(mState, CONNECTED); - CHECK(!mOutBuffer.empty()); + CHECK(!mOutFragments.empty()); ssize_t n; - do { - n = send(mSocket, mOutBuffer.c_str(), mOutBuffer.size(), 0); - } while (n < 0 && errno == EINTR); + while (!mOutFragments.empty()) { + const Fragment &frag = *mOutFragments.begin(); - status_t err = 
OK; + do { + n = send(mSocket, frag.mBuffer->data(), frag.mBuffer->size(), 0); + } while (n < 0 && errno == EINTR); - if (n > 0) { -#if 0 - ALOGI("out:"); - hexdump(mOutBuffer.c_str(), n); -#endif + if (n <= 0) { + break; + } - mOutBuffer.erase(0, n); - } else if (n < 0) { + frag.mBuffer->setRange( + frag.mBuffer->offset() + n, frag.mBuffer->size() - n); + + if (frag.mBuffer->size() > 0) { + break; + } + + if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) { + dumpFragmentStats(frag); + } + + mOutFragments.erase(mOutFragments.begin()); + } + + status_t err = OK; + + if (n < 0) { err = -errno; } else if (n == 0) { err = -ECONNRESET; @@ -503,35 +568,69 @@ status_t ANetworkSession::Session::writeMore() { mSawSendFailure = true; } +#if 0 + int numBytesQueued; + int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued); + if (res == 0 && numBytesQueued > 50 * 1024) { + if (numBytesQueued > 409600) { + ALOGW("!!! numBytesQueued = %d", numBytesQueued); + } + + int64_t nowUs = ALooper::GetNowUs(); + + if (mLastStallReportUs < 0ll + || nowUs > mLastStallReportUs + 100000ll) { + sp<AMessage> msg = mNotify->dup(); + msg->setInt32("sessionID", mSessionID); + msg->setInt32("reason", kWhatNetworkStall); + msg->setSize("numBytesQueued", numBytesQueued); + msg->post(); + + mLastStallReportUs = nowUs; + } + } +#endif + return err; } -status_t ANetworkSession::Session::sendRequest(const void *data, ssize_t size) { +status_t ANetworkSession::Session::sendRequest( + const void *data, ssize_t size, bool timeValid, int64_t timeUs) { CHECK(mState == CONNECTED || mState == DATAGRAM); - if (mState == DATAGRAM) { - CHECK_GE(size, 0); - - sp<ABuffer> datagram = new ABuffer(size); - memcpy(datagram->data(), data, size); + if (size < 0) { + size = strlen((const char *)data); + } - mOutDatagrams.push_back(datagram); + if (size == 0) { return OK; } + sp<ABuffer> buffer; + if (mState == CONNECTED && !mIsRTSPConnection) { CHECK_LE(size, 65535); - uint8_t prefix[2]; - prefix[0] = size >> 8; - prefix[1] = size & 0xff; + buffer = new ABuffer(size + 2); + buffer->data()[0] = size >> 8; + buffer->data()[1] = size & 0xff; + memcpy(buffer->data() + 2, data, size); + } else { + buffer = new ABuffer(size); + memcpy(buffer->data(), data, size); + } + + Fragment frag; - mOutBuffer.append((const char *)prefix, sizeof(prefix)); + frag.mFlags = 0; + if (timeValid) { + frag.mFlags = FRAGMENT_FLAG_TIME_VALID; + frag.mTimeUs = timeUs; } - mOutBuffer.append( - (const char *)data, - (size >= 0) ? 
size : strlen((const char *)data)); + frag.mBuffer = buffer; + + mOutFragments.push_back(frag); return OK; } @@ -770,6 +869,22 @@ status_t ANetworkSession::createClientOrServer( err = -errno; goto bail2; } + } else if (mode == kModeCreateTCPDatagramSessionActive) { + int flag = 1; + res = setsockopt(s, IPPROTO_TCP, TCP_NODELAY, &flag, sizeof(flag)); + + if (res < 0) { + err = -errno; + goto bail2; + } + + int tos = 224; // VOICE + res = setsockopt(s, IPPROTO_IP, IP_TOS, &tos, sizeof(tos)); + + if (res < 0) { + err = -errno; + goto bail2; + } } err = MakeSocketNonBlocking(s); @@ -946,7 +1061,8 @@ status_t ANetworkSession::connectUDPSession( } status_t ANetworkSession::sendRequest( - int32_t sessionID, const void *data, ssize_t size) { + int32_t sessionID, const void *data, ssize_t size, + bool timeValid, int64_t timeUs) { Mutex::Autolock autoLock(mLock); ssize_t index = mSessions.indexOfKey(sessionID); @@ -957,7 +1073,7 @@ status_t ANetworkSession::sendRequest( const sp<Session> session = mSessions.valueAt(index); - status_t err = session->sendRequest(data, size); + status_t err = session->sendRequest(data, size, timeValid, timeUs); interrupt(); @@ -1091,7 +1207,6 @@ void ANetworkSession::threadLoop() { clientSocket); sp<Session> clientSession = - // using socket sd as sessionID new Session( mNextSessionID++, Session::CONNECTED, diff --git a/media/libstagefright/wifi-display/ANetworkSession.h b/media/libstagefright/wifi-display/ANetworkSession.h index c1acdcc..7c62b29 100644 --- a/media/libstagefright/wifi-display/ANetworkSession.h +++ b/media/libstagefright/wifi-display/ANetworkSession.h @@ -74,7 +74,8 @@ struct ANetworkSession : public RefBase { status_t destroySession(int32_t sessionID); status_t sendRequest( - int32_t sessionID, const void *data, ssize_t size = -1); + int32_t sessionID, const void *data, ssize_t size = -1, + bool timeValid = false, int64_t timeUs = -1ll); enum NotificationReason { kWhatError, @@ -83,6 +84,7 @@ struct ANetworkSession : public RefBase { kWhatData, kWhatDatagram, kWhatBinaryData, + kWhatNetworkStall, }; protected: diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index 75098f1..061ae89 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -4,20 +4,17 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ ANetworkSession.cpp \ + MediaSender.cpp \ Parameters.cpp \ ParsedMessage.cpp \ - sink/LinearRegression.cpp \ - sink/RTPSink.cpp \ - sink/TunnelRenderer.cpp \ - sink/WifiDisplaySink.cpp \ + rtp/RTPSender.cpp \ source/Converter.cpp \ source/MediaPuller.cpp \ source/PlaybackSession.cpp \ source/RepeaterSource.cpp \ - source/Sender.cpp \ source/TSPacketizer.cpp \ source/WifiDisplaySource.cpp \ - TimeSeries.cpp \ + VideoFormats.cpp \ LOCAL_C_INCLUDES:= \ $(TOP)/frameworks/av/media/libstagefright \ @@ -27,6 +24,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_SHARED_LIBRARIES:= \ libbinder \ libcutils \ + liblog \ libgui \ libmedia \ libstagefright \ @@ -55,31 +53,10 @@ LOCAL_SHARED_LIBRARIES:= \ libstagefright_foundation \ libstagefright_wfd \ libutils \ + liblog \ LOCAL_MODULE:= wfd LOCAL_MODULE_TAGS := debug include $(BUILD_EXECUTABLE) - -################################################################################ - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES:= \ - udptest.cpp \ - -LOCAL_SHARED_LIBRARIES:= \ - libbinder \ - libgui \ - libmedia \ - libstagefright \ - libstagefright_foundation \ - libstagefright_wfd \ - libutils \ - -LOCAL_MODULE:= udptest - 
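In the ANetworkSession.cpp changes above, Session::sendRequest() frames non-RTSP TCP payloads by prepending a two-byte big-endian length (now carried per Fragment) so the receiver can recover message boundaries from the byte stream. A standalone sketch of that framing, using std::vector in place of ABuffer; the real code also CHECKs that the payload fits in 16 bits.

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Prepend a 2-byte big-endian length to a TCP payload, mirroring the
    // prefix written into the Fragment buffer in sendRequest().
    static std::vector<uint8_t> frameTcpPayload(const void *data, size_t size) {
        std::vector<uint8_t> out(size + 2);
        out[0] = static_cast<uint8_t>(size >> 8);
        out[1] = static_cast<uint8_t>(size & 0xff);
        std::memcpy(out.data() + 2, data, size);
        return out;
    }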
-LOCAL_MODULE_TAGS := debug - -include $(BUILD_EXECUTABLE) diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp new file mode 100644 index 0000000..8a3566f --- /dev/null +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -0,0 +1,474 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaSender" +#include <utils/Log.h> + +#include "MediaSender.h" + +#include "ANetworkSession.h" +#include "rtp/RTPSender.h" +#include "source/TSPacketizer.h" + +#include "include/avc_utils.h" + +#include <media/IHDCP.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +namespace android { + +MediaSender::MediaSender( + const sp<ANetworkSession> &netSession, + const sp<AMessage> ¬ify) + : mNetSession(netSession), + mNotify(notify), + mMode(MODE_UNDEFINED), + mGeneration(0), + mPrevTimeUs(-1ll), + mInitDoneCount(0), + mLogFile(NULL) { + // mLogFile = fopen("/data/misc/log.ts", "wb"); +} + +MediaSender::~MediaSender() { + if (mLogFile != NULL) { + fclose(mLogFile); + mLogFile = NULL; + } +} + +status_t MediaSender::setHDCP(const sp<IHDCP> &hdcp) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + mHDCP = hdcp; + + return OK; +} + +ssize_t MediaSender::addTrack(const sp<AMessage> &format, uint32_t flags) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + TrackInfo info; + info.mFormat = format; + info.mFlags = flags; + info.mPacketizerTrackIndex = -1; + + AString mime; + CHECK(format->findString("mime", &mime)); + info.mIsAudio = !strncasecmp("audio/", mime.c_str(), 6); + + size_t index = mTrackInfos.size(); + mTrackInfos.push_back(info); + + return index; +} + +status_t MediaSender::initAsync( + ssize_t trackIndex, + const char *remoteHost, + int32_t remoteRTPPort, + RTPSender::TransportMode rtpMode, + int32_t remoteRTCPPort, + RTPSender::TransportMode rtcpMode, + int32_t *localRTPPort) { + if (trackIndex < 0) { + if (mMode != MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + uint32_t flags = 0; + if (mHDCP != NULL) { + // XXX Determine proper HDCP version. 
+ flags |= TSPacketizer::EMIT_HDCP20_DESCRIPTOR; + } + mTSPacketizer = new TSPacketizer(flags); + + status_t err = OK; + for (size_t i = 0; i < mTrackInfos.size(); ++i) { + TrackInfo *info = &mTrackInfos.editItemAt(i); + + ssize_t packetizerTrackIndex = + mTSPacketizer->addTrack(info->mFormat); + + if (packetizerTrackIndex < 0) { + err = packetizerTrackIndex; + break; + } + + info->mPacketizerTrackIndex = packetizerTrackIndex; + } + + if (err == OK) { + sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); + notify->setInt32("generation", mGeneration); + mTSSender = new RTPSender(mNetSession, notify); + looper()->registerHandler(mTSSender); + + err = mTSSender->initAsync( + remoteHost, + remoteRTPPort, + rtpMode, + remoteRTCPPort, + rtcpMode, + localRTPPort); + + if (err != OK) { + looper()->unregisterHandler(mTSSender->id()); + mTSSender.clear(); + } + } + + if (err != OK) { + for (size_t i = 0; i < mTrackInfos.size(); ++i) { + TrackInfo *info = &mTrackInfos.editItemAt(i); + info->mPacketizerTrackIndex = -1; + } + + mTSPacketizer.clear(); + return err; + } + + mMode = MODE_TRANSPORT_STREAM; + mInitDoneCount = 1; + + return OK; + } + + if (mMode == MODE_TRANSPORT_STREAM) { + return INVALID_OPERATION; + } + + if ((size_t)trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + + if (info->mSender != NULL) { + return INVALID_OPERATION; + } + + sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); + notify->setInt32("generation", mGeneration); + notify->setSize("trackIndex", trackIndex); + + info->mSender = new RTPSender(mNetSession, notify); + looper()->registerHandler(info->mSender); + + status_t err = info->mSender->initAsync( + remoteHost, + remoteRTPPort, + rtpMode, + remoteRTCPPort, + rtcpMode, + localRTPPort); + + if (err != OK) { + looper()->unregisterHandler(info->mSender->id()); + info->mSender.clear(); + + return err; + } + + if (mMode == MODE_UNDEFINED) { + mInitDoneCount = mTrackInfos.size(); + } + + mMode = MODE_ELEMENTARY_STREAMS; + + return OK; +} + +status_t MediaSender::queueAccessUnit( + size_t trackIndex, const sp<ABuffer> &accessUnit) { + if (mMode == MODE_UNDEFINED) { + return INVALID_OPERATION; + } + + if (trackIndex >= mTrackInfos.size()) { + return -ERANGE; + } + + if (mMode == MODE_TRANSPORT_STREAM) { + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + info->mAccessUnits.push_back(accessUnit); + + mTSPacketizer->extractCSDIfNecessary(info->mPacketizerTrackIndex); + + for (;;) { + ssize_t minTrackIndex = -1; + int64_t minTimeUs = -1ll; + + for (size_t i = 0; i < mTrackInfos.size(); ++i) { + const TrackInfo &info = mTrackInfos.itemAt(i); + + if (info.mAccessUnits.empty()) { + minTrackIndex = -1; + minTimeUs = -1ll; + break; + } + + int64_t timeUs; + const sp<ABuffer> &accessUnit = *info.mAccessUnits.begin(); + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + if (minTrackIndex < 0 || timeUs < minTimeUs) { + minTrackIndex = i; + minTimeUs = timeUs; + } + } + + if (minTrackIndex < 0) { + return OK; + } + + TrackInfo *info = &mTrackInfos.editItemAt(minTrackIndex); + sp<ABuffer> accessUnit = *info->mAccessUnits.begin(); + info->mAccessUnits.erase(info->mAccessUnits.begin()); + + sp<ABuffer> tsPackets; + status_t err = packetizeAccessUnit( + minTrackIndex, accessUnit, &tsPackets); + + if (err == OK) { + if (mLogFile != NULL) { + fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile); + } + + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + 
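// Editor's note (annotation, not part of this change): the timestamp checked
// above is copied onto the muxed TS buffer below; RTPSender forwards it to
// ANetworkSession as timeValid/timeUs so that Session::writeMore() can track
// per-fragment send stats via FRAGMENT_FLAG_TIME_VALID / dumpFragmentStats().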
tsPackets->meta()->setInt64("timeUs", timeUs); + + err = mTSSender->queueBuffer( + tsPackets, + 33 /* packetType */, + RTPSender::PACKETIZATION_TRANSPORT_STREAM); + } + + if (err != OK) { + return err; + } + } + } + + TrackInfo *info = &mTrackInfos.editItemAt(trackIndex); + + return info->mSender->queueBuffer( + accessUnit, + info->mIsAudio ? 96 : 97 /* packetType */, + info->mIsAudio + ? RTPSender::PACKETIZATION_AAC : RTPSender::PACKETIZATION_H264); +} + +void MediaSender::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatSenderNotify: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation != mGeneration) { + break; + } + + onSenderNotify(msg); + break; + } + + default: + TRESPASS(); + } +} + +void MediaSender::onSenderNotify(const sp<AMessage> &msg) { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + switch (what) { + case RTPSender::kWhatInitDone: + { + --mInitDoneCount; + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + if (err != OK) { + notifyInitDone(err); + ++mGeneration; + break; + } + + if (mInitDoneCount == 0) { + notifyInitDone(OK); + } + break; + } + + case RTPSender::kWhatError: + { + int32_t err; + CHECK(msg->findInt32("err", &err)); + + notifyError(err); + break; + } + + case kWhatNetworkStall: + { + size_t numBytesQueued; + CHECK(msg->findSize("numBytesQueued", &numBytesQueued)); + + notifyNetworkStall(numBytesQueued); + break; + } + + default: + TRESPASS(); + } +} + +void MediaSender::notifyInitDone(status_t err) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void MediaSender::notifyError(status_t err) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +void MediaSender::notifyNetworkStall(size_t numBytesQueued) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatNetworkStall); + notify->setSize("numBytesQueued", numBytesQueued); + notify->post(); +} + +status_t MediaSender::packetizeAccessUnit( + size_t trackIndex, + sp<ABuffer> accessUnit, + sp<ABuffer> *tsPackets) { + const TrackInfo &info = mTrackInfos.itemAt(trackIndex); + + uint32_t flags = 0; + + bool isHDCPEncrypted = false; + uint64_t inputCTR; + uint8_t HDCP_private_data[16]; + + bool manuallyPrependSPSPPS = + !info.mIsAudio + && (info.mFlags & FLAG_MANUALLY_PREPEND_SPS_PPS) + && IsIDR(accessUnit); + + if (mHDCP != NULL && !info.mIsAudio) { + isHDCPEncrypted = true; + + if (manuallyPrependSPSPPS) { + accessUnit = mTSPacketizer->prependCSD( + info.mPacketizerTrackIndex, accessUnit); + } + + status_t err = mHDCP->encrypt( + accessUnit->data(), accessUnit->size(), + trackIndex /* streamCTR */, + &inputCTR, + accessUnit->data()); + + if (err != OK) { + ALOGE("Failed to HDCP-encrypt media data (err %d)", + err); + + return err; + } + + HDCP_private_data[0] = 0x00; + + HDCP_private_data[1] = + (((trackIndex >> 30) & 3) << 1) | 1; + + HDCP_private_data[2] = (trackIndex >> 22) & 0xff; + + HDCP_private_data[3] = + (((trackIndex >> 15) & 0x7f) << 1) | 1; + + HDCP_private_data[4] = (trackIndex >> 7) & 0xff; + + HDCP_private_data[5] = + ((trackIndex & 0x7f) << 1) | 1; + + HDCP_private_data[6] = 0x00; + + HDCP_private_data[7] = + (((inputCTR >> 60) & 0x0f) << 1) | 1; + + HDCP_private_data[8] = (inputCTR >> 52) & 0xff; + + HDCP_private_data[9] = + (((inputCTR >> 45) & 0x7f) << 1) | 1; + + HDCP_private_data[10] = (inputCTR >> 37) & 0xff; + 
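// Editor's note (annotation, not part of this change): bytes 7..15 pack the
// 64-bit inputCTR the same way bytes 1..5 pack the 32-bit streamCTR above:
// every other byte holds 7 payload bits (4 in byte 7) shifted left by one
// with a marker bit of 1 in the LSB, and the bytes in between hold 8 payload
// bits each. The assignments below continue that pattern.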
+ HDCP_private_data[11] = + (((inputCTR >> 30) & 0x7f) << 1) | 1; + + HDCP_private_data[12] = (inputCTR >> 22) & 0xff; + + HDCP_private_data[13] = + (((inputCTR >> 15) & 0x7f) << 1) | 1; + + HDCP_private_data[14] = (inputCTR >> 7) & 0xff; + + HDCP_private_data[15] = + ((inputCTR & 0x7f) << 1) | 1; + + flags |= TSPacketizer::IS_ENCRYPTED; + } else if (manuallyPrependSPSPPS) { + flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES; + } + + int64_t timeUs = ALooper::GetNowUs(); + if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) { + flags |= TSPacketizer::EMIT_PCR; + flags |= TSPacketizer::EMIT_PAT_AND_PMT; + + mPrevTimeUs = timeUs; + } + + mTSPacketizer->packetize( + info.mPacketizerTrackIndex, + accessUnit, + tsPackets, + flags, + !isHDCPEncrypted ? NULL : HDCP_private_data, + !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data), + info.mIsAudio ? 2 : 0 /* numStuffingBytes */); + + return OK; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h new file mode 100644 index 0000000..64722c5 --- /dev/null +++ b/media/libstagefright/wifi-display/MediaSender.h @@ -0,0 +1,131 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_SENDER_H_ + +#define MEDIA_SENDER_H_ + +#include "rtp/RTPSender.h" + +#include <media/stagefright/foundation/ABase.h> +#include <media/stagefright/foundation/AHandler.h> +#include <utils/Errors.h> +#include <utils/Vector.h> + +namespace android { + +struct ABuffer; +struct ANetworkSession; +struct AMessage; +struct IHDCP; +struct TSPacketizer; + +// This class facilitates sending of data from one or more media tracks +// through one or more RTP channels, either providing a 1:1 mapping from +// track to RTP channel or muxing all tracks into a single RTP channel and +// using transport stream encapsulation. +// Optionally the (video) data is encrypted using the provided hdcp object. +struct MediaSender : public AHandler { + enum { + kWhatInitDone, + kWhatError, + kWhatNetworkStall, + }; + + MediaSender( + const sp<ANetworkSession> &netSession, + const sp<AMessage> ¬ify); + + status_t setHDCP(const sp<IHDCP> &hdcp); + + enum FlagBits { + FLAG_MANUALLY_PREPEND_SPS_PPS = 1, + }; + ssize_t addTrack(const sp<AMessage> &format, uint32_t flags); + + // If trackIndex == -1, initialize for transport stream muxing. 
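// Editor's sketch (not part of this change): a typical caller-side sequence
// in transport-stream mode; variable names, host and port are assumed purely
// for illustration.
//
//   sp<MediaSender> sender = new MediaSender(netSession, notify);
//   looper->registerHandler(sender);
//
//   ssize_t videoTrack =
//       sender->addTrack(videoFormat, FLAG_MANUALLY_PREPEND_SPS_PPS);
//   ssize_t audioTrack = sender->addTrack(audioFormat, 0 /* flags */);
//
//   int32_t localRTPPort;
//   status_t err = sender->initAsync(
//           -1 /* trackIndex: mux all tracks into one transport stream */,
//           "192.168.43.1" /* remoteHost, assumed */,
//           15550 /* remoteRTPPort, assumed */,
//           RTPSender::TRANSPORT_UDP,
//           -1 /* remoteRTCPPort: no RTCP */,
//           RTPSender::TRANSPORT_NONE,
//           &localRTPPort);
//
//   // After kWhatInitDone reports OK:
//   sender->queueAccessUnit(videoTrack, accessUnit);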
+ status_t initAsync( + ssize_t trackIndex, + const char *remoteHost, + int32_t remoteRTPPort, + RTPSender::TransportMode rtpMode, + int32_t remoteRTCPPort, + RTPSender::TransportMode rtcpMode, + int32_t *localRTPPort); + + status_t queueAccessUnit( + size_t trackIndex, const sp<ABuffer> &accessUnit); + +protected: + virtual void onMessageReceived(const sp<AMessage> &msg); + virtual ~MediaSender(); + +private: + enum { + kWhatSenderNotify, + }; + + enum Mode { + MODE_UNDEFINED, + MODE_TRANSPORT_STREAM, + MODE_ELEMENTARY_STREAMS, + }; + + struct TrackInfo { + sp<AMessage> mFormat; + uint32_t mFlags; + sp<RTPSender> mSender; + List<sp<ABuffer> > mAccessUnits; + ssize_t mPacketizerTrackIndex; + bool mIsAudio; + }; + + sp<ANetworkSession> mNetSession; + sp<AMessage> mNotify; + + sp<IHDCP> mHDCP; + + Mode mMode; + int32_t mGeneration; + + Vector<TrackInfo> mTrackInfos; + + sp<TSPacketizer> mTSPacketizer; + sp<RTPSender> mTSSender; + int64_t mPrevTimeUs; + + size_t mInitDoneCount; + + FILE *mLogFile; + + void onSenderNotify(const sp<AMessage> &msg); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + void notifyNetworkStall(size_t numBytesQueued); + + status_t packetizeAccessUnit( + size_t trackIndex, + sp<ABuffer> accessUnit, + sp<ABuffer> *tsPackets); + + DISALLOW_EVIL_CONSTRUCTORS(MediaSender); +}; + +} // namespace android + +#endif // MEDIA_SENDER_H_ + diff --git a/media/libstagefright/wifi-display/Parameters.cpp b/media/libstagefright/wifi-display/Parameters.cpp index f7118b3..d2a61ea 100644 --- a/media/libstagefright/wifi-display/Parameters.cpp +++ b/media/libstagefright/wifi-display/Parameters.cpp @@ -65,7 +65,9 @@ status_t Parameters::parse(const char *data, size_t size) { mDict.add(name, value); - i += 2; + while (i + 1 < size && data[i] == '\r' && data[i + 1] == '\n') { + i += 2; + } } return OK; diff --git a/media/libstagefright/wifi-display/TimeSeries.cpp b/media/libstagefright/wifi-display/TimeSeries.cpp deleted file mode 100644 index d882d98..0000000 --- a/media/libstagefright/wifi-display/TimeSeries.cpp +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include "TimeSeries.h" - -#include <math.h> -#include <string.h> - -namespace android { - -TimeSeries::TimeSeries() - : mCount(0), - mSum(0.0) { -} - -void TimeSeries::add(double val) { - if (mCount < kHistorySize) { - mValues[mCount++] = val; - mSum += val; - } else { - mSum -= mValues[0]; - memmove(&mValues[0], &mValues[1], (kHistorySize - 1) * sizeof(double)); - mValues[kHistorySize - 1] = val; - mSum += val; - } -} - -double TimeSeries::mean() const { - if (mCount < 1) { - return 0.0; - } - - return mSum / mCount; -} - -double TimeSeries::sdev() const { - if (mCount < 1) { - return 0.0; - } - - double m = mean(); - - double sum = 0.0; - for (size_t i = 0; i < mCount; ++i) { - double tmp = mValues[i] - m; - tmp *= tmp; - - sum += tmp; - } - - return sqrt(sum / mCount); -} - -} // namespace android diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp new file mode 100644 index 0000000..458b163 --- /dev/null +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -0,0 +1,419 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "VideoFormats" +#include <utils/Log.h> + +#include "VideoFormats.h" + +#include <media/stagefright/foundation/ADebug.h> + +namespace android { + +VideoFormats::config_t VideoFormats::mConfigs[][32] = { + { + // CEA Resolutions + { 640, 480, 60, false, 0, 0}, + { 720, 480, 60, false, 0, 0}, + { 720, 480, 60, true, 0, 0}, + { 720, 576, 50, false, 0, 0}, + { 720, 576, 50, true, 0, 0}, + { 1280, 720, 30, false, 0, 0}, + { 1280, 720, 60, false, 0, 0}, + { 1920, 1080, 30, false, 0, 0}, + { 1920, 1080, 60, false, 0, 0}, + { 1920, 1080, 60, true, 0, 0}, + { 1280, 720, 25, false, 0, 0}, + { 1280, 720, 50, false, 0, 0}, + { 1920, 1080, 25, false, 0, 0}, + { 1920, 1080, 50, false, 0, 0}, + { 1920, 1080, 50, true, 0, 0}, + { 1280, 720, 24, false, 0, 0}, + { 1920, 1080, 24, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + }, + { + // VESA Resolutions + { 800, 600, 30, false, 0, 0}, + { 800, 600, 60, false, 0, 0}, + { 1024, 768, 30, false, 0, 0}, + { 1024, 768, 60, false, 0, 0}, + { 1152, 864, 30, false, 0, 0}, + { 1152, 864, 60, false, 0, 0}, + { 1280, 768, 30, false, 0, 0}, + { 1280, 768, 60, false, 0, 0}, + { 1280, 800, 30, false, 0, 0}, + { 1280, 800, 60, false, 0, 0}, + { 1360, 768, 30, false, 0, 0}, + { 1360, 768, 60, false, 0, 0}, + { 1366, 768, 30, false, 0, 0}, + { 1366, 768, 60, false, 0, 0}, + { 1280, 1024, 30, false, 0, 0}, + { 1280, 1024, 60, false, 0, 0}, + { 1400, 1050, 30, false, 0, 0}, + { 1400, 1050, 60, false, 0, 
0}, + { 1440, 900, 30, false, 0, 0}, + { 1440, 900, 60, false, 0, 0}, + { 1600, 900, 30, false, 0, 0}, + { 1600, 900, 60, false, 0, 0}, + { 1600, 1200, 30, false, 0, 0}, + { 1600, 1200, 60, false, 0, 0}, + { 1680, 1024, 30, false, 0, 0}, + { 1680, 1024, 60, false, 0, 0}, + { 1680, 1050, 30, false, 0, 0}, + { 1680, 1050, 60, false, 0, 0}, + { 1920, 1200, 30, false, 0, 0}, + { 1920, 1200, 60, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + }, + { + // HH Resolutions + { 800, 480, 30, false, 0, 0}, + { 800, 480, 60, false, 0, 0}, + { 854, 480, 30, false, 0, 0}, + { 854, 480, 60, false, 0, 0}, + { 864, 480, 30, false, 0, 0}, + { 864, 480, 60, false, 0, 0}, + { 640, 360, 30, false, 0, 0}, + { 640, 360, 60, false, 0, 0}, + { 960, 540, 30, false, 0, 0}, + { 960, 540, 60, false, 0, 0}, + { 848, 480, 30, false, 0, 0}, + { 848, 480, 60, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + { 0, 0, 0, false, 0, 0}, + } +}; + +VideoFormats::VideoFormats() { + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + mResolutionEnabled[i] = 0; + } + + setNativeResolution(RESOLUTION_CEA, 0); // default to 640x480 p60 +} + +void VideoFormats::setNativeResolution(ResolutionType type, size_t index) { + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + mNativeType = type; + mNativeIndex = index; + + setResolutionEnabled(type, index); +} + +void VideoFormats::getNativeResolution( + ResolutionType *type, size_t *index) const { + *type = mNativeType; + *index = mNativeIndex; +} + +void VideoFormats::disableAll() { + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + mResolutionEnabled[i] = 0; + for (size_t j = 0; j < 32; j++) { + mConfigs[i][j].profile = mConfigs[i][j].level = 0; + } + } +} + +void VideoFormats::enableAll() { + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + mResolutionEnabled[i] = 0xffffffff; + for (size_t j = 0; j < 32; j++) { + mConfigs[i][j].profile = (1ul << PROFILE_CBP); + mConfigs[i][j].level = (1ul << LEVEL_31); + } + } +} + +void VideoFormats::setResolutionEnabled( + ResolutionType type, size_t index, bool enabled) { + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + if (enabled) { + mResolutionEnabled[type] |= (1ul << index); + } else { + mResolutionEnabled[type] &= ~(1ul << index); + } +} + +bool VideoFormats::isResolutionEnabled( + ResolutionType type, size_t index) const { + CHECK_LT(type, kNumResolutionTypes); + CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL)); + + return mResolutionEnabled[type] & (1ul << index); +} + +// static +bool VideoFormats::GetConfiguration( + ResolutionType type, + size_t index, + size_t *width, size_t *height, size_t *framesPerSecond, + bool *interlaced) { + CHECK_LT(type, kNumResolutionTypes); + + if (index >= 32) { + return false; + } + + const config_t *config = &mConfigs[type][index]; + + if (config->width == 0) { + return false; + } + + if (width) { + *width = config->width; + } + + if (height) { + *height = config->height; + } + + if 
(framesPerSecond) { + *framesPerSecond = config->framesPerSecond; + } + + if (interlaced) { + *interlaced = config->interlaced; + } + + return true; +} + +bool VideoFormats::parseH264Codec(const char *spec) { + unsigned profile, level, res[3]; + + if (sscanf( + spec, + "%02x %02x %08X %08X %08X", + &profile, + &level, + &res[0], + &res[1], + &res[2]) != 5) { + return false; + } + + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + for (size_t j = 0; j < 32; ++j) { + if (res[i] & (1ul << j)){ + mResolutionEnabled[i] |= (1ul << j); + if (profile > mConfigs[i][j].profile) { + mConfigs[i][j].profile = profile; + if (level > mConfigs[i][j].level) + mConfigs[i][j].level = level; + } + } + } + } + + return true; +} + +bool VideoFormats::parseFormatSpec(const char *spec) { + CHECK_EQ(kNumResolutionTypes, 3); + + unsigned native, dummy; + unsigned res[3]; + size_t size = strlen(spec); + size_t offset = 0; + + if (sscanf(spec, "%02x %02x ", &native, &dummy) != 2) { + return false; + } + + offset += 6; // skip native and preferred-display-mode-supported + CHECK_LE(offset + 58, size); + while (offset < size) { + parseH264Codec(spec + offset); + offset += 60; // skip H.264-codec + ", " + } + + mNativeIndex = native >> 3; + mNativeType = (ResolutionType)(native & 7); + + bool success; + if (mNativeType >= kNumResolutionTypes) { + success = false; + } else { + success = GetConfiguration( + mNativeType, mNativeIndex, NULL, NULL, NULL, NULL); + } + + if (!success) { + ALOGW("sink advertised an illegal native resolution, fortunately " + "this value is ignored for the time being..."); + } + + return true; +} + +AString VideoFormats::getFormatSpec(bool forM4Message) const { + CHECK_EQ(kNumResolutionTypes, 3); + + // wfd_video_formats: + // 1 byte "native" + // 1 byte "preferred-display-mode-supported" 0 or 1 + // one or more avc codec structures + // 1 byte profile + // 1 byte level + // 4 byte CEA mask + // 4 byte VESA mask + // 4 byte HH mask + // 1 byte latency + // 2 byte min-slice-slice + // 2 byte slice-enc-params + // 1 byte framerate-control-support + // max-hres (none or 2 byte) + // max-vres (none or 2 byte) + + return StringPrintf( + "%02x 00 02 02 %08x %08x %08x 00 0000 0000 00 none none", + forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType), + mResolutionEnabled[0], + mResolutionEnabled[1], + mResolutionEnabled[2]); +} + +// static +bool VideoFormats::PickBestFormat( + const VideoFormats &sinkSupported, + const VideoFormats &sourceSupported, + ResolutionType *chosenType, + size_t *chosenIndex) { +#if 0 + // Support for the native format is a great idea, the spec includes + // these features, but nobody supports it and the tests don't validate it. + + ResolutionType nativeType; + size_t nativeIndex; + sinkSupported.getNativeResolution(&nativeType, &nativeIndex); + if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) { + if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) { + ALOGI("Choosing sink's native resolution"); + *chosenType = nativeType; + *chosenIndex = nativeIndex; + return true; + } + } else { + ALOGW("Sink advertised native resolution that it doesn't " + "actually support... 
ignoring"); + } + + sourceSupported.getNativeResolution(&nativeType, &nativeIndex); + if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) { + if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) { + ALOGI("Choosing source's native resolution"); + *chosenType = nativeType; + *chosenIndex = nativeIndex; + return true; + } + } else { + ALOGW("Source advertised native resolution that it doesn't " + "actually support... ignoring"); + } +#endif + + bool first = true; + uint32_t bestScore = 0; + size_t bestType = 0; + size_t bestIndex = 0; + for (size_t i = 0; i < kNumResolutionTypes; ++i) { + for (size_t j = 0; j < 32; ++j) { + size_t width, height, framesPerSecond; + bool interlaced; + if (!GetConfiguration( + (ResolutionType)i, + j, + &width, &height, &framesPerSecond, &interlaced)) { + break; + } + + if (!sinkSupported.isResolutionEnabled((ResolutionType)i, j) + || !sourceSupported.isResolutionEnabled( + (ResolutionType)i, j)) { + continue; + } + + ALOGV("type %u, index %u, %u x %u %c%u supported", + i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond); + + uint32_t score = width * height * framesPerSecond; + if (!interlaced) { + score *= 2; + } + + if (first || score > bestScore) { + bestScore = score; + bestType = i; + bestIndex = j; + + first = false; + } + } + } + + if (first) { + return false; + } + + *chosenType = (ResolutionType)bestType; + *chosenIndex = bestIndex; + + return true; +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h new file mode 100644 index 0000000..01de246 --- /dev/null +++ b/media/libstagefright/wifi-display/VideoFormats.h @@ -0,0 +1,106 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VIDEO_FORMATS_H_ + +#define VIDEO_FORMATS_H_ + +#include <media/stagefright/foundation/ABase.h> + +#include <stdint.h> + +namespace android { + +struct AString; + +// This class encapsulates that video resolution capabilities of a wfd source +// or sink as outlined in the wfd specs. Currently three sets of resolutions +// are specified, each of which supports up to 32 resolutions. +// In addition to its capabilities each sink/source also publishes its +// "native" resolution, presumably one that is preferred among all others +// because it wouldn't require any scaling and directly corresponds to the +// display capabilities/pixels. 
+struct VideoFormats { + VideoFormats(); + + struct config_t { + size_t width, height, framesPerSecond; + bool interlaced; + unsigned char profile, level; + }; + + enum ProfileType { + PROFILE_CBP = 0, + PROFILE_CHP, + kNumProfileTypes, + }; + + enum LevelType { + LEVEL_31 = 0, + LEVEL_32, + LEVEL_40, + LEVEL_41, + LEVEL_42, + kNumLevelTypes, + }; + + enum ResolutionType { + RESOLUTION_CEA, + RESOLUTION_VESA, + RESOLUTION_HH, + kNumResolutionTypes, + }; + + void setNativeResolution(ResolutionType type, size_t index); + void getNativeResolution(ResolutionType *type, size_t *index) const; + + void disableAll(); + void enableAll(); + + void setResolutionEnabled( + ResolutionType type, size_t index, bool enabled = true); + + bool isResolutionEnabled(ResolutionType type, size_t index) const; + + static bool GetConfiguration( + ResolutionType type, size_t index, + size_t *width, size_t *height, size_t *framesPerSecond, + bool *interlaced); + + bool parseFormatSpec(const char *spec); + AString getFormatSpec(bool forM4Message = false) const; + + static bool PickBestFormat( + const VideoFormats &sinkSupported, + const VideoFormats &sourceSupported, + ResolutionType *chosenType, + size_t *chosenIndex); + +private: + bool parseH264Codec(const char *spec); + ResolutionType mNativeType; + size_t mNativeIndex; + + uint32_t mResolutionEnabled[kNumResolutionTypes]; + static config_t mConfigs[kNumResolutionTypes][32]; + + DISALLOW_EVIL_CONSTRUCTORS(VideoFormats); +}; + +} // namespace android + +#endif // VIDEO_FORMATS_H_ + diff --git a/media/libstagefright/wifi-display/rtp/RTPBase.h b/media/libstagefright/wifi-display/rtp/RTPBase.h new file mode 100644 index 0000000..6178f00 --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPBase.h @@ -0,0 +1,51 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_BASE_H_ + +#define RTP_BASE_H_ + +namespace android { + +struct RTPBase { + enum PacketizationMode { + PACKETIZATION_TRANSPORT_STREAM, + PACKETIZATION_H264, + PACKETIZATION_AAC, + PACKETIZATION_NONE, + }; + + enum TransportMode { + TRANSPORT_UNDEFINED, + TRANSPORT_NONE, + TRANSPORT_UDP, + TRANSPORT_TCP, + TRANSPORT_TCP_INTERLEAVED, + }; + + enum { + // Really UDP _payload_ size + kMaxUDPPacketSize = 1472, // 1472 good, 1473 bad on Android@Home + }; + + static int32_t PickRandomRTPPort(); +}; + +} // namespace android + +#endif // RTP_BASE_H_ + + diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp new file mode 100644 index 0000000..095fd97 --- /dev/null +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -0,0 +1,795 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RTPSender" +#include <utils/Log.h> + +#include "RTPSender.h" + +#include "ANetworkSession.h" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/Utils.h> + +#include "include/avc_utils.h" + +namespace android { + +RTPSender::RTPSender( + const sp<ANetworkSession> &netSession, + const sp<AMessage> ¬ify) + : mNetSession(netSession), + mNotify(notify), + mRTPMode(TRANSPORT_UNDEFINED), + mRTCPMode(TRANSPORT_UNDEFINED), + mRTPSessionID(0), + mRTCPSessionID(0), + mRTPConnected(false), + mRTCPConnected(false), + mLastNTPTime(0), + mLastRTPTime(0), + mNumRTPSent(0), + mNumRTPOctetsSent(0), + mNumSRsSent(0), + mRTPSeqNo(0), + mHistorySize(0) { +} + +RTPSender::~RTPSender() { + if (mRTCPSessionID != 0) { + mNetSession->destroySession(mRTCPSessionID); + mRTCPSessionID = 0; + } + + if (mRTPSessionID != 0) { + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } +} + +// static +int32_t RTPBase::PickRandomRTPPort() { + // Pick an even integer in range [1024, 65534) + + static const size_t kRange = (65534 - 1024) / 2; + + return (int32_t)(((float)(kRange + 1) * rand()) / RAND_MAX) * 2 + 1024; +} + +status_t RTPSender::initAsync( + const char *remoteHost, + int32_t remoteRTPPort, + TransportMode rtpMode, + int32_t remoteRTCPPort, + TransportMode rtcpMode, + int32_t *outLocalRTPPort) { + if (mRTPMode != TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_UNDEFINED + || rtpMode == TRANSPORT_NONE + || rtcpMode == TRANSPORT_UNDEFINED) { + return INVALID_OPERATION; + } + + CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED); + CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED); + + if ((rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0) + || (rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0)) { + return INVALID_OPERATION; + } + + sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id()); + + sp<AMessage> rtcpNotify; + if (remoteRTCPPort >= 0) { + rtcpNotify = new AMessage(kWhatRTCPNotify, id()); + } + + CHECK_EQ(mRTPSessionID, 0); + CHECK_EQ(mRTCPSessionID, 0); + + int32_t localRTPPort; + + for (;;) { + localRTPPort = PickRandomRTPPort(); + + status_t err; + if (rtpMode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort, + remoteHost, + remoteRTPPort, + rtpNotify, + &mRTPSessionID); + } else { + CHECK_EQ(rtpMode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + localRTPPort, + remoteHost, + remoteRTPPort, + rtpNotify, + &mRTPSessionID); + } + + if (err != OK) { + continue; + } + + if (remoteRTCPPort < 0) { + break; + } + + if (rtcpMode == TRANSPORT_UDP) { + err = mNetSession->createUDPSession( + localRTPPort + 1, + remoteHost, + remoteRTCPPort, + rtcpNotify, + &mRTCPSessionID); + } else { + CHECK_EQ(rtcpMode, TRANSPORT_TCP); + err = mNetSession->createTCPDatagramSession( + localRTPPort + 1, + remoteHost, + remoteRTCPPort, + rtcpNotify, + &mRTCPSessionID); + } + + if (err == OK) { + 
break; + } + + mNetSession->destroySession(mRTPSessionID); + mRTPSessionID = 0; + } + + if (rtpMode == TRANSPORT_UDP) { + mRTPConnected = true; + } + + if (rtcpMode == TRANSPORT_UDP) { + mRTCPConnected = true; + } + + mRTPMode = rtpMode; + mRTCPMode = rtcpMode; + *outLocalRTPPort = localRTPPort; + + if (mRTPMode == TRANSPORT_UDP + && (mRTCPMode == TRANSPORT_UDP || mRTCPMode == TRANSPORT_NONE)) { + notifyInitDone(OK); + } + + return OK; +} + +status_t RTPSender::queueBuffer( + const sp<ABuffer> &buffer, uint8_t packetType, PacketizationMode mode) { + status_t err; + + switch (mode) { + case PACKETIZATION_NONE: + err = queueRawPacket(buffer, packetType); + break; + + case PACKETIZATION_TRANSPORT_STREAM: + err = queueTSPackets(buffer, packetType); + break; + + case PACKETIZATION_H264: + err = queueAVCBuffer(buffer, packetType); + break; + + default: + TRESPASS(); + } + + return err; +} + +status_t RTPSender::queueRawPacket( + const sp<ABuffer> &packet, uint8_t packetType) { + CHECK_LE(packet->size(), kMaxUDPPacketSize - 12); + + int64_t timeUs; + CHECK(packet->meta()->findInt64("timeUs", &timeUs)); + + sp<ABuffer> udpPacket = new ABuffer(12 + packet->size()); + + udpPacket->setInt32Data(mRTPSeqNo); + + uint8_t *rtp = udpPacket->data(); + rtp[0] = 0x80; + rtp[1] = packetType; + + rtp[2] = (mRTPSeqNo >> 8) & 0xff; + rtp[3] = mRTPSeqNo & 0xff; + ++mRTPSeqNo; + + uint32_t rtpTime = (timeUs * 9) / 100ll; + + rtp[4] = rtpTime >> 24; + rtp[5] = (rtpTime >> 16) & 0xff; + rtp[6] = (rtpTime >> 8) & 0xff; + rtp[7] = rtpTime & 0xff; + + rtp[8] = kSourceID >> 24; + rtp[9] = (kSourceID >> 16) & 0xff; + rtp[10] = (kSourceID >> 8) & 0xff; + rtp[11] = kSourceID & 0xff; + + memcpy(&rtp[12], packet->data(), packet->size()); + + return sendRTPPacket( + udpPacket, + true /* storeInHistory */, + true /* timeValid */, + ALooper::GetNowUs()); +} + +status_t RTPSender::queueTSPackets( + const sp<ABuffer> &tsPackets, uint8_t packetType) { + CHECK_EQ(0, tsPackets->size() % 188); + + int64_t timeUs; + CHECK(tsPackets->meta()->findInt64("timeUs", &timeUs)); + + const size_t numTSPackets = tsPackets->size() / 188; + + size_t srcOffset = 0; + while (srcOffset < tsPackets->size()) { + sp<ABuffer> udpPacket = + new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 188); + + udpPacket->setInt32Data(mRTPSeqNo); + + uint8_t *rtp = udpPacket->data(); + rtp[0] = 0x80; + rtp[1] = packetType; + + rtp[2] = (mRTPSeqNo >> 8) & 0xff; + rtp[3] = mRTPSeqNo & 0xff; + ++mRTPSeqNo; + + int64_t nowUs = ALooper::GetNowUs(); + uint32_t rtpTime = (nowUs * 9) / 100ll; + + rtp[4] = rtpTime >> 24; + rtp[5] = (rtpTime >> 16) & 0xff; + rtp[6] = (rtpTime >> 8) & 0xff; + rtp[7] = rtpTime & 0xff; + + rtp[8] = kSourceID >> 24; + rtp[9] = (kSourceID >> 16) & 0xff; + rtp[10] = (kSourceID >> 8) & 0xff; + rtp[11] = kSourceID & 0xff; + + size_t numTSPackets = (tsPackets->size() - srcOffset) / 188; + if (numTSPackets > kMaxNumTSPacketsPerRTPPacket) { + numTSPackets = kMaxNumTSPacketsPerRTPPacket; + } + + memcpy(&rtp[12], tsPackets->data() + srcOffset, numTSPackets * 188); + + udpPacket->setRange(0, 12 + numTSPackets * 188); + + srcOffset += numTSPackets * 188; + bool isLastPacket = (srcOffset == tsPackets->size()); + + status_t err = sendRTPPacket( + udpPacket, + true /* storeInHistory */, + isLastPacket /* timeValid */, + timeUs); + + if (err != OK) { + return err; + } + } + + return OK; +} + +status_t RTPSender::queueAVCBuffer( + const sp<ABuffer> &accessUnit, uint8_t packetType) { + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + + 
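// Editor's note (annotation, not part of this change): the conversion below
// maps microseconds onto the 90 kHz RTP media clock:
// timeUs * 90000 / 1000000 == timeUs * 9 / 100.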
uint32_t rtpTime = (timeUs * 9 / 100ll); + + List<sp<ABuffer> > packets; + + sp<ABuffer> out = new ABuffer(kMaxUDPPacketSize); + size_t outBytesUsed = 12; // Placeholder for RTP header. + + const uint8_t *data = accessUnit->data(); + size_t size = accessUnit->size(); + const uint8_t *nalStart; + size_t nalSize; + while (getNextNALUnit( + &data, &size, &nalStart, &nalSize, + true /* startCodeFollows */) == OK) { + size_t bytesNeeded = nalSize + 2; + if (outBytesUsed == 12) { + ++bytesNeeded; + } + + if (outBytesUsed + bytesNeeded > out->capacity()) { + bool emitSingleNALPacket = false; + + if (outBytesUsed == 12 + && outBytesUsed + nalSize <= out->capacity()) { + // We haven't emitted anything into the current packet yet and + // this NAL unit fits into a single-NAL-unit-packet while + // it wouldn't have fit as part of a STAP-A packet. + + memcpy(out->data() + outBytesUsed, nalStart, nalSize); + outBytesUsed += nalSize; + + emitSingleNALPacket = true; + } + + if (outBytesUsed > 12) { + out->setRange(0, outBytesUsed); + packets.push_back(out); + out = new ABuffer(kMaxUDPPacketSize); + outBytesUsed = 12; // Placeholder for RTP header + } + + if (emitSingleNALPacket) { + continue; + } + } + + if (outBytesUsed + bytesNeeded <= out->capacity()) { + uint8_t *dst = out->data() + outBytesUsed; + + if (outBytesUsed == 12) { + *dst++ = 24; // STAP-A header + } + + *dst++ = (nalSize >> 8) & 0xff; + *dst++ = nalSize & 0xff; + memcpy(dst, nalStart, nalSize); + + outBytesUsed += bytesNeeded; + continue; + } + + // This single NAL unit does not fit into a single RTP packet, + // we need to emit an FU-A. + + CHECK_EQ(outBytesUsed, 12u); + + uint8_t nalType = nalStart[0] & 0x1f; + uint8_t nri = (nalStart[0] >> 5) & 3; + + size_t srcOffset = 1; + while (srcOffset < nalSize) { + size_t copy = out->capacity() - outBytesUsed - 2; + if (copy > nalSize - srcOffset) { + copy = nalSize - srcOffset; + } + + uint8_t *dst = out->data() + outBytesUsed; + dst[0] = (nri << 5) | 28; + + dst[1] = nalType; + + if (srcOffset == 1) { + dst[1] |= 0x80; + } + + if (srcOffset + copy == nalSize) { + dst[1] |= 0x40; + } + + memcpy(&dst[2], nalStart + srcOffset, copy); + srcOffset += copy; + + out->setRange(0, outBytesUsed + copy + 2); + + packets.push_back(out); + out = new ABuffer(kMaxUDPPacketSize); + outBytesUsed = 12; // Placeholder for RTP header + } + } + + if (outBytesUsed > 12) { + out->setRange(0, outBytesUsed); + packets.push_back(out); + } + + while (!packets.empty()) { + sp<ABuffer> out = *packets.begin(); + packets.erase(packets.begin()); + + out->setInt32Data(mRTPSeqNo); + + bool last = packets.empty(); + + uint8_t *dst = out->data(); + + dst[0] = 0x80; + + dst[1] = packetType; + if (last) { + dst[1] |= 1 << 7; // M-bit + } + + dst[2] = (mRTPSeqNo >> 8) & 0xff; + dst[3] = mRTPSeqNo & 0xff; + ++mRTPSeqNo; + + dst[4] = rtpTime >> 24; + dst[5] = (rtpTime >> 16) & 0xff; + dst[6] = (rtpTime >> 8) & 0xff; + dst[7] = rtpTime & 0xff; + dst[8] = kSourceID >> 24; + dst[9] = (kSourceID >> 16) & 0xff; + dst[10] = (kSourceID >> 8) & 0xff; + dst[11] = kSourceID & 0xff; + + status_t err = sendRTPPacket(out, true /* storeInHistory */); + + if (err != OK) { + return err; + } + } + + return OK; +} + +status_t RTPSender::sendRTPPacket( + const sp<ABuffer> &buffer, bool storeInHistory, + bool timeValid, int64_t timeUs) { + CHECK(mRTPConnected); + + status_t err = mNetSession->sendRequest( + mRTPSessionID, buffer->data(), buffer->size(), + timeValid, timeUs); + + if (err != OK) { + return err; + } + + mLastNTPTime = GetNowNTP(); + 
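// Editor's note (annotation, not part of this change): the NTP wallclock and
// the packet's RTP timestamp (header bytes 4..7, read back below) are recorded
// as a pair, presumably so RTCP sender reports could relate the two clocks
// (mNumSRsSent hints at that intent, though no sender-report construction
// appears in the code shown here).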
mLastRTPTime = U32_AT(buffer->data() + 4); + + ++mNumRTPSent; + mNumRTPOctetsSent += buffer->size() - 12; + + if (storeInHistory) { + if (mHistorySize == kMaxHistorySize) { + mHistory.erase(mHistory.begin()); + } else { + ++mHistorySize; + } + mHistory.push_back(buffer); + } + + return OK; +} + +// static +uint64_t RTPSender::GetNowNTP() { + struct timeval tv; + gettimeofday(&tv, NULL /* timezone */); + + uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec; + + nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; + + uint64_t hi = nowUs / 1000000ll; + uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll; + + return (hi << 32) | lo; +} + +void RTPSender::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatRTPNotify: + case kWhatRTCPNotify: + onNetNotify(msg->what() == kWhatRTPNotify, msg); + break; + + default: + TRESPASS(); + } +} + +void RTPSender::onNetNotify(bool isRTP, const sp<AMessage> &msg) { + int32_t reason; + CHECK(msg->findInt32("reason", &reason)); + + switch (reason) { + case ANetworkSession::kWhatError: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + int32_t err; + CHECK(msg->findInt32("err", &err)); + + int32_t errorOccuredDuringSend; + CHECK(msg->findInt32("send", &errorOccuredDuringSend)); + + AString detail; + CHECK(msg->findString("detail", &detail)); + + ALOGE("An error occurred during %s in session %d " + "(%d, '%s' (%s)).", + errorOccuredDuringSend ? "send" : "receive", + sessionID, + err, + detail.c_str(), + strerror(-err)); + + mNetSession->destroySession(sessionID); + + if (sessionID == mRTPSessionID) { + mRTPSessionID = 0; + } else if (sessionID == mRTCPSessionID) { + mRTCPSessionID = 0; + } + + if (!mRTPConnected + || (mRTPMode != TRANSPORT_NONE && !mRTCPConnected)) { + // We haven't completed initialization, attach the error + // to the notification instead. + notifyInitDone(err); + break; + } + + notifyError(err); + break; + } + + case ANetworkSession::kWhatDatagram: + { + sp<ABuffer> data; + CHECK(msg->findBuffer("data", &data)); + + if (isRTP) { + ALOGW("Huh? Received data on RTP connection..."); + } else { + onRTCPData(data); + } + break; + } + + case ANetworkSession::kWhatConnected: + { + int32_t sessionID; + CHECK(msg->findInt32("sessionID", &sessionID)); + + if (isRTP) { + CHECK_EQ(mRTPMode, TRANSPORT_TCP); + CHECK_EQ(sessionID, mRTPSessionID); + mRTPConnected = true; + } else { + CHECK_EQ(mRTCPMode, TRANSPORT_TCP); + CHECK_EQ(sessionID, mRTCPSessionID); + mRTCPConnected = true; + } + + if (mRTPConnected + && (mRTCPMode == TRANSPORT_NONE || mRTCPConnected)) { + notifyInitDone(OK); + } + break; + } + + case ANetworkSession::kWhatNetworkStall: + { + size_t numBytesQueued; + CHECK(msg->findSize("numBytesQueued", &numBytesQueued)); + + notifyNetworkStall(numBytesQueued); + break; + } + + default: + TRESPASS(); + } +} + +status_t RTPSender::onRTCPData(const sp<ABuffer> &buffer) { + const uint8_t *data = buffer->data(); + size_t size = buffer->size(); + + while (size > 0) { + if (size < 8) { + // Too short to be a valid RTCP header + return ERROR_MALFORMED; + } + + if ((data[0] >> 6) != 2) { + // Unsupported version. + return ERROR_UNSUPPORTED; + } + + if (data[0] & 0x20) { + // Padding present. + + size_t paddingLength = data[size - 1]; + + if (paddingLength + 12 > size) { + // If we removed this much padding we'd end up with something + // that's too short to be a valid RTP header. 
+ return ERROR_MALFORMED; + } + + size -= paddingLength; + } + + size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4; + + if (size < headerLength) { + // Only received a partial packet? + return ERROR_MALFORMED; + } + + switch (data[1]) { + case 200: + case 201: // RR + parseReceiverReport(data, headerLength); + break; + + case 202: // SDES + case 203: + break; + + case 204: // APP + parseAPP(data, headerLength); + break; + + case 205: // TSFB (transport layer specific feedback) + parseTSFB(data, headerLength); + break; + + case 206: // PSFB (payload specific feedback) + // hexdump(data, headerLength); + break; + + default: + { + ALOGW("Unknown RTCP packet type %u of size %d", + (unsigned)data[1], headerLength); + break; + } + } + + data += headerLength; + size -= headerLength; + } + + return OK; +} + +status_t RTPSender::parseReceiverReport(const uint8_t *data, size_t size) { + // hexdump(data, size); + + float fractionLost = data[12] / 256.0f; + + ALOGI("lost %.2f %% of packets during report interval.", + 100.0f * fractionLost); + + return OK; +} + +status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) { + if ((data[0] & 0x1f) != 1) { + return ERROR_UNSUPPORTED; // We only support NACK for now. + } + + uint32_t srcId = U32_AT(&data[8]); + if (srcId != kSourceID) { + return ERROR_MALFORMED; + } + + for (size_t i = 12; i < size; i += 4) { + uint16_t seqNo = U16_AT(&data[i]); + uint16_t blp = U16_AT(&data[i + 2]); + + List<sp<ABuffer> >::iterator it = mHistory.begin(); + bool foundSeqNo = false; + while (it != mHistory.end()) { + const sp<ABuffer> &buffer = *it; + + uint16_t bufferSeqNo = buffer->int32Data() & 0xffff; + + bool retransmit = false; + if (bufferSeqNo == seqNo) { + retransmit = true; + } else if (blp != 0) { + for (size_t i = 0; i < 16; ++i) { + if ((blp & (1 << i)) + && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) { + blp &= ~(1 << i); + retransmit = true; + } + } + } + + if (retransmit) { + ALOGV("retransmitting seqNo %d", bufferSeqNo); + + CHECK_EQ((status_t)OK, + sendRTPPacket(buffer, false /* storeInHistory */)); + + if (bufferSeqNo == seqNo) { + foundSeqNo = true; + } + + if (foundSeqNo && blp == 0) { + break; + } + } + + ++it; + } + + if (!foundSeqNo || blp != 0) { + ALOGI("Some sequence numbers were no longer available for " + "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)", + seqNo, foundSeqNo, blp); + + if (!mHistory.empty()) { + int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff; + int32_t latest = (*--mHistory.end())->int32Data() & 0xffff; + + ALOGI("have seq numbers from %d - %d", earliest, latest); + } + } + } + + return OK; +} + +status_t RTPSender::parseAPP(const uint8_t *data, size_t size) { + return OK; +} + +void RTPSender::notifyInitDone(status_t err) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatInitDone); + notify->setInt32("err", err); + notify->post(); +} + +void RTPSender::notifyError(status_t err) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); +} + +void RTPSender::notifyNetworkStall(size_t numBytesQueued) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatNetworkStall); + notify->setSize("numBytesQueued", numBytesQueued); + notify->post(); +} + +} // namespace android + diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h new file mode 100644 index 0000000..7dc138a --- /dev/null +++ 
b/media/libstagefright/wifi-display/rtp/RTPSender.h @@ -0,0 +1,120 @@ +/* + * Copyright 2013, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef RTP_SENDER_H_ + +#define RTP_SENDER_H_ + +#include "RTPBase.h" + +#include <media/stagefright/foundation/AHandler.h> + +namespace android { + +struct ABuffer; +struct ANetworkSession; + +// An object of this class facilitates sending of media data over an RTP +// channel. The channel is established over a UDP or TCP connection depending +// on which "TransportMode" was chosen. In addition different RTP packetization +// schemes are supported such as "Transport Stream Packets over RTP", +// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)" +struct RTPSender : public RTPBase, public AHandler { + enum { + kWhatInitDone, + kWhatError, + kWhatNetworkStall, + }; + RTPSender( + const sp<ANetworkSession> &netSession, + const sp<AMessage> ¬ify); + + status_t initAsync( + const char *remoteHost, + int32_t remoteRTPPort, + TransportMode rtpMode, + int32_t remoteRTCPPort, + TransportMode rtcpMode, + int32_t *outLocalRTPPort); + + status_t queueBuffer( + const sp<ABuffer> &buffer, + uint8_t packetType, + PacketizationMode mode); + +protected: + virtual ~RTPSender(); + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + enum { + kWhatRTPNotify, + kWhatRTCPNotify, + }; + + enum { + kMaxNumTSPacketsPerRTPPacket = (kMaxUDPPacketSize - 12) / 188, + kMaxHistorySize = 1024, + kSourceID = 0xdeadbeef, + }; + + sp<ANetworkSession> mNetSession; + sp<AMessage> mNotify; + TransportMode mRTPMode; + TransportMode mRTCPMode; + int32_t mRTPSessionID; + int32_t mRTCPSessionID; + bool mRTPConnected; + bool mRTCPConnected; + + uint64_t mLastNTPTime; + uint32_t mLastRTPTime; + uint32_t mNumRTPSent; + uint32_t mNumRTPOctetsSent; + uint32_t mNumSRsSent; + + uint32_t mRTPSeqNo; + + List<sp<ABuffer> > mHistory; + size_t mHistorySize; + + static uint64_t GetNowNTP(); + + status_t queueRawPacket(const sp<ABuffer> &tsPackets, uint8_t packetType); + status_t queueTSPackets(const sp<ABuffer> &tsPackets, uint8_t packetType); + status_t queueAVCBuffer(const sp<ABuffer> &accessUnit, uint8_t packetType); + + status_t sendRTPPacket( + const sp<ABuffer> &packet, bool storeInHistory, + bool timeValid = false, int64_t timeUs = -1ll); + + void onNetNotify(bool isRTP, const sp<AMessage> &msg); + + status_t onRTCPData(const sp<ABuffer> &data); + status_t parseReceiverReport(const uint8_t *data, size_t size); + status_t parseTSFB(const uint8_t *data, size_t size); + status_t parseAPP(const uint8_t *data, size_t size); + + void notifyInitDone(status_t err); + void notifyError(status_t err); + void notifyNetworkStall(size_t numBytesQueued); + + DISALLOW_EVIL_CONSTRUCTORS(RTPSender); +}; + +} // namespace android + +#endif // RTP_SENDER_H_ diff --git a/media/libstagefright/wifi-display/sink/LinearRegression.cpp b/media/libstagefright/wifi-display/sink/LinearRegression.cpp deleted file mode 100644 index 
8cfce37..0000000 --- a/media/libstagefright/wifi-display/sink/LinearRegression.cpp +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "LinearRegression" -#include <utils/Log.h> - -#include "LinearRegression.h" - -#include <math.h> -#include <string.h> - -namespace android { - -LinearRegression::LinearRegression(size_t historySize) - : mHistorySize(historySize), - mCount(0), - mHistory(new Point[mHistorySize]), - mSumX(0.0), - mSumY(0.0) { -} - -LinearRegression::~LinearRegression() { - delete[] mHistory; - mHistory = NULL; -} - -void LinearRegression::addPoint(float x, float y) { - if (mCount == mHistorySize) { - const Point &oldest = mHistory[0]; - - mSumX -= oldest.mX; - mSumY -= oldest.mY; - - memmove(&mHistory[0], &mHistory[1], (mHistorySize - 1) * sizeof(Point)); - --mCount; - } - - Point *newest = &mHistory[mCount++]; - newest->mX = x; - newest->mY = y; - - mSumX += x; - mSumY += y; -} - -bool LinearRegression::approxLine(float *n1, float *n2, float *b) const { - static const float kEpsilon = 1.0E-4; - - if (mCount < 2) { - return false; - } - - float sumX2 = 0.0f; - float sumY2 = 0.0f; - float sumXY = 0.0f; - - float meanX = mSumX / (float)mCount; - float meanY = mSumY / (float)mCount; - - for (size_t i = 0; i < mCount; ++i) { - const Point &p = mHistory[i]; - - float x = p.mX - meanX; - float y = p.mY - meanY; - - sumX2 += x * x; - sumY2 += y * y; - sumXY += x * y; - } - - float T = sumX2 + sumY2; - float D = sumX2 * sumY2 - sumXY * sumXY; - float root = sqrt(T * T * 0.25 - D); - - float L1 = T * 0.5 - root; - - if (fabs(sumXY) > kEpsilon) { - *n1 = 1.0; - *n2 = (2.0 * L1 - sumX2) / sumXY; - - float mag = sqrt((*n1) * (*n1) + (*n2) * (*n2)); - - *n1 /= mag; - *n2 /= mag; - } else { - *n1 = 0.0; - *n2 = 1.0; - } - - *b = (*n1) * meanX + (*n2) * meanY; - - return true; -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/LinearRegression.h b/media/libstagefright/wifi-display/sink/LinearRegression.h deleted file mode 100644 index ca6f5a1..0000000 --- a/media/libstagefright/wifi-display/sink/LinearRegression.h +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef LINEAR_REGRESSION_H_ - -#define LINEAR_REGRESSION_H_ - -#include <sys/types.h> -#include <media/stagefright/foundation/ABase.h> - -namespace android { - -// Helper class to fit a line to a set of points minimizing the sum of -// squared (orthogonal) distances from line to individual points. -struct LinearRegression { - LinearRegression(size_t historySize); - ~LinearRegression(); - - void addPoint(float x, float y); - - bool approxLine(float *n1, float *n2, float *b) const; - -private: - struct Point { - float mX, mY; - }; - - size_t mHistorySize; - size_t mCount; - Point *mHistory; - - float mSumX, mSumY; - - DISALLOW_EVIL_CONSTRUCTORS(LinearRegression); -}; - -} // namespace android - -#endif // LINEAR_REGRESSION_H_ diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp deleted file mode 100644 index 0918034..0000000 --- a/media/libstagefright/wifi-display/sink/RTPSink.cpp +++ /dev/null @@ -1,806 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "RTPSink" -#include <utils/Log.h> - -#include "RTPSink.h" - -#include "ANetworkSession.h" -#include "TunnelRenderer.h" - -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/foundation/hexdump.h> -#include <media/stagefright/MediaErrors.h> -#include <media/stagefright/Utils.h> - -namespace android { - -struct RTPSink::Source : public RefBase { - Source(uint16_t seq, const sp<ABuffer> &buffer, - const sp<AMessage> queueBufferMsg); - - bool updateSeq(uint16_t seq, const sp<ABuffer> &buffer); - - void addReportBlock(uint32_t ssrc, const sp<ABuffer> &buf); - -protected: - virtual ~Source(); - -private: - static const uint32_t kMinSequential = 2; - static const uint32_t kMaxDropout = 3000; - static const uint32_t kMaxMisorder = 100; - static const uint32_t kRTPSeqMod = 1u << 16; - - sp<AMessage> mQueueBufferMsg; - - uint16_t mMaxSeq; - uint32_t mCycles; - uint32_t mBaseSeq; - uint32_t mBadSeq; - uint32_t mProbation; - uint32_t mReceived; - uint32_t mExpectedPrior; - uint32_t mReceivedPrior; - - void initSeq(uint16_t seq); - void queuePacket(const sp<ABuffer> &buffer); - - DISALLOW_EVIL_CONSTRUCTORS(Source); -}; - -//////////////////////////////////////////////////////////////////////////////// - -RTPSink::Source::Source( - uint16_t seq, const sp<ABuffer> &buffer, - const sp<AMessage> queueBufferMsg) - : mQueueBufferMsg(queueBufferMsg), - mProbation(kMinSequential) { - initSeq(seq); - mMaxSeq = seq - 1; - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); -} - -RTPSink::Source::~Source() { -} - -void RTPSink::Source::initSeq(uint16_t seq) { - mMaxSeq = seq; - mCycles = 0; - mBaseSeq = seq; - mBadSeq = kRTPSeqMod + 1; - mReceived = 0; - mExpectedPrior = 0; - mReceivedPrior = 0; -} - -bool RTPSink::Source::updateSeq(uint16_t 
seq, const sp<ABuffer> &buffer) { - uint16_t udelta = seq - mMaxSeq; - - if (mProbation) { - // Startup phase - - if (seq == mMaxSeq + 1) { - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - - --mProbation; - mMaxSeq = seq; - if (mProbation == 0) { - initSeq(seq); - ++mReceived; - - return true; - } - } else { - // Packet out of sequence, restart startup phase - - mProbation = kMinSequential - 1; - mMaxSeq = seq; - -#if 0 - mPackets.clear(); - mTotalBytesQueued = 0; - ALOGI("XXX cleared packets"); -#endif - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - } - - return false; - } - - if (udelta < kMaxDropout) { - // In order, with permissible gap. - - if (seq < mMaxSeq) { - // Sequence number wrapped - count another 64K cycle - mCycles += kRTPSeqMod; - } - - mMaxSeq = seq; - } else if (udelta <= kRTPSeqMod - kMaxMisorder) { - // The sequence number made a very large jump - - if (seq == mBadSeq) { - // Two sequential packets -- assume that the other side - // restarted without telling us so just re-sync - // (i.e. pretend this was the first packet) - - initSeq(seq); - } else { - mBadSeq = (seq + 1) & (kRTPSeqMod - 1); - - return false; - } - } else { - // Duplicate or reordered packet. - } - - ++mReceived; - - buffer->setInt32Data(mCycles | seq); - queuePacket(buffer); - - return true; -} - -void RTPSink::Source::queuePacket(const sp<ABuffer> &buffer) { - sp<AMessage> msg = mQueueBufferMsg->dup(); - msg->setBuffer("buffer", buffer); - msg->post(); -} - -void RTPSink::Source::addReportBlock( - uint32_t ssrc, const sp<ABuffer> &buf) { - uint32_t extMaxSeq = mMaxSeq | mCycles; - uint32_t expected = extMaxSeq - mBaseSeq + 1; - - int64_t lost = (int64_t)expected - (int64_t)mReceived; - if (lost > 0x7fffff) { - lost = 0x7fffff; - } else if (lost < -0x800000) { - lost = -0x800000; - } - - uint32_t expectedInterval = expected - mExpectedPrior; - mExpectedPrior = expected; - - uint32_t receivedInterval = mReceived - mReceivedPrior; - mReceivedPrior = mReceived; - - int64_t lostInterval = expectedInterval - receivedInterval; - - uint8_t fractionLost; - if (expectedInterval == 0 || lostInterval <=0) { - fractionLost = 0; - } else { - fractionLost = (lostInterval << 8) / expectedInterval; - } - - uint8_t *ptr = buf->data() + buf->size(); - - ptr[0] = ssrc >> 24; - ptr[1] = (ssrc >> 16) & 0xff; - ptr[2] = (ssrc >> 8) & 0xff; - ptr[3] = ssrc & 0xff; - - ptr[4] = fractionLost; - - ptr[5] = (lost >> 16) & 0xff; - ptr[6] = (lost >> 8) & 0xff; - ptr[7] = lost & 0xff; - - ptr[8] = extMaxSeq >> 24; - ptr[9] = (extMaxSeq >> 16) & 0xff; - ptr[10] = (extMaxSeq >> 8) & 0xff; - ptr[11] = extMaxSeq & 0xff; - - // XXX TODO: - - ptr[12] = 0x00; // interarrival jitter - ptr[13] = 0x00; - ptr[14] = 0x00; - ptr[15] = 0x00; - - ptr[16] = 0x00; // last SR - ptr[17] = 0x00; - ptr[18] = 0x00; - ptr[19] = 0x00; - - ptr[20] = 0x00; // delay since last SR - ptr[21] = 0x00; - ptr[22] = 0x00; - ptr[23] = 0x00; -} - -//////////////////////////////////////////////////////////////////////////////// - -RTPSink::RTPSink( - const sp<ANetworkSession> &netSession, - const sp<ISurfaceTexture> &surfaceTex) - : mNetSession(netSession), - mSurfaceTex(surfaceTex), - mRTPPort(0), - mRTPSessionID(0), - mRTCPSessionID(0), - mFirstArrivalTimeUs(-1ll), - mNumPacketsReceived(0ll), - mRegression(1000), - mMaxDelayMs(-1ll) { -} - -RTPSink::~RTPSink() { - if (mRTCPSessionID != 0) { - mNetSession->destroySession(mRTCPSessionID); - } - - if (mRTPSessionID != 0) { - mNetSession->destroySession(mRTPSessionID); - } -} - -status_t 
RTPSink::init(bool useTCPInterleaving) { - if (useTCPInterleaving) { - return OK; - } - - int clientRtp; - - sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id()); - sp<AMessage> rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - for (clientRtp = 15550;; clientRtp += 2) { - int32_t rtpSession; - status_t err = mNetSession->createUDPSession( - clientRtp, rtpNotify, &rtpSession); - - if (err != OK) { - ALOGI("failed to create RTP socket on port %d", clientRtp); - continue; - } - - int32_t rtcpSession; - err = mNetSession->createUDPSession( - clientRtp + 1, rtcpNotify, &rtcpSession); - - if (err == OK) { - mRTPPort = clientRtp; - mRTPSessionID = rtpSession; - mRTCPSessionID = rtcpSession; - break; - } - - ALOGI("failed to create RTCP socket on port %d", clientRtp + 1); - mNetSession->destroySession(rtpSession); - } - - if (mRTPPort == 0) { - return UNKNOWN_ERROR; - } - - return OK; -} - -int32_t RTPSink::getRTPPort() const { - return mRTPPort; -} - -void RTPSink::onMessageReceived(const sp<AMessage> &msg) { - switch (msg->what()) { - case kWhatRTPNotify: - case kWhatRTCPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - if (sessionID == mRTPSessionID) { - mRTPSessionID = 0; - } else if (sessionID == mRTCPSessionID) { - mRTCPSessionID = 0; - } - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp<ABuffer> data; - CHECK(msg->findBuffer("data", &data)); - - status_t err; - if (msg->what() == kWhatRTPNotify) { - err = parseRTP(data); - } else { - err = parseRTCP(data); - } - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatSendRR: - { - onSendRR(); - break; - } - - case kWhatPacketLost: - { - onPacketLost(msg); - break; - } - - case kWhatInject: - { - int32_t isRTP; - CHECK(msg->findInt32("isRTP", &isRTP)); - - sp<ABuffer> buffer; - CHECK(msg->findBuffer("buffer", &buffer)); - - status_t err; - if (isRTP) { - err = parseRTP(buffer); - } else { - err = parseRTCP(buffer); - } - break; - } - - default: - TRESPASS(); - } -} - -status_t RTPSink::injectPacket(bool isRTP, const sp<ABuffer> &buffer) { - sp<AMessage> msg = new AMessage(kWhatInject, id()); - msg->setInt32("isRTP", isRTP); - msg->setBuffer("buffer", buffer); - msg->post(); - - return OK; -} - -status_t RTPSink::parseRTP(const sp<ABuffer> &buffer) { - size_t size = buffer->size(); - if (size < 12) { - // Too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - const uint8_t *data = buffer->data(); - - if ((data[0] >> 6) != 2) { - // Unsupported version. - return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - int numCSRCs = data[0] & 0x0f; - - size_t payloadOffset = 12 + 4 * numCSRCs; - - if (size < payloadOffset) { - // Not enough data to fit the basic header and all the CSRC entries. 
- return ERROR_MALFORMED; - } - - if (data[0] & 0x10) { - // Header eXtension present. - - if (size < payloadOffset + 4) { - // Not enough data to fit the basic header, all CSRC entries - // and the first 4 bytes of the extension header. - - return ERROR_MALFORMED; - } - - const uint8_t *extensionData = &data[payloadOffset]; - - size_t extensionLength = - 4 * (extensionData[2] << 8 | extensionData[3]); - - if (size < payloadOffset + 4 + extensionLength) { - return ERROR_MALFORMED; - } - - payloadOffset += 4 + extensionLength; - } - - uint32_t srcId = U32_AT(&data[8]); - uint32_t rtpTime = U32_AT(&data[4]); - uint16_t seqNo = U16_AT(&data[2]); - - int64_t arrivalTimeUs; - CHECK(buffer->meta()->findInt64("arrivalTimeUs", &arrivalTimeUs)); - - if (mFirstArrivalTimeUs < 0ll) { - mFirstArrivalTimeUs = arrivalTimeUs; - } - arrivalTimeUs -= mFirstArrivalTimeUs; - - int64_t arrivalTimeMedia = (arrivalTimeUs * 9ll) / 100ll; - - ALOGV("seqNo: %d, SSRC 0x%08x, diff %lld", - seqNo, srcId, rtpTime - arrivalTimeMedia); - - mRegression.addPoint((float)rtpTime, (float)arrivalTimeMedia); - - ++mNumPacketsReceived; - - float n1, n2, b; - if (mRegression.approxLine(&n1, &n2, &b)) { - ALOGV("Line %lld: %.2f %.2f %.2f, slope %.2f", - mNumPacketsReceived, n1, n2, b, -n1 / n2); - - float expectedArrivalTimeMedia = (b - n1 * (float)rtpTime) / n2; - float latenessMs = (arrivalTimeMedia - expectedArrivalTimeMedia) / 90.0; - - if (mMaxDelayMs < 0ll || latenessMs > mMaxDelayMs) { - mMaxDelayMs = latenessMs; - ALOGI("packet was %.2f ms late", latenessMs); - } - } - - sp<AMessage> meta = buffer->meta(); - meta->setInt32("ssrc", srcId); - meta->setInt32("rtp-time", rtpTime); - meta->setInt32("PT", data[1] & 0x7f); - meta->setInt32("M", data[1] >> 7); - - buffer->setRange(payloadOffset, size - payloadOffset); - - ssize_t index = mSources.indexOfKey(srcId); - if (index < 0) { - if (mRenderer == NULL) { - sp<AMessage> notifyLost = new AMessage(kWhatPacketLost, id()); - notifyLost->setInt32("ssrc", srcId); - - mRenderer = new TunnelRenderer(notifyLost, mSurfaceTex); - looper()->registerHandler(mRenderer); - } - - sp<AMessage> queueBufferMsg = - new AMessage(TunnelRenderer::kWhatQueueBuffer, mRenderer->id()); - - sp<Source> source = new Source(seqNo, buffer, queueBufferMsg); - mSources.add(srcId, source); - } else { - mSources.valueAt(index)->updateSeq(seqNo, buffer); - } - - return OK; -} - -status_t RTPSink::parseRTCP(const sp<ABuffer> &buffer) { - const uint8_t *data = buffer->data(); - size_t size = buffer->size(); - - while (size > 0) { - if (size < 8) { - // Too short to be a valid RTCP header - return ERROR_MALFORMED; - } - - if ((data[0] >> 6) != 2) { - // Unsupported version. - return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4; - - if (size < headerLength) { - // Only received a partial packet? 
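The headerLength expression above is the RTCP common-header length rule: the 16-bit length field counts 32-bit words minus one, so the byte size is (length + 1) * 4, which is exactly what 4 * (data[2] << 8 | data[3]) + 4 computes. A minimal sketch of the same arithmetic (the helper name is illustrative, not from the original file):

    #include <stddef.h>
    #include <stdint.h>

    // RTCP common header: length field = packet size in 32-bit words, minus one.
    static size_t rtcpPacketLengthBytes(const uint8_t *header) {
        uint16_t lengthInWordsMinusOne = (header[2] << 8) | header[3];
        return (static_cast<size_t>(lengthInWordsMinusOne) + 1) * 4;
    }

The report writers further down in this file (onSendRR, addSDES) fill the same field from the other direction, as sizeInWordsMinus1 / numWords.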
- return ERROR_MALFORMED; - } - - switch (data[1]) { - case 200: - { - parseSR(data, headerLength); - break; - } - - case 201: // RR - case 202: // SDES - case 204: // APP - break; - - case 205: // TSFB (transport layer specific feedback) - case 206: // PSFB (payload specific feedback) - // hexdump(data, headerLength); - break; - - case 203: - { - parseBYE(data, headerLength); - break; - } - - default: - { - ALOGW("Unknown RTCP packet type %u of size %d", - (unsigned)data[1], headerLength); - break; - } - } - - data += headerLength; - size -= headerLength; - } - - return OK; -} - -status_t RTPSink::parseBYE(const uint8_t *data, size_t size) { - size_t SC = data[0] & 0x3f; - - if (SC == 0 || size < (4 + SC * 4)) { - // Packet too short for the minimal BYE header. - return ERROR_MALFORMED; - } - - uint32_t id = U32_AT(&data[4]); - - return OK; -} - -status_t RTPSink::parseSR(const uint8_t *data, size_t size) { - size_t RC = data[0] & 0x1f; - - if (size < (7 + RC * 6) * 4) { - // Packet too short for the minimal SR header. - return ERROR_MALFORMED; - } - - uint32_t id = U32_AT(&data[4]); - uint64_t ntpTime = U64_AT(&data[8]); - uint32_t rtpTime = U32_AT(&data[16]); - - ALOGV("SR: ssrc 0x%08x, ntpTime 0x%016llx, rtpTime 0x%08x", - id, ntpTime, rtpTime); - - return OK; -} - -status_t RTPSink::connect( - const char *host, int32_t remoteRtpPort, int32_t remoteRtcpPort) { - ALOGI("connecting RTP/RTCP sockets to %s:{%d,%d}", - host, remoteRtpPort, remoteRtcpPort); - - status_t err = - mNetSession->connectUDPSession(mRTPSessionID, host, remoteRtpPort); - - if (err != OK) { - return err; - } - - err = mNetSession->connectUDPSession(mRTCPSessionID, host, remoteRtcpPort); - - if (err != OK) { - return err; - } - -#if 0 - sp<ABuffer> buf = new ABuffer(1500); - memset(buf->data(), 0, buf->size()); - - mNetSession->sendRequest( - mRTPSessionID, buf->data(), buf->size()); - - mNetSession->sendRequest( - mRTCPSessionID, buf->data(), buf->size()); -#endif - - scheduleSendRR(); - - return OK; -} - -void RTPSink::scheduleSendRR() { - (new AMessage(kWhatSendRR, id()))->post(2000000ll); -} - -void RTPSink::addSDES(const sp<ABuffer> &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - data[0] = 0x80 | 1; - data[1] = 202; // SDES - data[4] = 0xde; // SSRC - data[5] = 0xad; - data[6] = 0xbe; - data[7] = 0xef; - - size_t offset = 8; - - data[offset++] = 1; // CNAME - - AString cname = "stagefright@somewhere"; - data[offset++] = cname.size(); - - memcpy(&data[offset], cname.c_str(), cname.size()); - offset += cname.size(); - - data[offset++] = 6; // TOOL - - AString tool = "stagefright/1.0"; - data[offset++] = tool.size(); - - memcpy(&data[offset], tool.c_str(), tool.size()); - offset += tool.size(); - - data[offset++] = 0; - - if ((offset % 4) > 0) { - size_t count = 4 - (offset % 4); - switch (count) { - case 3: - data[offset++] = 0; - case 2: - data[offset++] = 0; - case 1: - data[offset++] = 0; - } - } - - size_t numWords = (offset / 4) - 1; - data[2] = numWords >> 8; - data[3] = numWords & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + offset); -} - -void RTPSink::onSendRR() { - sp<ABuffer> buf = new ABuffer(1500); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 0; - ptr[1] = 201; // RR - ptr[2] = 0; - ptr[3] = 1; - ptr[4] = 0xde; // SSRC - ptr[5] = 0xad; - ptr[6] = 0xbe; - ptr[7] = 0xef; - - buf->setRange(0, 8); - - size_t numReportBlocks = 0; - for (size_t i = 0; i < mSources.size(); ++i) { - uint32_t ssrc = mSources.keyAt(i); - sp<Source> source = 
mSources.valueAt(i); - - if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) { - // Cannot fit another report block. - break; - } - - source->addReportBlock(ssrc, buf); - ++numReportBlocks; - } - - ptr[0] |= numReportBlocks; // 5 bit - - size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks; - ptr[2] = sizeInWordsMinus1 >> 8; - ptr[3] = sizeInWordsMinus1 & 0xff; - - buf->setRange(0, (sizeInWordsMinus1 + 1) * 4); - - addSDES(buf); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); - - scheduleSendRR(); -} - -void RTPSink::onPacketLost(const sp<AMessage> &msg) { - uint32_t srcId; - CHECK(msg->findInt32("ssrc", (int32_t *)&srcId)); - - int32_t seqNo; - CHECK(msg->findInt32("seqNo", &seqNo)); - - int32_t blp = 0; - - sp<ABuffer> buf = new ABuffer(1500); - buf->setRange(0, 0); - - uint8_t *ptr = buf->data(); - ptr[0] = 0x80 | 1; // generic NACK - ptr[1] = 205; // RTPFB - ptr[2] = 0; - ptr[3] = 3; - ptr[4] = 0xde; // sender SSRC - ptr[5] = 0xad; - ptr[6] = 0xbe; - ptr[7] = 0xef; - ptr[8] = (srcId >> 24) & 0xff; - ptr[9] = (srcId >> 16) & 0xff; - ptr[10] = (srcId >> 8) & 0xff; - ptr[11] = (srcId & 0xff); - ptr[12] = (seqNo >> 8) & 0xff; - ptr[13] = (seqNo & 0xff); - ptr[14] = (blp >> 8) & 0xff; - ptr[15] = (blp & 0xff); - - buf->setRange(0, 16); - - mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size()); -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h deleted file mode 100644 index a1d127d..0000000 --- a/media/libstagefright/wifi-display/sink/RTPSink.h +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef RTP_SINK_H_ - -#define RTP_SINK_H_ - -#include <media/stagefright/foundation/AHandler.h> - -#include "LinearRegression.h" - -#include <gui/Surface.h> - -namespace android { - -struct ABuffer; -struct ANetworkSession; -struct TunnelRenderer; - -// Creates a pair of sockets for RTP/RTCP traffic, instantiates a renderer -// for incoming transport stream data and occasionally sends statistics over -// the RTCP channel. -struct RTPSink : public AHandler { - RTPSink(const sp<ANetworkSession> &netSession, - const sp<ISurfaceTexture> &surfaceTex); - - // If TCP interleaving is used, no UDP sockets are created, instead - // incoming RTP/RTCP packets (arriving on the RTSP control connection) - // are manually injected by WifiDisplaySink. 
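For the interleaved case described above, the RTSP network layer delivers each interleaved binary chunk together with its channel number, and the caller only has to route it via injectPacket(); in WifiDisplaySink's kWhatBinaryData handler in this same change, channel 0 is treated as RTP and channel 1 as RTCP. A minimal caller-side sketch, assuming channel and data were already extracted by ANetworkSession (the helper name is illustrative, not from the original file):

    // Sketch only: route one interleaved chunk to the sink.
    static void feedInterleavedPacket(
            const sp<RTPSink> &sink, int32_t channel, const sp<ABuffer> &data) {
        // Channel 0 carries RTP, channel 1 carries RTCP.
        sink->injectPacket(channel == 0 /* isRTP */, data);
    }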
- status_t init(bool useTCPInterleaving); - - status_t connect( - const char *host, int32_t remoteRtpPort, int32_t remoteRtcpPort); - - int32_t getRTPPort() const; - - status_t injectPacket(bool isRTP, const sp<ABuffer> &buffer); - -protected: - virtual void onMessageReceived(const sp<AMessage> &msg); - virtual ~RTPSink(); - -private: - enum { - kWhatRTPNotify, - kWhatRTCPNotify, - kWhatSendRR, - kWhatPacketLost, - kWhatInject, - }; - - struct Source; - struct StreamSource; - - sp<ANetworkSession> mNetSession; - sp<ISurfaceTexture> mSurfaceTex; - KeyedVector<uint32_t, sp<Source> > mSources; - - int32_t mRTPPort; - int32_t mRTPSessionID; - int32_t mRTCPSessionID; - - int64_t mFirstArrivalTimeUs; - int64_t mNumPacketsReceived; - LinearRegression mRegression; - int64_t mMaxDelayMs; - - sp<TunnelRenderer> mRenderer; - - status_t parseRTP(const sp<ABuffer> &buffer); - status_t parseRTCP(const sp<ABuffer> &buffer); - status_t parseBYE(const uint8_t *data, size_t size); - status_t parseSR(const uint8_t *data, size_t size); - - void addSDES(const sp<ABuffer> &buffer); - void onSendRR(); - void onPacketLost(const sp<AMessage> &msg); - void scheduleSendRR(); - - DISALLOW_EVIL_CONSTRUCTORS(RTPSink); -}; - -} // namespace android - -#endif // RTP_SINK_H_ diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp deleted file mode 100644 index bc35aef..0000000 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp +++ /dev/null @@ -1,396 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "TunnelRenderer" -#include <utils/Log.h> - -#include "TunnelRenderer.h" - -#include "ATSParser.h" - -#include <binder/IMemory.h> -#include <binder/IServiceManager.h> -#include <gui/SurfaceComposerClient.h> -#include <media/IMediaPlayerService.h> -#include <media/IStreamSource.h> -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/AMessage.h> -#include <ui/DisplayInfo.h> - -namespace android { - -struct TunnelRenderer::PlayerClient : public BnMediaPlayerClient { - PlayerClient() {} - - virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) { - ALOGI("notify %d, %d, %d", msg, ext1, ext2); - } - -protected: - virtual ~PlayerClient() {} - -private: - DISALLOW_EVIL_CONSTRUCTORS(PlayerClient); -}; - -struct TunnelRenderer::StreamSource : public BnStreamSource { - StreamSource(TunnelRenderer *owner); - - virtual void setListener(const sp<IStreamListener> &listener); - virtual void setBuffers(const Vector<sp<IMemory> > &buffers); - - virtual void onBufferAvailable(size_t index); - - virtual uint32_t flags() const; - - void doSomeWork(); - -protected: - virtual ~StreamSource(); - -private: - mutable Mutex mLock; - - TunnelRenderer *mOwner; - - sp<IStreamListener> mListener; - - Vector<sp<IMemory> > mBuffers; - List<size_t> mIndicesAvailable; - - size_t mNumDeqeued; - - DISALLOW_EVIL_CONSTRUCTORS(StreamSource); -}; - -//////////////////////////////////////////////////////////////////////////////// - -TunnelRenderer::StreamSource::StreamSource(TunnelRenderer *owner) - : mOwner(owner), - mNumDeqeued(0) { -} - -TunnelRenderer::StreamSource::~StreamSource() { -} - -void TunnelRenderer::StreamSource::setListener( - const sp<IStreamListener> &listener) { - mListener = listener; -} - -void TunnelRenderer::StreamSource::setBuffers( - const Vector<sp<IMemory> > &buffers) { - mBuffers = buffers; -} - -void TunnelRenderer::StreamSource::onBufferAvailable(size_t index) { - CHECK_LT(index, mBuffers.size()); - - { - Mutex::Autolock autoLock(mLock); - mIndicesAvailable.push_back(index); - } - - doSomeWork(); -} - -uint32_t TunnelRenderer::StreamSource::flags() const { - return kFlagAlignedVideoData; -} - -void TunnelRenderer::StreamSource::doSomeWork() { - Mutex::Autolock autoLock(mLock); - - while (!mIndicesAvailable.empty()) { - sp<ABuffer> srcBuffer = mOwner->dequeueBuffer(); - if (srcBuffer == NULL) { - break; - } - - ++mNumDeqeued; - - if (mNumDeqeued == 1) { - ALOGI("fixing real time now."); - - sp<AMessage> extra = new AMessage; - - extra->setInt32( - IStreamListener::kKeyDiscontinuityMask, - ATSParser::DISCONTINUITY_ABSOLUTE_TIME); - - extra->setInt64("timeUs", ALooper::GetNowUs()); - - mListener->issueCommand( - IStreamListener::DISCONTINUITY, - false /* synchronous */, - extra); - } - - ALOGV("dequeue TS packet of size %d", srcBuffer->size()); - - size_t index = *mIndicesAvailable.begin(); - mIndicesAvailable.erase(mIndicesAvailable.begin()); - - sp<IMemory> mem = mBuffers.itemAt(index); - CHECK_LE(srcBuffer->size(), mem->size()); - CHECK_EQ((srcBuffer->size() % 188), 0u); - - memcpy(mem->pointer(), srcBuffer->data(), srcBuffer->size()); - mListener->queueBuffer(index, srcBuffer->size()); - } -} - -//////////////////////////////////////////////////////////////////////////////// - -TunnelRenderer::TunnelRenderer( - const sp<AMessage> ¬ifyLost, - const sp<ISurfaceTexture> &surfaceTex) - : mNotifyLost(notifyLost), - mSurfaceTex(surfaceTex), - 
mTotalBytesQueued(0ll), - mLastDequeuedExtSeqNo(-1), - mFirstFailedAttemptUs(-1ll), - mRequestedRetransmission(false) { -} - -TunnelRenderer::~TunnelRenderer() { - destroyPlayer(); -} - -void TunnelRenderer::queueBuffer(const sp<ABuffer> &buffer) { - Mutex::Autolock autoLock(mLock); - - mTotalBytesQueued += buffer->size(); - - if (mPackets.empty()) { - mPackets.push_back(buffer); - return; - } - - int32_t newExtendedSeqNo = buffer->int32Data(); - - List<sp<ABuffer> >::iterator firstIt = mPackets.begin(); - List<sp<ABuffer> >::iterator it = --mPackets.end(); - for (;;) { - int32_t extendedSeqNo = (*it)->int32Data(); - - if (extendedSeqNo == newExtendedSeqNo) { - // Duplicate packet. - return; - } - - if (extendedSeqNo < newExtendedSeqNo) { - // Insert new packet after the one at "it". - mPackets.insert(++it, buffer); - return; - } - - if (it == firstIt) { - // Insert new packet before the first existing one. - mPackets.insert(it, buffer); - return; - } - - --it; - } -} - -sp<ABuffer> TunnelRenderer::dequeueBuffer() { - Mutex::Autolock autoLock(mLock); - - sp<ABuffer> buffer; - int32_t extSeqNo; - while (!mPackets.empty()) { - buffer = *mPackets.begin(); - extSeqNo = buffer->int32Data(); - - if (mLastDequeuedExtSeqNo < 0 || extSeqNo > mLastDequeuedExtSeqNo) { - break; - } - - // This is a retransmission of a packet we've already returned. - - mTotalBytesQueued -= buffer->size(); - buffer.clear(); - extSeqNo = -1; - - mPackets.erase(mPackets.begin()); - } - - if (mPackets.empty()) { - if (mFirstFailedAttemptUs < 0ll) { - mFirstFailedAttemptUs = ALooper::GetNowUs(); - mRequestedRetransmission = false; - } else { - ALOGV("no packets available for %.2f secs", - (ALooper::GetNowUs() - mFirstFailedAttemptUs) / 1E6); - } - - return NULL; - } - - if (mLastDequeuedExtSeqNo < 0 || extSeqNo == mLastDequeuedExtSeqNo + 1) { - if (mRequestedRetransmission) { - ALOGI("Recovered after requesting retransmission of %d", - extSeqNo); - } - - mLastDequeuedExtSeqNo = extSeqNo; - mFirstFailedAttemptUs = -1ll; - mRequestedRetransmission = false; - - mPackets.erase(mPackets.begin()); - - mTotalBytesQueued -= buffer->size(); - - return buffer; - } - - if (mFirstFailedAttemptUs < 0ll) { - mFirstFailedAttemptUs = ALooper::GetNowUs(); - - ALOGI("failed to get the correct packet the first time."); - return NULL; - } - - if (mFirstFailedAttemptUs + 50000ll > ALooper::GetNowUs()) { - // We're willing to wait a little while to get the right packet. - - if (!mRequestedRetransmission) { - ALOGI("requesting retransmission of seqNo %d", - (mLastDequeuedExtSeqNo + 1) & 0xffff); - - sp<AMessage> notify = mNotifyLost->dup(); - notify->setInt32("seqNo", (mLastDequeuedExtSeqNo + 1) & 0xffff); - notify->post(); - - mRequestedRetransmission = true; - } else { - ALOGI("still waiting for the correct packet to arrive."); - } - - return NULL; - } - - ALOGI("dropping packet. extSeqNo %d didn't arrive in time", - mLastDequeuedExtSeqNo + 1); - - // Permanent failure, we never received the packet. 
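Summarizing the gap handling in dequeueBuffer above: an in-order head packet is delivered immediately; otherwise the renderer waits up to roughly 50 ms, asking once for a retransmission of (mLastDequeuedExtSeqNo + 1) & 0xffff via the notify-lost message, and only then gives up and skips ahead. A rough sketch of that decision; the enum and helper names are illustrative, not from the original file:

    #include <stdint.h>

    enum GapAction { DELIVER, WAIT_AND_MAYBE_NACK, SKIP_AHEAD };

    static GapAction decideGapAction(
            int32_t lastDequeuedExtSeqNo, int32_t headExtSeqNo,
            int64_t firstFailedAttemptUs, int64_t nowUs) {
        if (lastDequeuedExtSeqNo < 0 || headExtSeqNo == lastDequeuedExtSeqNo + 1) {
            return DELIVER;
        }
        if (firstFailedAttemptUs < 0
                || nowUs < firstFailedAttemptUs + 50000ll) {
            // The first failure starts the clock; a single NACK is sent while waiting.
            return WAIT_AND_MAYBE_NACK;
        }
        return SKIP_AHEAD;  // drop the gap and play on from the head packet
    }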
- mLastDequeuedExtSeqNo = extSeqNo; - mFirstFailedAttemptUs = -1ll; - mRequestedRetransmission = false; - - mTotalBytesQueued -= buffer->size(); - - mPackets.erase(mPackets.begin()); - - return buffer; -} - -void TunnelRenderer::onMessageReceived(const sp<AMessage> &msg) { - switch (msg->what()) { - case kWhatQueueBuffer: - { - sp<ABuffer> buffer; - CHECK(msg->findBuffer("buffer", &buffer)); - - queueBuffer(buffer); - - if (mStreamSource == NULL) { - if (mTotalBytesQueued > 0ll) { - initPlayer(); - } else { - ALOGI("Have %lld bytes queued...", mTotalBytesQueued); - } - } else { - mStreamSource->doSomeWork(); - } - break; - } - - default: - TRESPASS(); - } -} - -void TunnelRenderer::initPlayer() { - if (mSurfaceTex == NULL) { - mComposerClient = new SurfaceComposerClient; - CHECK_EQ(mComposerClient->initCheck(), (status_t)OK); - - DisplayInfo info; - SurfaceComposerClient::getDisplayInfo(0, &info); - ssize_t displayWidth = info.w; - ssize_t displayHeight = info.h; - - mSurfaceControl = - mComposerClient->createSurface( - String8("A Surface"), - displayWidth, - displayHeight, - PIXEL_FORMAT_RGB_565, - 0); - - CHECK(mSurfaceControl != NULL); - CHECK(mSurfaceControl->isValid()); - - SurfaceComposerClient::openGlobalTransaction(); - CHECK_EQ(mSurfaceControl->setLayer(INT_MAX), (status_t)OK); - CHECK_EQ(mSurfaceControl->show(), (status_t)OK); - SurfaceComposerClient::closeGlobalTransaction(); - - mSurface = mSurfaceControl->getSurface(); - CHECK(mSurface != NULL); - } - - sp<IServiceManager> sm = defaultServiceManager(); - sp<IBinder> binder = sm->getService(String16("media.player")); - sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder); - CHECK(service.get() != NULL); - - mStreamSource = new StreamSource(this); - - mPlayerClient = new PlayerClient; - - mPlayer = service->create(getpid(), mPlayerClient, 0); - CHECK(mPlayer != NULL); - CHECK_EQ(mPlayer->setDataSource(mStreamSource), (status_t)OK); - - mPlayer->setVideoSurfaceTexture( - mSurfaceTex != NULL ? mSurfaceTex : mSurface->getSurfaceTexture()); - - mPlayer->start(); -} - -void TunnelRenderer::destroyPlayer() { - mStreamSource.clear(); - - mPlayer->stop(); - mPlayer.clear(); - - if (mSurfaceTex == NULL) { - mSurface.clear(); - mSurfaceControl.clear(); - - mComposerClient->dispose(); - mComposerClient.clear(); - } -} - -} // namespace android - diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.h b/media/libstagefright/wifi-display/sink/TunnelRenderer.h deleted file mode 100644 index c9597e0..0000000 --- a/media/libstagefright/wifi-display/sink/TunnelRenderer.h +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef TUNNEL_RENDERER_H_ - -#define TUNNEL_RENDERER_H_ - -#include <gui/Surface.h> -#include <media/stagefright/foundation/AHandler.h> - -namespace android { - -struct ABuffer; -struct SurfaceComposerClient; -struct SurfaceControl; -struct Surface; -struct IMediaPlayer; -struct IStreamListener; - -// This class reassembles incoming RTP packets into the correct order -// and sends the resulting transport stream to a mediaplayer instance -// for playback. -struct TunnelRenderer : public AHandler { - TunnelRenderer( - const sp<AMessage> ¬ifyLost, - const sp<ISurfaceTexture> &surfaceTex); - - sp<ABuffer> dequeueBuffer(); - - enum { - kWhatQueueBuffer, - }; - -protected: - virtual void onMessageReceived(const sp<AMessage> &msg); - virtual ~TunnelRenderer(); - -private: - struct PlayerClient; - struct StreamSource; - - mutable Mutex mLock; - - sp<AMessage> mNotifyLost; - sp<ISurfaceTexture> mSurfaceTex; - - List<sp<ABuffer> > mPackets; - int64_t mTotalBytesQueued; - - sp<SurfaceComposerClient> mComposerClient; - sp<SurfaceControl> mSurfaceControl; - sp<Surface> mSurface; - sp<PlayerClient> mPlayerClient; - sp<IMediaPlayer> mPlayer; - sp<StreamSource> mStreamSource; - - int32_t mLastDequeuedExtSeqNo; - int64_t mFirstFailedAttemptUs; - bool mRequestedRetransmission; - - void initPlayer(); - void destroyPlayer(); - - void queueBuffer(const sp<ABuffer> &buffer); - - DISALLOW_EVIL_CONSTRUCTORS(TunnelRenderer); -}; - -} // namespace android - -#endif // TUNNEL_RENDERER_H_ diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp deleted file mode 100644 index fcd20d4..0000000 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp +++ /dev/null @@ -1,644 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "WifiDisplaySink" -#include <utils/Log.h> - -#include "WifiDisplaySink.h" -#include "ParsedMessage.h" -#include "RTPSink.h" - -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/MediaErrors.h> - -namespace android { - -WifiDisplaySink::WifiDisplaySink( - const sp<ANetworkSession> &netSession, - const sp<ISurfaceTexture> &surfaceTex) - : mState(UNDEFINED), - mNetSession(netSession), - mSurfaceTex(surfaceTex), - mSessionID(0), - mNextCSeq(1) { -} - -WifiDisplaySink::~WifiDisplaySink() { -} - -void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) { - sp<AMessage> msg = new AMessage(kWhatStart, id()); - msg->setString("sourceHost", sourceHost); - msg->setInt32("sourcePort", sourcePort); - msg->post(); -} - -void WifiDisplaySink::start(const char *uri) { - sp<AMessage> msg = new AMessage(kWhatStart, id()); - msg->setString("setupURI", uri); - msg->post(); -} - -// static -bool WifiDisplaySink::ParseURL( - const char *url, AString *host, int32_t *port, AString *path, - AString *user, AString *pass) { - host->clear(); - *port = 0; - path->clear(); - user->clear(); - pass->clear(); - - if (strncasecmp("rtsp://", url, 7)) { - return false; - } - - const char *slashPos = strchr(&url[7], '/'); - - if (slashPos == NULL) { - host->setTo(&url[7]); - path->setTo("/"); - } else { - host->setTo(&url[7], slashPos - &url[7]); - path->setTo(slashPos); - } - - ssize_t atPos = host->find("@"); - - if (atPos >= 0) { - // Split of user:pass@ from hostname. - - AString userPass(*host, 0, atPos); - host->erase(0, atPos + 1); - - ssize_t colonPos = userPass.find(":"); - - if (colonPos < 0) { - *user = userPass; - } else { - user->setTo(userPass, 0, colonPos); - pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1); - } - } - - const char *colonPos = strchr(host->c_str(), ':'); - - if (colonPos != NULL) { - char *end; - unsigned long x = strtoul(colonPos + 1, &end, 10); - - if (end == colonPos + 1 || *end != '\0' || x >= 65536) { - return false; - } - - *port = x; - - size_t colonOffset = colonPos - host->c_str(); - size_t trailing = host->size() - colonOffset; - host->erase(colonOffset, trailing); - } else { - *port = 554; - } - - return true; -} - -void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) { - switch (msg->what()) { - case kWhatStart: - { - int32_t sourcePort; - - if (msg->findString("setupURI", &mSetupURI)) { - AString path, user, pass; - CHECK(ParseURL( - mSetupURI.c_str(), - &mRTSPHost, &sourcePort, &path, &user, &pass) - && user.empty() && pass.empty()); - } else { - CHECK(msg->findString("sourceHost", &mRTSPHost)); - CHECK(msg->findInt32("sourcePort", &sourcePort)); - } - - sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id()); - - status_t err = mNetSession->createRTSPClient( - mRTSPHost.c_str(), sourcePort, notify, &mSessionID); - CHECK_EQ(err, (status_t)OK); - - mState = CONNECTING; - break; - } - - case kWhatRTSPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - if (sessionID == 
mSessionID) { - ALOGI("Lost control connection."); - - // The control connection is dead now. - mNetSession->destroySession(mSessionID); - mSessionID = 0; - - looper()->stop(); - } - break; - } - - case ANetworkSession::kWhatConnected: - { - ALOGI("We're now connected."); - mState = CONNECTED; - - if (!mSetupURI.empty()) { - status_t err = - sendDescribe(mSessionID, mSetupURI.c_str()); - - CHECK_EQ(err, (status_t)OK); - } - break; - } - - case ANetworkSession::kWhatData: - { - onReceiveClientData(msg); - break; - } - - case ANetworkSession::kWhatBinaryData: - { - CHECK(sUseTCPInterleaving); - - int32_t channel; - CHECK(msg->findInt32("channel", &channel)); - - sp<ABuffer> data; - CHECK(msg->findBuffer("data", &data)); - - mRTPSink->injectPacket(channel == 0 /* isRTP */, data); - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatStop: - { - looper()->stop(); - break; - } - - default: - TRESPASS(); - } -} - -void WifiDisplaySink::registerResponseHandler( - int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) { - ResponseID id; - id.mSessionID = sessionID; - id.mCSeq = cseq; - mResponseHandlers.add(id, func); -} - -status_t WifiDisplaySink::sendM2(int32_t sessionID) { - AString request = "OPTIONS * RTSP/1.0\r\n"; - AppendCommonResponse(&request, mNextCSeq); - - request.append( - "Require: org.wfa.wfd1.0\r\n" - "\r\n"); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::onReceiveM2Response( - int32_t sessionID, const sp<ParsedMessage> &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - return OK; -} - -status_t WifiDisplaySink::onReceiveDescribeResponse( - int32_t sessionID, const sp<ParsedMessage> &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - return sendSetup(sessionID, mSetupURI.c_str()); -} - -status_t WifiDisplaySink::onReceiveSetupResponse( - int32_t sessionID, const sp<ParsedMessage> &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - if (!msg->findString("session", &mPlaybackSessionID)) { - return ERROR_MALFORMED; - } - - if (!ParsedMessage::GetInt32Attribute( - mPlaybackSessionID.c_str(), - "timeout", - &mPlaybackSessionTimeoutSecs)) { - mPlaybackSessionTimeoutSecs = -1; - } - - ssize_t colonPos = mPlaybackSessionID.find(";"); - if (colonPos >= 0) { - // Strip any options from the returned session id. - mPlaybackSessionID.erase( - colonPos, mPlaybackSessionID.size() - colonPos); - } - - status_t err = configureTransport(msg); - - if (err != OK) { - return err; - } - - mState = PAUSED; - - return sendPlay( - sessionID, - !mSetupURI.empty() - ? 
mSetupURI.c_str() : "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); -} - -status_t WifiDisplaySink::configureTransport(const sp<ParsedMessage> &msg) { - if (sUseTCPInterleaving) { - return OK; - } - - AString transport; - if (!msg->findString("transport", &transport)) { - ALOGE("Missing 'transport' field in SETUP response."); - return ERROR_MALFORMED; - } - - AString sourceHost; - if (!ParsedMessage::GetAttribute( - transport.c_str(), "source", &sourceHost)) { - sourceHost = mRTSPHost; - } - - AString serverPortStr; - if (!ParsedMessage::GetAttribute( - transport.c_str(), "server_port", &serverPortStr)) { - ALOGE("Missing 'server_port' in Transport field."); - return ERROR_MALFORMED; - } - - int rtpPort, rtcpPort; - if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2 - || rtpPort <= 0 || rtpPort > 65535 - || rtcpPort <=0 || rtcpPort > 65535 - || rtcpPort != rtpPort + 1) { - ALOGE("Invalid server_port description '%s'.", - serverPortStr.c_str()); - - return ERROR_MALFORMED; - } - - if (rtpPort & 1) { - ALOGW("Server picked an odd numbered RTP port."); - } - - return mRTPSink->connect(sourceHost.c_str(), rtpPort, rtcpPort); -} - -status_t WifiDisplaySink::onReceivePlayResponse( - int32_t sessionID, const sp<ParsedMessage> &msg) { - int32_t statusCode; - if (!msg->getStatusCode(&statusCode)) { - return ERROR_MALFORMED; - } - - if (statusCode != 200) { - return ERROR_UNSUPPORTED; - } - - mState = PLAYING; - - return OK; -} - -void WifiDisplaySink::onReceiveClientData(const sp<AMessage> &msg) { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp<RefBase> obj; - CHECK(msg->findObject("data", &obj)); - - sp<ParsedMessage> data = - static_cast<ParsedMessage *>(obj.get()); - - ALOGV("session %d received '%s'", - sessionID, data->debugString().c_str()); - - AString method; - AString uri; - data->getRequestField(0, &method); - - int32_t cseq; - if (!data->findInt32("cseq", &cseq)) { - sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */); - return; - } - - if (method.startsWith("RTSP/")) { - // This is a response. 
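For reference, the server_port parsing in configureTransport above expects a Transport header along these lines; the concrete values here are hypothetical, real ones come from the WFD source:

    // Hypothetical SETUP response header:
    //   Transport: RTP/AVP/UDP;unicast;client_port=15550-15551;server_port=16384-16385
    #include <cstdio>

    const char *serverPortStr = "16384-16385";

    int rtpPort, rtcpPort;
    if (sscanf(serverPortStr, "%d-%d", &rtpPort, &rtcpPort) == 2
            && rtcpPort == rtpPort + 1) {
        // rtpPort == 16384, rtcpPort == 16385, handed on to mRTPSink->connect().
    }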
- - ResponseID id; - id.mSessionID = sessionID; - id.mCSeq = cseq; - - ssize_t index = mResponseHandlers.indexOfKey(id); - - if (index < 0) { - ALOGW("Received unsolicited server response, cseq %d", cseq); - return; - } - - HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index); - mResponseHandlers.removeItemsAt(index); - - status_t err = (this->*func)(sessionID, data); - CHECK_EQ(err, (status_t)OK); - } else { - AString version; - data->getRequestField(2, &version); - if (!(version == AString("RTSP/1.0"))) { - sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq); - return; - } - - if (method == "OPTIONS") { - onOptionsRequest(sessionID, cseq, data); - } else if (method == "GET_PARAMETER") { - onGetParameterRequest(sessionID, cseq, data); - } else if (method == "SET_PARAMETER") { - onSetParameterRequest(sessionID, cseq, data); - } else { - sendErrorResponse(sessionID, "405 Method Not Allowed", cseq); - } - } -} - -void WifiDisplaySink::onOptionsRequest( - int32_t sessionID, - int32_t cseq, - const sp<ParsedMessage> &data) { - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, cseq); - response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n"); - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); - - err = sendM2(sessionID); - CHECK_EQ(err, (status_t)OK); -} - -void WifiDisplaySink::onGetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp<ParsedMessage> &data) { - AString body = - "wfd_video_formats: xxx\r\n" - "wfd_audio_codecs: xxx\r\n" - "wfd_client_rtp_ports: RTP/AVP/UDP;unicast xxx 0 mode=play\r\n"; - - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, cseq); - response.append("Content-Type: text/parameters\r\n"); - response.append(StringPrintf("Content-Length: %d\r\n", body.size())); - response.append("\r\n"); - response.append(body); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -status_t WifiDisplaySink::sendDescribe(int32_t sessionID, const char *uri) { - uri = "rtsp://xwgntvx.is.livestream-api.com/livestreamiphone/wgntv"; - uri = "rtsp://v2.cache6.c.youtube.com/video.3gp?cid=e101d4bf280055f9&fmt=18"; - - AString request = StringPrintf("DESCRIBE %s RTSP/1.0\r\n", uri); - AppendCommonResponse(&request, mNextCSeq); - - request.append("Accept: application/sdp\r\n"); - request.append("\r\n"); - - status_t err = mNetSession->sendRequest( - sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveDescribeResponse); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) { - mRTPSink = new RTPSink(mNetSession, mSurfaceTex); - looper()->registerHandler(mRTPSink); - - status_t err = mRTPSink->init(sUseTCPInterleaving); - - if (err != OK) { - looper()->unregisterHandler(mRTPSink->id()); - mRTPSink.clear(); - return err; - } - - AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri); - - AppendCommonResponse(&request, mNextCSeq); - - if (sUseTCPInterleaving) { - request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n"); - } else { - int32_t rtpPort = mRTPSink->getRTPPort(); - - request.append( - StringPrintf( - "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n", - rtpPort, rtpPort + 1)); - } - - request.append("\r\n"); - - ALOGV("request = '%s'", request.c_str()); - - err = 
mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse); - - ++mNextCSeq; - - return OK; -} - -status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) { - AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri); - - AppendCommonResponse(&request, mNextCSeq); - - request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str())); - request.append("\r\n"); - - status_t err = - mNetSession->sendRequest(sessionID, request.c_str(), request.size()); - - if (err != OK) { - return err; - } - - registerResponseHandler( - sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse); - - ++mNextCSeq; - - return OK; -} - -void WifiDisplaySink::onSetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp<ParsedMessage> &data) { - const char *content = data->getContent(); - - if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) { - status_t err = - sendSetup( - sessionID, - "rtsp://x.x.x.x:x/wfd1.0/streamid=0"); - - CHECK_EQ(err, (status_t)OK); - } - - AString response = "RTSP/1.0 200 OK\r\n"; - AppendCommonResponse(&response, cseq); - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -void WifiDisplaySink::sendErrorResponse( - int32_t sessionID, - const char *errorDetail, - int32_t cseq) { - AString response; - response.append("RTSP/1.0 "); - response.append(errorDetail); - response.append("\r\n"); - - AppendCommonResponse(&response, cseq); - - response.append("\r\n"); - - status_t err = mNetSession->sendRequest(sessionID, response.c_str()); - CHECK_EQ(err, (status_t)OK); -} - -// static -void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) { - time_t now = time(NULL); - struct tm *now2 = gmtime(&now); - char buf[128]; - strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2); - - response->append("Date: "); - response->append(buf); - response->append("\r\n"); - - response->append("User-Agent: stagefright/1.1 (Linux;Android 4.1)\r\n"); - - if (cseq >= 0) { - response->append(StringPrintf("CSeq: %d\r\n", cseq)); - } -} - -} // namespace android diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h deleted file mode 100644 index f886ee5..0000000 --- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef WIFI_DISPLAY_SINK_H_ - -#define WIFI_DISPLAY_SINK_H_ - -#include "ANetworkSession.h" - -#include <gui/Surface.h> -#include <media/stagefright/foundation/AHandler.h> - -namespace android { - -struct ParsedMessage; -struct RTPSink; - -// Represents the RTSP client acting as a wifi display sink. 
-// Connects to a wifi display source and renders the incoming -// transport stream using a MediaPlayer instance. -struct WifiDisplaySink : public AHandler { - WifiDisplaySink( - const sp<ANetworkSession> &netSession, - const sp<ISurfaceTexture> &surfaceTex = NULL); - - void start(const char *sourceHost, int32_t sourcePort); - void start(const char *uri); - -protected: - virtual ~WifiDisplaySink(); - virtual void onMessageReceived(const sp<AMessage> &msg); - -private: - enum State { - UNDEFINED, - CONNECTING, - CONNECTED, - PAUSED, - PLAYING, - }; - - enum { - kWhatStart, - kWhatRTSPNotify, - kWhatStop, - }; - - struct ResponseID { - int32_t mSessionID; - int32_t mCSeq; - - bool operator<(const ResponseID &other) const { - return mSessionID < other.mSessionID - || (mSessionID == other.mSessionID - && mCSeq < other.mCSeq); - } - }; - - typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)( - int32_t sessionID, const sp<ParsedMessage> &msg); - - static const bool sUseTCPInterleaving = false; - - State mState; - sp<ANetworkSession> mNetSession; - sp<ISurfaceTexture> mSurfaceTex; - AString mSetupURI; - AString mRTSPHost; - int32_t mSessionID; - - int32_t mNextCSeq; - - KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers; - - sp<RTPSink> mRTPSink; - AString mPlaybackSessionID; - int32_t mPlaybackSessionTimeoutSecs; - - status_t sendM2(int32_t sessionID); - status_t sendDescribe(int32_t sessionID, const char *uri); - status_t sendSetup(int32_t sessionID, const char *uri); - status_t sendPlay(int32_t sessionID, const char *uri); - - status_t onReceiveM2Response( - int32_t sessionID, const sp<ParsedMessage> &msg); - - status_t onReceiveDescribeResponse( - int32_t sessionID, const sp<ParsedMessage> &msg); - - status_t onReceiveSetupResponse( - int32_t sessionID, const sp<ParsedMessage> &msg); - - status_t configureTransport(const sp<ParsedMessage> &msg); - - status_t onReceivePlayResponse( - int32_t sessionID, const sp<ParsedMessage> &msg); - - void registerResponseHandler( - int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func); - - void onReceiveClientData(const sp<AMessage> &msg); - - void onOptionsRequest( - int32_t sessionID, - int32_t cseq, - const sp<ParsedMessage> &data); - - void onGetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp<ParsedMessage> &data); - - void onSetParameterRequest( - int32_t sessionID, - int32_t cseq, - const sp<ParsedMessage> &data); - - void sendErrorResponse( - int32_t sessionID, - const char *errorDetail, - int32_t cseq); - - static void AppendCommonResponse(AString *response, int32_t cseq); - - bool ParseURL( - const char *url, AString *host, int32_t *port, AString *path, - AString *user, AString *pass); - - DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink); -}; - -} // namespace android - -#endif // WIFI_DISPLAY_SINK_H_ diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 7a87444..5344623 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -23,7 +23,7 @@ #include "MediaPuller.h" #include <cutils/properties.h> -#include <gui/SurfaceTextureClient.h> +#include <gui/Surface.h> #include <media/ICrypto.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> @@ -54,6 +54,8 @@ Converter::Converter( ,mFirstSilentFrameUs(-1ll) ,mInSilentMode(false) #endif + ,mPrevVideoBitrate(-1) + ,mNumFramesToDrop(0) { AString mime; 
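The two members initialized just above are the hooks this change adds to Converter: mPrevVideoBitrate remembers the last video bitrate handed to the encoder so setVideoBitrate() can skip redundant setParameters() calls, and mNumFramesToDrop counts dropAFrame() requests so incoming video access units can be discarded before encoding. A caller-side sketch, where converter stands for a sp<Converter> such as a Track's mConverter and the congestion condition is hypothetical, not part of this patch:

    // Illustrative reaction to congestion using the new Converter hooks.
    if (networkIsCongested) {                        // hypothetical condition
        converter->dropAFrame();                     // posts kWhatDropAFrame
        converter->setVideoBitrate(
                converter->getVideoBitrate() * 3 / 4);  // no-op if unchanged
    }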
CHECK(mInputFormat->findString("mime", &mime)); @@ -67,11 +69,45 @@ Converter::Converter( mInitCheck = initEncoder(); if (mInitCheck != OK) { - if (mEncoder != NULL) { - mEncoder->release(); - mEncoder.clear(); - } + releaseEncoder(); + } +} + +static void ReleaseMediaBufferReference(const sp<ABuffer> &accessUnit) { + void *mbuf; + if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) + && mbuf != NULL) { + ALOGV("releasing mbuf %p", mbuf); + + accessUnit->meta()->setPointer("mediaBuffer", NULL); + + static_cast<MediaBuffer *>(mbuf)->release(); + mbuf = NULL; + } +} + +void Converter::releaseEncoder() { + if (mEncoder == NULL) { + return; + } + + mEncoder->release(); + mEncoder.clear(); + + while (!mInputBufferQueue.empty()) { + sp<ABuffer> accessUnit = *mInputBufferQueue.begin(); + mInputBufferQueue.erase(mInputBufferQueue.begin()); + + ReleaseMediaBufferReference(accessUnit); } + + for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) { + sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i); + ReleaseMediaBufferReference(accessUnit); + } + + mEncoderInputBuffers.clear(); + mEncoderOutputBuffers.clear(); } Converter::~Converter() { @@ -99,7 +135,9 @@ bool Converter::needToManuallyPrependSPSPPS() const { return mNeedToManuallyPrependSPSPPS; } -static int32_t getBitrate(const char *propName, int32_t defaultValue) { +// static +int32_t Converter::GetInt32Property( + const char *propName, int32_t defaultValue) { char val[PROPERTY_VALUE_MAX]; if (property_get(propName, val, NULL)) { char *end; @@ -149,8 +187,9 @@ status_t Converter::initEncoder() { mOutputFormat->setString("mime", outputMIME.c_str()); - int32_t audioBitrate = getBitrate("media.wfd.audio-bitrate", 128000); - int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000); + int32_t audioBitrate = GetInt32Property("media.wfd.audio-bitrate", 128000); + int32_t videoBitrate = GetInt32Property("media.wfd.video-bitrate", 5000000); + mPrevVideoBitrate = videoBitrate; ALOGI("using audio bitrate of %d bps, video bitrate of %d bps", audioBitrate, videoBitrate); @@ -274,16 +313,7 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) { sp<ABuffer> accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); - void *mbuf; - if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) - && mbuf != NULL) { - ALOGV("releasing mbuf %p", mbuf); - - accessUnit->meta()->setPointer("mediaBuffer", NULL); - - static_cast<MediaBuffer *>(mbuf)->release(); - mbuf = NULL; - } + ReleaseMediaBufferReference(accessUnit); } break; } @@ -300,6 +330,13 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) { sp<ABuffer> accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); + if (mIsVideo && mNumFramesToDrop) { + --mNumFramesToDrop; + ALOGI("dropping frame."); + ReleaseMediaBufferReference(accessUnit); + break; + } + #if 0 void *mbuf; if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf) @@ -385,12 +422,9 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) { case kWhatShutdown: { - ALOGI("shutting down encoder"); + ALOGI("shutting down %s encoder", mIsVideo ? 
"video" : "audio"); - if (mEncoder != NULL) { - mEncoder->release(); - mEncoder.clear(); - } + releaseEncoder(); AString mime; CHECK(mInputFormat->findString("mime", &mime)); @@ -398,6 +432,12 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) { break; } + case kWhatDropAFrame: + { + ++mNumFramesToDrop; + break; + } + default: TRESPASS(); } @@ -609,6 +649,13 @@ status_t Converter::doMoreWork() { &bufferIndex, &offset, &size, &timeUs, &flags); if (err != OK) { + if (err == INFO_FORMAT_CHANGED) { + continue; + } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { + mEncoder->getOutputBuffers(&mEncoderOutputBuffers); + continue; + } + if (err == -EAGAIN) { err = OK; } @@ -654,4 +701,23 @@ void Converter::requestIDRFrame() { (new AMessage(kWhatRequestIDRFrame, id()))->post(); } +void Converter::dropAFrame() { + (new AMessage(kWhatDropAFrame, id()))->post(); +} + +int32_t Converter::getVideoBitrate() const { + return mPrevVideoBitrate; +} + +void Converter::setVideoBitrate(int32_t bitRate) { + if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) { + sp<AMessage> params = new AMessage; + params->setInt32("videoBitrate", bitRate); + + mEncoder->setParameters(params); + + mPrevVideoBitrate = bitRate; + } +} + } // namespace android diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index 0665eea..ba297c4 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -51,6 +51,8 @@ struct Converter : public AHandler { void requestIDRFrame(); + void dropAFrame(); + enum { kWhatAccessUnit, kWhatEOS, @@ -63,10 +65,16 @@ struct Converter : public AHandler { kWhatShutdown, kWhatMediaPullerNotify, kWhatEncoderActivity, + kWhatDropAFrame, }; void shutdownAsync(); + int32_t getVideoBitrate() const; + void setVideoBitrate(int32_t bitrate); + + static int32_t GetInt32Property(const char *propName, int32_t defaultValue); + protected: virtual ~Converter(); virtual void onMessageReceived(const sp<AMessage> &msg); @@ -100,7 +108,12 @@ private: sp<ABuffer> mPartialAudioAU; + int32_t mPrevVideoBitrate; + + int32_t mNumFramesToDrop; + status_t initEncoder(); + void releaseEncoder(); status_t feedEncoderInputBuffers(); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 916f797..3d7b865 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -23,14 +23,11 @@ #include "Converter.h" #include "MediaPuller.h" #include "RepeaterSource.h" -#include "Sender.h" -#include "TSPacketizer.h" #include "include/avc_utils.h" #include "WifiDisplaySource.h" #include <binder/IServiceManager.h> -#include <gui/ISurfaceComposer.h> -#include <gui/SurfaceComposerClient.h> +#include <cutils/properties.h> #include <media/IHDCP.h> #include <media/stagefright/foundation/ABitReader.h> #include <media/stagefright/foundation/ABuffer.h> @@ -41,10 +38,9 @@ #include <media/stagefright/DataSource.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaErrors.h> -#include <media/stagefright/MediaExtractor.h> #include <media/stagefright/MediaSource.h> #include <media/stagefright/MetaData.h> -#include <media/stagefright/MPEG2TSWriter.h> +#include <media/stagefright/NuMediaExtractor.h> #include <media/stagefright/SurfaceMediaSource.h> #include <media/stagefright/Utils.h> @@ -63,15 +59,18 @@ struct 
WifiDisplaySource::PlaybackSession::Track : public AHandler { const sp<MediaPuller> &mediaPuller, const sp<Converter> &converter); + Track(const sp<AMessage> ¬ify, const sp<AMessage> &format); + void setRepeaterSource(const sp<RepeaterSource> &source); sp<AMessage> getFormat(); bool isAudio() const; const sp<Converter> &converter() const; - ssize_t packetizerTrackIndex() const; + const sp<RepeaterSource> &repeaterSource() const; - void setPacketizerTrackIndex(size_t index); + ssize_t mediaSenderTrackIndex() const; + void setMediaSenderTrackIndex(size_t index); status_t start(); void stopAsync(); @@ -110,8 +109,9 @@ private: sp<ALooper> mCodecLooper; sp<MediaPuller> mMediaPuller; sp<Converter> mConverter; + sp<AMessage> mFormat; bool mStarted; - ssize_t mPacketizerTrackIndex; + ssize_t mMediaSenderTrackIndex; bool mIsAudio; List<sp<ABuffer> > mQueuedAccessUnits; sp<RepeaterSource> mRepeaterSource; @@ -135,11 +135,19 @@ WifiDisplaySource::PlaybackSession::Track::Track( mMediaPuller(mediaPuller), mConverter(converter), mStarted(false), - mPacketizerTrackIndex(-1), mIsAudio(IsAudioFormat(mConverter->getOutputFormat())), mLastOutputBufferQueuedTimeUs(-1ll) { } +WifiDisplaySource::PlaybackSession::Track::Track( + const sp<AMessage> ¬ify, const sp<AMessage> &format) + : mNotify(notify), + mFormat(format), + mStarted(false), + mIsAudio(IsAudioFormat(format)), + mLastOutputBufferQueuedTimeUs(-1ll) { +} + WifiDisplaySource::PlaybackSession::Track::~Track() { CHECK(!mStarted); } @@ -154,7 +162,7 @@ bool WifiDisplaySource::PlaybackSession::Track::IsAudioFormat( } sp<AMessage> WifiDisplaySource::PlaybackSession::Track::getFormat() { - return mConverter->getOutputFormat(); + return mFormat != NULL ? mFormat : mConverter->getOutputFormat(); } bool WifiDisplaySource::PlaybackSession::Track::isAudio() const { @@ -165,13 +173,19 @@ const sp<Converter> &WifiDisplaySource::PlaybackSession::Track::converter() cons return mConverter; } -ssize_t WifiDisplaySource::PlaybackSession::Track::packetizerTrackIndex() const { - return mPacketizerTrackIndex; +const sp<RepeaterSource> & +WifiDisplaySource::PlaybackSession::Track::repeaterSource() const { + return mRepeaterSource; +} + +ssize_t WifiDisplaySource::PlaybackSession::Track::mediaSenderTrackIndex() const { + CHECK_GE(mMediaSenderTrackIndex, 0); + return mMediaSenderTrackIndex; } -void WifiDisplaySource::PlaybackSession::Track::setPacketizerTrackIndex(size_t index) { - CHECK_LT(mPacketizerTrackIndex, 0); - mPacketizerTrackIndex = index; +void WifiDisplaySource::PlaybackSession::Track::setMediaSenderTrackIndex( + size_t index) { + mMediaSenderTrackIndex = index; } status_t WifiDisplaySource::PlaybackSession::Track::start() { @@ -195,7 +209,9 @@ status_t WifiDisplaySource::PlaybackSession::Track::start() { void WifiDisplaySource::PlaybackSession::Track::stopAsync() { ALOGV("Track::stopAsync isAudio=%d", mIsAudio); - mConverter->shutdownAsync(); + if (mConverter != NULL) { + mConverter->shutdownAsync(); + } sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, id()); @@ -207,6 +223,7 @@ void WifiDisplaySource::PlaybackSession::Track::stopAsync() { mMediaPuller->stopAsync(msg); } else { + mStarted = false; msg->post(); } } @@ -330,45 +347,68 @@ WifiDisplaySource::PlaybackSession::PlaybackSession( const sp<ANetworkSession> &netSession, const sp<AMessage> ¬ify, const in_addr &interfaceAddr, - const sp<IHDCP> &hdcp) + const sp<IHDCP> &hdcp, + const char *path) : mNetSession(netSession), mNotify(notify), mInterfaceAddr(interfaceAddr), mHDCP(hdcp), + 
mLocalRTPPort(-1), mWeAreDead(false), mPaused(false), mLastLifesignUs(), mVideoTrackIndex(-1), mPrevTimeUs(-1ll), - mAllTracksHavePacketizerIndex(false) { + mPullExtractorPending(false), + mPullExtractorGeneration(0), + mFirstSampleTimeRealUs(-1ll), + mFirstSampleTimeUs(-1ll) { + if (path != NULL) { + mMediaPath.setTo(path); + } } status_t WifiDisplaySource::PlaybackSession::init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - Sender::TransportMode transportMode, - bool usePCMAudio) { - status_t err = setupPacketizer(usePCMAudio); + const char *clientIP, + int32_t clientRtp, + RTPSender::TransportMode rtpMode, + int32_t clientRtcp, + RTPSender::TransportMode rtcpMode, + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex) { + sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, id()); + mMediaSender = new MediaSender(mNetSession, notify); + looper()->registerHandler(mMediaSender); + + mMediaSender->setHDCP(mHDCP); + + status_t err = setupPacketizer( + enableAudio, + usePCMAudio, + enableVideo, + videoResolutionType, + videoResolutionIndex); - if (err != OK) { - return err; + if (err == OK) { + err = mMediaSender->initAsync( + -1 /* trackIndex */, + clientIP, + clientRtp, + rtpMode, + clientRtcp, + rtcpMode, + &mLocalRTPPort); } - sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); - mSender = new Sender(mNetSession, notify); - - mSenderLooper = new ALooper; - mSenderLooper->setName("sender_looper"); - - mSenderLooper->start( - false /* runOnCallingThread */, - false /* canCallJava */, - PRIORITY_AUDIO); - - mSenderLooper->registerHandler(mSender); + if (err != OK) { + mLocalRTPPort = -1; - err = mSender->init(clientIP, clientRtp, clientRtcp, transportMode); + looper()->unregisterHandler(mMediaSender->id()); + mMediaSender.clear(); - if (err != OK) { return err; } @@ -381,7 +421,7 @@ WifiDisplaySource::PlaybackSession::~PlaybackSession() { } int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const { - return mSender->getRTPPort(); + return mLocalRTPPort; } int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const { @@ -400,19 +440,7 @@ status_t WifiDisplaySource::PlaybackSession::play() { return OK; } -status_t WifiDisplaySource::PlaybackSession::finishPlay() { - // XXX Give the dongle a second to bind its sockets. - (new AMessage(kWhatFinishPlay, id()))->post(1000000ll); - return OK; -} - -status_t WifiDisplaySource::PlaybackSession::onFinishPlay() { - return mSender->finishInit(); -} - -status_t WifiDisplaySource::PlaybackSession::onFinishPlay2() { - mSender->scheduleSendSR(); - +status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() { for (size_t i = 0; i < mTracks.size(); ++i) { CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start()); } @@ -459,44 +487,18 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( CHECK(msg->findSize("trackIndex", &trackIndex)); if (what == Converter::kWhatAccessUnit) { - const sp<Track> &track = mTracks.valueFor(trackIndex); - - ssize_t packetizerTrackIndex = track->packetizerTrackIndex(); - - if (packetizerTrackIndex < 0) { - sp<AMessage> trackFormat = track->getFormat()->dup(); - if (mHDCP != NULL && !track->isAudio()) { - // HDCP2.0 _and_ HDCP 2.1 specs say to set the version - // inside the HDCP descriptor to 0x20!!! 
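// --- Editorial sketch (illustrative, not taken from this change-set) ---
// The branch removed above used to tag the video track's format with
// "hdcp-version" so the packetizer could emit an HDCP registration descriptor;
// after this change the descriptor is driven by TSPacketizer flags instead (see
// the TSPacketizer.cpp hunks further down).  The quirk called out in the removed
// comment -- HDCP 2.0 *and* HDCP 2.1 both advertise 0x20 in the descriptor --
// can be captured in a tiny hypothetical helper:

#include <stdint.h>

// Hypothetical helper; only the 0x21 -> 0x20 collapse is taken from the diff.
static inline uint8_t hdcpDescriptorVersion(uint8_t negotiatedVersion) {
    return (negotiatedVersion == 0x21) ? 0x20 : negotiatedVersion;
}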
- trackFormat->setInt32("hdcp-version", 0x20); - } - packetizerTrackIndex = mPacketizer->addTrack(trackFormat); - - CHECK_GE(packetizerTrackIndex, 0); - - track->setPacketizerTrackIndex(packetizerTrackIndex); - - if (allTracksHavePacketizerIndex()) { - status_t err = packetizeQueuedAccessUnits(); - - if (err != OK) { - notifySessionDead(); - break; - } - } - } - sp<ABuffer> accessUnit; CHECK(msg->findBuffer("accessUnit", &accessUnit)); - if (!allTracksHavePacketizerIndex()) { - track->queueAccessUnit(accessUnit); - break; - } + const sp<Track> &track = mTracks.valueFor(trackIndex); - track->queueOutputBuffer(accessUnit); + status_t err = mMediaSender->queueAccessUnit( + track->mediaSenderTrackIndex(), + accessUnit); - drainAccessUnits(); + if (err != OK) { + notifySessionDead(); + } break; } else if (what == Converter::kWhatEOS) { CHECK_EQ(what, Converter::kWhatEOS); @@ -528,25 +530,38 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } - case kWhatSenderNotify: + case kWhatMediaSenderNotify: { int32_t what; CHECK(msg->findInt32("what", &what)); - if (what == Sender::kWhatInitDone) { - onFinishPlay2(); - } else if (what == Sender::kWhatSessionDead) { + if (what == MediaSender::kWhatInitDone) { + status_t err; + CHECK(msg->findInt32("err", &err)); + + if (err == OK) { + onMediaSenderInitialized(); + } else { + notifySessionDead(); + } + } else if (what == MediaSender::kWhatError) { notifySessionDead(); + } else if (what == MediaSender::kWhatNetworkStall) { + size_t numBytesQueued; + CHECK(msg->findSize("numBytesQueued", &numBytesQueued)); + + if (mVideoTrackIndex >= 0) { + const sp<Track> &videoTrack = + mTracks.valueFor(mVideoTrackIndex); + + sp<Converter> converter = videoTrack->converter(); + if (converter != NULL) { + converter->dropAFrame(); + } + } } else { TRESPASS(); } - - break; - } - - case kWhatFinishPlay: - { - onFinishPlay(); break; } @@ -571,11 +586,8 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } - mSenderLooper->unregisterHandler(mSender->id()); - mSender.clear(); - mSenderLooper.clear(); - - mPacketizer.clear(); + looper()->unregisterHandler(mMediaSender->id()); + mMediaSender.clear(); sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatSessionDestroyed); @@ -584,30 +596,14 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } - case kWhatPacketize: + case kWhatPause: { - size_t trackIndex; - CHECK(msg->findSize("trackIndex", &trackIndex)); - - sp<ABuffer> accessUnit; - CHECK(msg->findBuffer("accessUnit", &accessUnit)); - -#if 0 - if ((ssize_t)trackIndex == mVideoTrackIndex) { - int64_t nowUs = ALooper::GetNowUs(); - static int64_t prevNowUs = 0ll; - - ALOGI("sending AU, dNowUs=%lld us", nowUs - prevNowUs); - - prevNowUs = nowUs; + if (mExtractor != NULL) { + ++mPullExtractorGeneration; + mFirstSampleTimeRealUs = -1ll; + mFirstSampleTimeUs = -1ll; } -#endif - break; - } - - case kWhatPause: - { if (mPaused) { break; } @@ -622,6 +618,10 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( case kWhatResume: { + if (mExtractor != NULL) { + schedulePullExtractor(); + } + if (!mPaused) { break; } @@ -634,20 +634,177 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived( break; } + case kWhatPullExtractorSample: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mPullExtractorGeneration) { + break; + } + + mPullExtractorPending = false; + + onPullExtractor(); + break; + } + default: TRESPASS(); } } -status_t 
WifiDisplaySource::PlaybackSession::setupPacketizer(bool usePCMAudio) { - mPacketizer = new TSPacketizer; +status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer( + bool enableAudio, bool enableVideo) { + DataSource::RegisterDefaultSniffers(); - status_t err = addVideoSource(); + mExtractor = new NuMediaExtractor; + + status_t err = mExtractor->setDataSource(mMediaPath.c_str()); if (err != OK) { return err; } + size_t n = mExtractor->countTracks(); + bool haveAudio = false; + bool haveVideo = false; + for (size_t i = 0; i < n; ++i) { + sp<AMessage> format; + err = mExtractor->getTrackFormat(i, &format); + + if (err != OK) { + continue; + } + + AString mime; + CHECK(format->findString("mime", &mime)); + + bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6); + bool isVideo = !strncasecmp(mime.c_str(), "video/", 6); + + if (isAudio && enableAudio && !haveAudio) { + haveAudio = true; + } else if (isVideo && enableVideo && !haveVideo) { + haveVideo = true; + } else { + continue; + } + + err = mExtractor->selectTrack(i); + + size_t trackIndex = mTracks.size(); + + sp<AMessage> notify = new AMessage(kWhatTrackNotify, id()); + notify->setSize("trackIndex", trackIndex); + + sp<Track> track = new Track(notify, format); + looper()->registerHandler(track); + + mTracks.add(trackIndex, track); + + mExtractorTrackToInternalTrack.add(i, trackIndex); + + if (isVideo) { + mVideoTrackIndex = trackIndex; + } + + uint32_t flags = MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS; + + ssize_t mediaSenderTrackIndex = + mMediaSender->addTrack(format, flags); + CHECK_GE(mediaSenderTrackIndex, 0); + + track->setMediaSenderTrackIndex(mediaSenderTrackIndex); + + if ((haveAudio || !enableAudio) && (haveVideo || !enableVideo)) { + break; + } + } + + return OK; +} + +void WifiDisplaySource::PlaybackSession::schedulePullExtractor() { + if (mPullExtractorPending) { + return; + } + + int64_t sampleTimeUs; + status_t err = mExtractor->getSampleTime(&sampleTimeUs); + + int64_t nowUs = ALooper::GetNowUs(); + + if (mFirstSampleTimeRealUs < 0ll) { + mFirstSampleTimeRealUs = nowUs; + mFirstSampleTimeUs = sampleTimeUs; + } + + int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs; + + sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, id()); + msg->setInt32("generation", mPullExtractorGeneration); + msg->post(whenUs - nowUs); + + mPullExtractorPending = true; +} + +void WifiDisplaySource::PlaybackSession::onPullExtractor() { + sp<ABuffer> accessUnit = new ABuffer(1024 * 1024); + status_t err = mExtractor->readSampleData(accessUnit); + if (err != OK) { + // EOS. 
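// --- Editorial sketch (illustrative, not taken from this change-set) ---
// schedulePullExtractor() above pins the first extracted sample to "now" and then
// posts every later sample at
//     whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs,
// i.e. the file's media timeline is replayed on the wall clock.  A standalone
// model of that anchoring, with hypothetical names:

#include <stdint.h>

struct SampleClock {
    int64_t firstSampleTimeUs = -1;      // media time of the first sample
    int64_t firstSampleTimeRealUs = -1;  // wall-clock time it was scheduled at

    // How long (in us) to wait before handling a sample with the given media
    // timestamp, given the current wall-clock time; <= 0 means "right away".
    int64_t delayUs(int64_t sampleTimeUs, int64_t nowUs) {
        if (firstSampleTimeRealUs < 0) {
            firstSampleTimeRealUs = nowUs;
            firstSampleTimeUs = sampleTimeUs;
        }
        int64_t whenUs = sampleTimeUs - firstSampleTimeUs + firstSampleTimeRealUs;
        return whenUs - nowUs;
    }
};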
+ return; + } + + int64_t timeUs; + CHECK_EQ((status_t)OK, mExtractor->getSampleTime(&timeUs)); + + accessUnit->meta()->setInt64( + "timeUs", mFirstSampleTimeRealUs + timeUs - mFirstSampleTimeUs); + + size_t trackIndex; + CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex)); + + sp<AMessage> msg = new AMessage(kWhatConverterNotify, id()); + + msg->setSize( + "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex)); + + msg->setInt32("what", Converter::kWhatAccessUnit); + msg->setBuffer("accessUnit", accessUnit); + msg->post(); + + mExtractor->advance(); + + schedulePullExtractor(); +} + +status_t WifiDisplaySource::PlaybackSession::setupPacketizer( + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex) { + CHECK(enableAudio || enableVideo); + + if (!mMediaPath.empty()) { + return setupMediaPacketizer(enableAudio, enableVideo); + } + + if (enableVideo) { + status_t err = addVideoSource( + videoResolutionType, videoResolutionIndex); + + if (err != OK) { + return err; + } + } + + if (!enableAudio) { + return OK; + } + return addAudioSource(usePCMAudio); } @@ -732,30 +889,44 @@ status_t WifiDisplaySource::PlaybackSession::addSource( mVideoTrackIndex = trackIndex; } + uint32_t flags = 0; + if (converter->needToManuallyPrependSPSPPS()) { + flags |= MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS; + } + + ssize_t mediaSenderTrackIndex = + mMediaSender->addTrack(converter->getOutputFormat(), flags); + CHECK_GE(mediaSenderTrackIndex, 0); + + track->setMediaSenderTrackIndex(mediaSenderTrackIndex); + return OK; } -status_t WifiDisplaySource::PlaybackSession::addVideoSource() { - sp<SurfaceMediaSource> source = new SurfaceMediaSource(width(), height()); +status_t WifiDisplaySource::PlaybackSession::addVideoSource( + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex) { + size_t width, height, framesPerSecond; + bool interlaced; + CHECK(VideoFormats::GetConfiguration( + videoResolutionType, + videoResolutionIndex, + &width, + &height, + &framesPerSecond, + &interlaced)); + + sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height); source->setUseAbsoluteTimestamps(); -#if 1 sp<RepeaterSource> videoSource = - new RepeaterSource(source, 30.0 /* rateHz */); -#endif + new RepeaterSource(source, framesPerSecond); -#if 1 size_t numInputBuffers; status_t err = addSource( true /* isVideo */, videoSource, true /* isRepeaterSource */, false /* usePCMAudio */, &numInputBuffers); -#else - size_t numInputBuffers; - status_t err = addSource( - true /* isVideo */, source, false /* isRepeaterSource */, - false /* usePCMAudio */, &numInputBuffers); -#endif if (err != OK) { return err; @@ -786,26 +957,10 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) { return OK; } -sp<ISurfaceTexture> WifiDisplaySource::PlaybackSession::getSurfaceTexture() { +sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() { return mBufferQueue; } -int32_t WifiDisplaySource::PlaybackSession::width() const { -#if USE_1080P - return 1920; -#else - return 1280; -#endif -} - -int32_t WifiDisplaySource::PlaybackSession::height() const { -#if USE_1080P - return 1080; -#else - return 720; -#endif -} - void WifiDisplaySource::PlaybackSession::requestIDRFrame() { for (size_t i = 0; i < mTracks.size(); ++i) { const sp<Track> &track = mTracks.valueAt(i); @@ -814,168 +969,6 @@ void WifiDisplaySource::PlaybackSession::requestIDRFrame() { } } -bool 
WifiDisplaySource::PlaybackSession::allTracksHavePacketizerIndex() { - if (mAllTracksHavePacketizerIndex) { - return true; - } - - for (size_t i = 0; i < mTracks.size(); ++i) { - if (mTracks.valueAt(i)->packetizerTrackIndex() < 0) { - return false; - } - } - - mAllTracksHavePacketizerIndex = true; - - return true; -} - -status_t WifiDisplaySource::PlaybackSession::packetizeAccessUnit( - size_t trackIndex, sp<ABuffer> accessUnit, - sp<ABuffer> *packets) { - const sp<Track> &track = mTracks.valueFor(trackIndex); - - uint32_t flags = 0; - - bool isHDCPEncrypted = false; - uint64_t inputCTR; - uint8_t HDCP_private_data[16]; - - bool manuallyPrependSPSPPS = - !track->isAudio() - && track->converter()->needToManuallyPrependSPSPPS() - && IsIDR(accessUnit); - - if (mHDCP != NULL && !track->isAudio()) { - isHDCPEncrypted = true; - - if (manuallyPrependSPSPPS) { - accessUnit = mPacketizer->prependCSD( - track->packetizerTrackIndex(), accessUnit); - } - - status_t err = mHDCP->encrypt( - accessUnit->data(), accessUnit->size(), - trackIndex /* streamCTR */, - &inputCTR, - accessUnit->data()); - - if (err != OK) { - ALOGE("Failed to HDCP-encrypt media data (err %d)", - err); - - return err; - } - - HDCP_private_data[0] = 0x00; - - HDCP_private_data[1] = - (((trackIndex >> 30) & 3) << 1) | 1; - - HDCP_private_data[2] = (trackIndex >> 22) & 0xff; - - HDCP_private_data[3] = - (((trackIndex >> 15) & 0x7f) << 1) | 1; - - HDCP_private_data[4] = (trackIndex >> 7) & 0xff; - - HDCP_private_data[5] = - ((trackIndex & 0x7f) << 1) | 1; - - HDCP_private_data[6] = 0x00; - - HDCP_private_data[7] = - (((inputCTR >> 60) & 0x0f) << 1) | 1; - - HDCP_private_data[8] = (inputCTR >> 52) & 0xff; - - HDCP_private_data[9] = - (((inputCTR >> 45) & 0x7f) << 1) | 1; - - HDCP_private_data[10] = (inputCTR >> 37) & 0xff; - - HDCP_private_data[11] = - (((inputCTR >> 30) & 0x7f) << 1) | 1; - - HDCP_private_data[12] = (inputCTR >> 22) & 0xff; - - HDCP_private_data[13] = - (((inputCTR >> 15) & 0x7f) << 1) | 1; - - HDCP_private_data[14] = (inputCTR >> 7) & 0xff; - - HDCP_private_data[15] = - ((inputCTR & 0x7f) << 1) | 1; - -#if 0 - ALOGI("HDCP_private_data:"); - hexdump(HDCP_private_data, sizeof(HDCP_private_data)); - - ABitReader br(HDCP_private_data, sizeof(HDCP_private_data)); - CHECK_EQ(br.getBits(13), 0); - CHECK_EQ(br.getBits(2), (trackIndex >> 30) & 3); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (trackIndex >> 15) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), trackIndex & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(11), 0); - CHECK_EQ(br.getBits(4), (inputCTR >> 60) & 0xf); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (inputCTR >> 45) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (inputCTR >> 30) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), (inputCTR >> 15) & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); - CHECK_EQ(br.getBits(15), inputCTR & 0x7fff); - CHECK_EQ(br.getBits(1), 1u); -#endif - - flags |= TSPacketizer::IS_ENCRYPTED; - } else if (manuallyPrependSPSPPS) { - flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES; - } - - int64_t timeUs = ALooper::GetNowUs(); - if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) { - flags |= TSPacketizer::EMIT_PCR; - flags |= TSPacketizer::EMIT_PAT_AND_PMT; - - mPrevTimeUs = timeUs; - } - - mPacketizer->packetize( - track->packetizerTrackIndex(), accessUnit, packets, flags, - !isHDCPEncrypted ? NULL : HDCP_private_data, - !isHDCPEncrypted ? 
0 : sizeof(HDCP_private_data), - track->isAudio() ? 2 : 0 /* numStuffingBytes */); - - return OK; -} - -status_t WifiDisplaySource::PlaybackSession::packetizeQueuedAccessUnits() { - for (;;) { - bool gotMoreData = false; - for (size_t i = 0; i < mTracks.size(); ++i) { - size_t trackIndex = mTracks.keyAt(i); - const sp<Track> &track = mTracks.valueAt(i); - - sp<ABuffer> accessUnit = track->dequeueAccessUnit(); - if (accessUnit != NULL) { - track->queueOutputBuffer(accessUnit); - gotMoreData = true; - } - } - - if (!gotMoreData) { - break; - } - } - - return OK; -} - void WifiDisplaySource::PlaybackSession::notifySessionDead() { // Inform WifiDisplaySource of our premature death (wish). sp<AMessage> notify = mNotify->dup(); @@ -985,78 +978,5 @@ void WifiDisplaySource::PlaybackSession::notifySessionDead() { mWeAreDead = true; } -void WifiDisplaySource::PlaybackSession::drainAccessUnits() { - ALOGV("audio/video has %d/%d buffers ready.", - mTracks.valueFor(1)->countQueuedOutputBuffers(), - mTracks.valueFor(0)->countQueuedOutputBuffers()); - - while (drainAccessUnit()) { - } -} - -bool WifiDisplaySource::PlaybackSession::drainAccessUnit() { - ssize_t minTrackIndex = -1; - int64_t minTimeUs = -1ll; - - for (size_t i = 0; i < mTracks.size(); ++i) { - const sp<Track> &track = mTracks.valueAt(i); - - int64_t timeUs; - if (track->hasOutputBuffer(&timeUs)) { - if (minTrackIndex < 0 || timeUs < minTimeUs) { - minTrackIndex = mTracks.keyAt(i); - minTimeUs = timeUs; - } - } -#if SUSPEND_VIDEO_IF_IDLE - else if (!track->isSuspended()) { - // We still consider this track "live", so it should keep - // delivering output data whose time stamps we'll have to - // consider for proper interleaving. - return false; - } -#else - else { - // We need access units available on all tracks to be able to - // dequeue the earliest one. - return false; - } -#endif - } - - if (minTrackIndex < 0) { - return false; - } - - const sp<Track> &track = mTracks.valueFor(minTrackIndex); - sp<ABuffer> accessUnit = track->dequeueOutputBuffer(); - - sp<ABuffer> packets; - status_t err = packetizeAccessUnit(minTrackIndex, accessUnit, &packets); - - if (err != OK) { - notifySessionDead(); - return false; - } - - if ((ssize_t)minTrackIndex == mVideoTrackIndex) { - packets->meta()->setInt32("isVideo", 1); - } - mSender->queuePackets(minTimeUs, packets); - -#if 0 - if (minTrackIndex == mVideoTrackIndex) { - int64_t nowUs = ALooper::GetNowUs(); - - // Latency from "data acquired" to "ready to send if we wanted to". - ALOGI("[%s] latencyUs = %lld ms", - minTrackIndex == mVideoTrackIndex ? "video" : "audio", - (nowUs - minTimeUs) / 1000ll); - } -#endif - - return true; -} - } // namespace android diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index b9d193b..39086a1 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -18,7 +18,8 @@ #define PLAYBACK_SESSION_H_ -#include "Sender.h" +#include "MediaSender.h" +#include "VideoFormats.h" #include "WifiDisplaySource.h" namespace android { @@ -26,10 +27,11 @@ namespace android { struct ABuffer; struct BufferQueue; struct IHDCP; -struct ISurfaceTexture; +struct IGraphicBufferProducer; struct MediaPuller; struct MediaSource; -struct TSPacketizer; +struct MediaSender; +struct NuMediaExtractor; // Encapsulates the state of an RTP/RTCP session in the context of wifi // display. 
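// --- Editorial sketch (illustrative, not taken from this change-set) ---
// The drainAccessUnit() logic deleted above interleaved audio and video by always
// sending the queued access unit with the smallest timestamp, and by refusing to
// drain anything while some live track still had an empty queue.  Its selection
// rule, reduced to a standalone function over hypothetical per-track queues of
// head timestamps:

#include <stddef.h>
#include <stdint.h>
#include <vector>

// Returns the index of the track whose head timestamp is smallest, or -1 if a
// track has nothing queued yet (mirroring the "data on all tracks" requirement).
static int pickTrackToDrain(const std::vector<std::vector<int64_t> > &queues) {
    int best = -1;
    int64_t bestTimeUs = 0;
    for (size_t i = 0; i < queues.size(); ++i) {
        if (queues[i].empty()) {
            return -1;
        }
        int64_t timeUs = queues[i].front();
        if (best < 0 || timeUs < bestTimeUs) {
            best = static_cast<int>(i);
            bestTimeUs = timeUs;
        }
    }
    return best;
}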
@@ -38,12 +40,20 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { const sp<ANetworkSession> &netSession, const sp<AMessage> ¬ify, const struct in_addr &interfaceAddr, - const sp<IHDCP> &hdcp); + const sp<IHDCP> &hdcp, + const char *path = NULL); status_t init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - Sender::TransportMode transportMode, - bool usePCMAudio); + const char *clientIP, + int32_t clientRtp, + RTPSender::TransportMode rtpMode, + int32_t clientRtcp, + RTPSender::TransportMode rtcpMode, + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex); void destroyAsync(); @@ -56,9 +66,7 @@ struct WifiDisplaySource::PlaybackSession : public AHandler { status_t finishPlay(); status_t pause(); - sp<ISurfaceTexture> getSurfaceTexture(); - int32_t width() const; - int32_t height() const; + sp<IGraphicBufferProducer> getSurfaceTexture(); void requestIDRFrame(); @@ -80,26 +88,27 @@ private: kWhatMediaPullerNotify, kWhatConverterNotify, kWhatTrackNotify, - kWhatSenderNotify, kWhatUpdateSurface, - kWhatFinishPlay, - kWhatPacketize, kWhatPause, kWhatResume, + kWhatMediaSenderNotify, + kWhatPullExtractorSample, }; sp<ANetworkSession> mNetSession; - sp<Sender> mSender; - sp<ALooper> mSenderLooper; sp<AMessage> mNotify; in_addr mInterfaceAddr; sp<IHDCP> mHDCP; + AString mMediaPath; + + sp<MediaSender> mMediaSender; + int32_t mLocalRTPPort; + bool mWeAreDead; bool mPaused; int64_t mLastLifesignUs; - sp<TSPacketizer> mPacketizer; sp<BufferQueue> mBufferQueue; KeyedVector<size_t, sp<Track> > mTracks; @@ -107,9 +116,21 @@ private: int64_t mPrevTimeUs; - bool mAllTracksHavePacketizerIndex; + sp<NuMediaExtractor> mExtractor; + KeyedVector<size_t, size_t> mExtractorTrackToInternalTrack; + bool mPullExtractorPending; + int32_t mPullExtractorGeneration; + int64_t mFirstSampleTimeRealUs; + int64_t mFirstSampleTimeUs; - status_t setupPacketizer(bool usePCMAudio); + status_t setupMediaPacketizer(bool enableAudio, bool enableVideo); + + status_t setupPacketizer( + bool enableAudio, + bool usePCMAudio, + bool enableVideo, + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex); status_t addSource( bool isVideo, @@ -118,29 +139,20 @@ private: bool usePCMAudio, size_t *numInputBuffers); - status_t addVideoSource(); - status_t addAudioSource(bool usePCMAudio); - - ssize_t appendTSData( - const void *data, size_t size, bool timeDiscontinuity, bool flush); - - status_t onFinishPlay(); - status_t onFinishPlay2(); + status_t addVideoSource( + VideoFormats::ResolutionType videoResolutionType, + size_t videoResolutionIndex); - bool allTracksHavePacketizerIndex(); - - status_t packetizeAccessUnit( - size_t trackIndex, sp<ABuffer> accessUnit, - sp<ABuffer> *packets); + status_t addAudioSource(bool usePCMAudio); - status_t packetizeQueuedAccessUnits(); + status_t onMediaSenderInitialized(); void notifySessionDead(); - void drainAccessUnits(); + void schedulePullExtractor(); + void onPullExtractor(); - // Returns true iff an access unit was successfully drained. 
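// --- Editorial sketch (illustrative, not taken from this change-set) ---
// The RepeaterSource.cpp hunk just below adds setFrameRate(): when the rate
// changes, the source re-anchors its timeline at the point where the next frame
// would have been due at the old rate and restarts the frame counter, so frame N
// after the change is due at startTimeUs + N * 1000000 / rateHz.  A reduced model
// of that bookkeeping (hypothetical struct, not the real class):

#include <stdint.h>

struct RepeatTimeline {
    int64_t startTimeUs = -1;
    int64_t frameCount = 0;
    double rateHz = 30.0;

    int64_t nextFrameDueUs() const {
        return startTimeUs + (int64_t)((frameCount * 1000000ll) / rateHz);
    }

    void changeRate(double newRateHz) {
        if (newRateHz == rateHz) {
            return;
        }
        if (startTimeUs >= 0) {
            startTimeUs = nextFrameDueUs();  // re-anchor at the next due frame
            frameCount = 0;
        }
        rateHz = newRateHz;
    }
};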
- bool drainAccessUnit(); + void onSinkFeedback(const sp<AMessage> &msg); DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession); }; diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp index 72be927..cc8dee3 100644 --- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp +++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp @@ -27,6 +27,25 @@ RepeaterSource::~RepeaterSource() { CHECK(!mStarted); } +double RepeaterSource::getFrameRate() const { + return mRateHz; +} + +void RepeaterSource::setFrameRate(double rateHz) { + Mutex::Autolock autoLock(mLock); + + if (rateHz == mRateHz) { + return; + } + + if (mStartTimeUs >= 0ll) { + int64_t nextTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz; + mStartTimeUs = nextTimeUs; + mFrameCount = 0; + } + mRateHz = rateHz; +} + status_t RepeaterSource::start(MetaData *params) { CHECK(!mStarted); diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.h b/media/libstagefright/wifi-display/source/RepeaterSource.h index a13973c..8d414fd 100644 --- a/media/libstagefright/wifi-display/source/RepeaterSource.h +++ b/media/libstagefright/wifi-display/source/RepeaterSource.h @@ -6,7 +6,7 @@ #include <media/stagefright/foundation/AHandlerReflector.h> #include <media/stagefright/MediaSource.h> -#define SUSPEND_VIDEO_IF_IDLE 1 +#define SUSPEND_VIDEO_IF_IDLE 0 namespace android { @@ -28,6 +28,9 @@ struct RepeaterSource : public MediaSource { // send updates in a while, this is its wakeup call. void wakeUp(); + double getFrameRate() const; + void setFrameRate(double rateHz); + protected: virtual ~RepeaterSource(); diff --git a/media/libstagefright/wifi-display/source/Sender.cpp b/media/libstagefright/wifi-display/source/Sender.cpp deleted file mode 100644 index 9048691..0000000 --- a/media/libstagefright/wifi-display/source/Sender.cpp +++ /dev/null @@ -1,870 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "Sender" -#include <utils/Log.h> - -#include "Sender.h" - -#include "ANetworkSession.h" -#include "TimeSeries.h" - -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/foundation/hexdump.h> -#include <media/stagefright/MediaErrors.h> -#include <media/stagefright/Utils.h> - -namespace android { - -static size_t kMaxRTPPacketSize = 1500; -static size_t kMaxNumTSPacketsPerRTPPacket = (kMaxRTPPacketSize - 12) / 188; - -Sender::Sender( - const sp<ANetworkSession> &netSession, - const sp<AMessage> ¬ify) - : mNetSession(netSession), - mNotify(notify), - mTransportMode(TRANSPORT_UDP), - mRTPChannel(0), - mRTCPChannel(0), - mRTPPort(0), - mRTPSessionID(0), - mRTCPSessionID(0), -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - mRTPRetransmissionSessionID(0), - mRTCPRetransmissionSessionID(0), -#endif - mClientRTPPort(0), - mClientRTCPPort(0), - mRTPConnected(false), - mRTCPConnected(false), - mFirstOutputBufferReadyTimeUs(-1ll), - mFirstOutputBufferSentTimeUs(-1ll), - mRTPSeqNo(0), -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - mRTPRetransmissionSeqNo(0), -#endif - mLastNTPTime(0), - mLastRTPTime(0), - mNumRTPSent(0), - mNumRTPOctetsSent(0), - mNumSRsSent(0), - mSendSRPending(false) -#if ENABLE_RETRANSMISSION - ,mHistoryLength(0) -#endif -#if TRACK_BANDWIDTH - ,mFirstPacketTimeUs(-1ll) - ,mTotalBytesSent(0ll) -#endif -#if LOG_TRANSPORT_STREAM - ,mLogFile(NULL) -#endif -{ -#if LOG_TRANSPORT_STREAM - mLogFile = fopen("/system/etc/log.ts", "wb"); -#endif -} - -Sender::~Sender() { -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - if (mRTCPRetransmissionSessionID != 0) { - mNetSession->destroySession(mRTCPRetransmissionSessionID); - } - - if (mRTPRetransmissionSessionID != 0) { - mNetSession->destroySession(mRTPRetransmissionSessionID); - } -#endif - - if (mRTCPSessionID != 0) { - mNetSession->destroySession(mRTCPSessionID); - } - - if (mRTPSessionID != 0) { - mNetSession->destroySession(mRTPSessionID); - } - -#if LOG_TRANSPORT_STREAM - if (mLogFile != NULL) { - fclose(mLogFile); - mLogFile = NULL; - } -#endif -} - -status_t Sender::init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - TransportMode transportMode) { - mClientIP = clientIP; - mTransportMode = transportMode; - - if (transportMode == TRANSPORT_TCP_INTERLEAVED) { - mRTPChannel = clientRtp; - mRTCPChannel = clientRtcp; - mRTPPort = 0; - mRTPSessionID = 0; - mRTCPSessionID = 0; - return OK; - } - - mRTPChannel = 0; - mRTCPChannel = 0; - - if (mTransportMode == TRANSPORT_TCP) { - // XXX This is wrong, we need to allocate sockets here, we only - // need to do this because the dongles are not establishing their - // end until after PLAY instead of before SETUP. 
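// --- Editorial sketch (illustrative, not taken from this change-set) ---
// The Sender being deleted here sized its datagrams as kMaxRTPPacketSize = 1500
// and kMaxNumTSPacketsPerRTPPacket = (1500 - 12) / 188: one 12-byte RTP header
// plus up to seven whole 188-byte MPEG-TS packets, 1328 bytes in total, well
// within the 1500-byte budget the code allows itself.  The arithmetic, spelled
// out as constants:

#include <stddef.h>

const size_t kRTPHeaderSize = 12;
const size_t kTSPacketSize  = 188;
const size_t kMaxRTPPacket  = 1500;
const size_t kTSPerRTP      = (kMaxRTPPacket - kRTPHeaderSize) / kTSPacketSize;
const size_t kFullRTPPacket = kRTPHeaderSize + kTSPerRTP * kTSPacketSize;

static_assert(kTSPerRTP == 7, "seven TS packets fit into one RTP packet");
static_assert(kFullRTPPacket == 1328, "a full RTP packet is 1328 bytes");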
- mRTPPort = 20000; - mRTPSessionID = 0; - mRTCPSessionID = 0; - mClientRTPPort = clientRtp; - mClientRTCPPort = clientRtcp; - return OK; - } - - int serverRtp; - - sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id()); - sp<AMessage> rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - sp<AMessage> rtpRetransmissionNotify = - new AMessage(kWhatRTPRetransmissionNotify, id()); - - sp<AMessage> rtcpRetransmissionNotify = - new AMessage(kWhatRTCPRetransmissionNotify, id()); -#endif - - status_t err; - for (serverRtp = 15550;; serverRtp += 2) { - int32_t rtpSession; - if (mTransportMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - serverRtp, clientIP, clientRtp, - rtpNotify, &rtpSession); - } else { - err = mNetSession->createTCPDatagramSession( - serverRtp, clientIP, clientRtp, - rtpNotify, &rtpSession); - } - - if (err != OK) { - ALOGI("failed to create RTP socket on port %d", serverRtp); - continue; - } - - int32_t rtcpSession = 0; - - if (clientRtcp >= 0) { - if (mTransportMode == TRANSPORT_UDP) { - err = mNetSession->createUDPSession( - serverRtp + 1, clientIP, clientRtcp, - rtcpNotify, &rtcpSession); - } else { - err = mNetSession->createTCPDatagramSession( - serverRtp + 1, clientIP, clientRtcp, - rtcpNotify, &rtcpSession); - } - - if (err != OK) { - ALOGI("failed to create RTCP socket on port %d", serverRtp + 1); - - mNetSession->destroySession(rtpSession); - continue; - } - } - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - if (mTransportMode == TRANSPORT_UDP) { - int32_t rtpRetransmissionSession; - - err = mNetSession->createUDPSession( - serverRtp + kRetransmissionPortOffset, - clientIP, - clientRtp + kRetransmissionPortOffset, - rtpRetransmissionNotify, - &rtpRetransmissionSession); - - if (err != OK) { - mNetSession->destroySession(rtcpSession); - mNetSession->destroySession(rtpSession); - continue; - } - - CHECK_GE(clientRtcp, 0); - - int32_t rtcpRetransmissionSession; - err = mNetSession->createUDPSession( - serverRtp + 1 + kRetransmissionPortOffset, - clientIP, - clientRtp + 1 + kRetransmissionPortOffset, - rtcpRetransmissionNotify, - &rtcpRetransmissionSession); - - if (err != OK) { - mNetSession->destroySession(rtpRetransmissionSession); - mNetSession->destroySession(rtcpSession); - mNetSession->destroySession(rtpSession); - continue; - } - - mRTPRetransmissionSessionID = rtpRetransmissionSession; - mRTCPRetransmissionSessionID = rtcpRetransmissionSession; - - ALOGI("rtpRetransmissionSessionID = %d, " - "rtcpRetransmissionSessionID = %d", - rtpRetransmissionSession, rtcpRetransmissionSession); - } -#endif - - mRTPPort = serverRtp; - mRTPSessionID = rtpSession; - mRTCPSessionID = rtcpSession; - - ALOGI("rtpSessionID = %d, rtcpSessionID = %d", rtpSession, rtcpSession); - break; - } - - if (mRTPPort == 0) { - return UNKNOWN_ERROR; - } - - return OK; -} - -status_t Sender::finishInit() { - if (mTransportMode != TRANSPORT_TCP) { - notifyInitDone(); - return OK; - } - - sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id()); - - status_t err = mNetSession->createTCPDatagramSession( - mRTPPort, mClientIP.c_str(), mClientRTPPort, - rtpNotify, &mRTPSessionID); - - if (err != OK) { - return err; - } - - if (mClientRTCPPort >= 0) { - sp<AMessage> rtcpNotify = new AMessage(kWhatRTCPNotify, id()); - - err = mNetSession->createTCPDatagramSession( - mRTPPort + 1, mClientIP.c_str(), mClientRTCPPort, - rtcpNotify, &mRTCPSessionID); - - if (err != OK) { - return err; - } - } - - 
return OK; -} - -int32_t Sender::getRTPPort() const { - return mRTPPort; -} - -void Sender::queuePackets( - int64_t timeUs, const sp<ABuffer> &tsPackets) { - const size_t numTSPackets = tsPackets->size() / 188; - - const size_t numRTPPackets = - (numTSPackets + kMaxNumTSPacketsPerRTPPacket - 1) - / kMaxNumTSPacketsPerRTPPacket; - - sp<ABuffer> udpPackets = new ABuffer( - numRTPPackets * (12 + kMaxNumTSPacketsPerRTPPacket * 188)); - - udpPackets->meta()->setInt64("timeUs", timeUs); - - size_t dstOffset = 0; - for (size_t i = 0; i < numTSPackets; ++i) { - if ((i % kMaxNumTSPacketsPerRTPPacket) == 0) { - static const bool kMarkerBit = false; - - uint8_t *rtp = udpPackets->data() + dstOffset; - rtp[0] = 0x80; - rtp[1] = 33 | (kMarkerBit ? (1 << 7) : 0); // M-bit - rtp[2] = (mRTPSeqNo >> 8) & 0xff; - rtp[3] = mRTPSeqNo & 0xff; - rtp[4] = 0x00; // rtp time to be filled in later. - rtp[5] = 0x00; - rtp[6] = 0x00; - rtp[7] = 0x00; - rtp[8] = kSourceID >> 24; - rtp[9] = (kSourceID >> 16) & 0xff; - rtp[10] = (kSourceID >> 8) & 0xff; - rtp[11] = kSourceID & 0xff; - - ++mRTPSeqNo; - - dstOffset += 12; - } - - memcpy(udpPackets->data() + dstOffset, - tsPackets->data() + 188 * i, - 188); - - dstOffset += 188; - } - - udpPackets->setRange(0, dstOffset); - - sp<AMessage> msg = new AMessage(kWhatDrainQueue, id()); - msg->setBuffer("udpPackets", udpPackets); - msg->post(); - -#if LOG_TRANSPORT_STREAM - if (mLogFile != NULL) { - fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile); - } -#endif -} - -void Sender::onMessageReceived(const sp<AMessage> &msg) { - switch (msg->what()) { - case kWhatRTPNotify: - case kWhatRTCPNotify: -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - case kWhatRTPRetransmissionNotify: - case kWhatRTCPRetransmissionNotify: -#endif - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", &err)); - - int32_t errorOccuredDuringSend; - CHECK(msg->findInt32("send", &errorOccuredDuringSend)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - if ((msg->what() == kWhatRTPNotify -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - || msg->what() == kWhatRTPRetransmissionNotify -#endif - ) && !errorOccuredDuringSend) { - // This is ok, we don't expect to receive anything on - // the RTP socket. - break; - } - - ALOGE("An error occurred during %s in session %d " - "(%d, '%s' (%s)).", - errorOccuredDuringSend ? 
"send" : "receive", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - - if (sessionID == mRTPSessionID) { - mRTPSessionID = 0; - } else if (sessionID == mRTCPSessionID) { - mRTCPSessionID = 0; - } -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - else if (sessionID == mRTPRetransmissionSessionID) { - mRTPRetransmissionSessionID = 0; - } else if (sessionID == mRTCPRetransmissionSessionID) { - mRTCPRetransmissionSessionID = 0; - } -#endif - - notifySessionDead(); - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp<ABuffer> data; - CHECK(msg->findBuffer("data", &data)); - - status_t err; - if (msg->what() == kWhatRTCPNotify -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - || msg->what() == kWhatRTCPRetransmissionNotify -#endif - ) - { - err = parseRTCP(data); - } - break; - } - - case ANetworkSession::kWhatConnected: - { - CHECK_EQ(mTransportMode, TRANSPORT_TCP); - - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - if (sessionID == mRTPSessionID) { - CHECK(!mRTPConnected); - mRTPConnected = true; - ALOGI("RTP Session now connected."); - } else if (sessionID == mRTCPSessionID) { - CHECK(!mRTCPConnected); - mRTCPConnected = true; - ALOGI("RTCP Session now connected."); - } else { - TRESPASS(); - } - - if (mRTPConnected - && (mClientRTCPPort < 0 || mRTCPConnected)) { - notifyInitDone(); - } - break; - } - - default: - TRESPASS(); - } - break; - } - - case kWhatDrainQueue: - { - sp<ABuffer> udpPackets; - CHECK(msg->findBuffer("udpPackets", &udpPackets)); - - onDrainQueue(udpPackets); - break; - } - - case kWhatSendSR: - { - mSendSRPending = false; - - if (mRTCPSessionID == 0) { - break; - } - - onSendSR(); - - scheduleSendSR(); - break; - } - } -} - -void Sender::scheduleSendSR() { - if (mSendSRPending || mRTCPSessionID == 0) { - return; - } - - mSendSRPending = true; - (new AMessage(kWhatSendSR, id()))->post(kSendSRIntervalUs); -} - -void Sender::addSR(const sp<ABuffer> &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - - // TODO: Use macros/utility functions to clean up all the bitshifts below. 
- - data[0] = 0x80 | 0; - data[1] = 200; // SR - data[2] = 0; - data[3] = 6; - data[4] = kSourceID >> 24; - data[5] = (kSourceID >> 16) & 0xff; - data[6] = (kSourceID >> 8) & 0xff; - data[7] = kSourceID & 0xff; - - data[8] = mLastNTPTime >> (64 - 8); - data[9] = (mLastNTPTime >> (64 - 16)) & 0xff; - data[10] = (mLastNTPTime >> (64 - 24)) & 0xff; - data[11] = (mLastNTPTime >> 32) & 0xff; - data[12] = (mLastNTPTime >> 24) & 0xff; - data[13] = (mLastNTPTime >> 16) & 0xff; - data[14] = (mLastNTPTime >> 8) & 0xff; - data[15] = mLastNTPTime & 0xff; - - data[16] = (mLastRTPTime >> 24) & 0xff; - data[17] = (mLastRTPTime >> 16) & 0xff; - data[18] = (mLastRTPTime >> 8) & 0xff; - data[19] = mLastRTPTime & 0xff; - - data[20] = mNumRTPSent >> 24; - data[21] = (mNumRTPSent >> 16) & 0xff; - data[22] = (mNumRTPSent >> 8) & 0xff; - data[23] = mNumRTPSent & 0xff; - - data[24] = mNumRTPOctetsSent >> 24; - data[25] = (mNumRTPOctetsSent >> 16) & 0xff; - data[26] = (mNumRTPOctetsSent >> 8) & 0xff; - data[27] = mNumRTPOctetsSent & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + 28); -} - -void Sender::addSDES(const sp<ABuffer> &buffer) { - uint8_t *data = buffer->data() + buffer->size(); - data[0] = 0x80 | 1; - data[1] = 202; // SDES - data[4] = kSourceID >> 24; - data[5] = (kSourceID >> 16) & 0xff; - data[6] = (kSourceID >> 8) & 0xff; - data[7] = kSourceID & 0xff; - - size_t offset = 8; - - data[offset++] = 1; // CNAME - - static const char *kCNAME = "someone@somewhere"; - data[offset++] = strlen(kCNAME); - - memcpy(&data[offset], kCNAME, strlen(kCNAME)); - offset += strlen(kCNAME); - - data[offset++] = 7; // NOTE - - static const char *kNOTE = "Hell's frozen over."; - data[offset++] = strlen(kNOTE); - - memcpy(&data[offset], kNOTE, strlen(kNOTE)); - offset += strlen(kNOTE); - - data[offset++] = 0; - - if ((offset % 4) > 0) { - size_t count = 4 - (offset % 4); - switch (count) { - case 3: - data[offset++] = 0; - case 2: - data[offset++] = 0; - case 1: - data[offset++] = 0; - } - } - - size_t numWords = (offset / 4) - 1; - data[2] = numWords >> 8; - data[3] = numWords & 0xff; - - buffer->setRange(buffer->offset(), buffer->size() + offset); -} - -// static -uint64_t Sender::GetNowNTP() { - uint64_t nowUs = ALooper::GetNowUs(); - - nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll; - - uint64_t hi = nowUs / 1000000ll; - uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll; - - return (hi << 32) | lo; -} - -void Sender::onSendSR() { - sp<ABuffer> buffer = new ABuffer(1500); - buffer->setRange(0, 0); - - addSR(buffer); - addSDES(buffer); - - if (mTransportMode == TRANSPORT_TCP_INTERLEAVED) { - sp<AMessage> notify = mNotify->dup(); - notify->setInt32("what", kWhatBinaryData); - notify->setInt32("channel", mRTCPChannel); - notify->setBuffer("data", buffer); - notify->post(); - } else { - sendPacket(mRTCPSessionID, buffer->data(), buffer->size()); - } - - ++mNumSRsSent; -} - -#if ENABLE_RETRANSMISSION -status_t Sender::parseTSFB( - const uint8_t *data, size_t size) { - if ((data[0] & 0x1f) != 1) { - return ERROR_UNSUPPORTED; // We only support NACK for now. 
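// --- Editorial sketch (illustrative, not taken from this change-set) ---
// GetNowNTP() in the deleted Sender converts a microsecond wall-clock value into
// NTP's 32.32 fixed-point format: it first shifts the epoch from 1970 back to
// 1900 by adding 70 years (17 of them leap years) worth of microseconds, then
// packs whole seconds into the high 32 bits and the fraction, scaled by
// 2^32 / 1e6, into the low 32 bits.  The same conversion as a free function:

#include <stdint.h>

static uint64_t microsSince1970ToNTP(uint64_t nowUs) {
    // 1900 -> 1970 epoch offset: 70 years, 17 of which contain a leap day.
    nowUs += ((70ull * 365 + 17) * 24) * 60 * 60 * 1000000ull;

    uint64_t secs = nowUs / 1000000ull;
    uint64_t frac = ((1ull << 32) * (nowUs % 1000000ull)) / 1000000ull;

    return (secs << 32) | frac;
}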
- } - - uint32_t srcId = U32_AT(&data[8]); - if (srcId != kSourceID) { - return ERROR_MALFORMED; - } - - for (size_t i = 12; i < size; i += 4) { - uint16_t seqNo = U16_AT(&data[i]); - uint16_t blp = U16_AT(&data[i + 2]); - - List<sp<ABuffer> >::iterator it = mHistory.begin(); - bool foundSeqNo = false; - while (it != mHistory.end()) { - const sp<ABuffer> &buffer = *it; - - uint16_t bufferSeqNo = buffer->int32Data() & 0xffff; - - bool retransmit = false; - if (bufferSeqNo == seqNo) { - retransmit = true; - } else if (blp != 0) { - for (size_t i = 0; i < 16; ++i) { - if ((blp & (1 << i)) - && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) { - blp &= ~(1 << i); - retransmit = true; - } - } - } - - if (retransmit) { - ALOGI("retransmitting seqNo %d", bufferSeqNo); - -#if RETRANSMISSION_ACCORDING_TO_RFC_XXXX - sp<ABuffer> retransRTP = new ABuffer(2 + buffer->size()); - uint8_t *rtp = retransRTP->data(); - memcpy(rtp, buffer->data(), 12); - rtp[2] = (mRTPRetransmissionSeqNo >> 8) & 0xff; - rtp[3] = mRTPRetransmissionSeqNo & 0xff; - rtp[12] = (bufferSeqNo >> 8) & 0xff; - rtp[13] = bufferSeqNo & 0xff; - memcpy(&rtp[14], buffer->data() + 12, buffer->size() - 12); - - ++mRTPRetransmissionSeqNo; - - sendPacket( - mRTPRetransmissionSessionID, - retransRTP->data(), retransRTP->size()); -#else - sendPacket( - mRTPSessionID, buffer->data(), buffer->size()); -#endif - - if (bufferSeqNo == seqNo) { - foundSeqNo = true; - } - - if (foundSeqNo && blp == 0) { - break; - } - } - - ++it; - } - - if (!foundSeqNo || blp != 0) { - ALOGI("Some sequence numbers were no longer available for " - "retransmission"); - } - } - - return OK; -} -#endif - -status_t Sender::parseRTCP( - const sp<ABuffer> &buffer) { - const uint8_t *data = buffer->data(); - size_t size = buffer->size(); - - while (size > 0) { - if (size < 8) { - // Too short to be a valid RTCP header - return ERROR_MALFORMED; - } - - if ((data[0] >> 6) != 2) { - // Unsupported version. - return ERROR_UNSUPPORTED; - } - - if (data[0] & 0x20) { - // Padding present. - - size_t paddingLength = data[size - 1]; - - if (paddingLength + 12 > size) { - // If we removed this much padding we'd end up with something - // that's too short to be a valid RTP header. - return ERROR_MALFORMED; - } - - size -= paddingLength; - } - - size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4; - - if (size < headerLength) { - // Only received a partial packet? 
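// --- Editorial sketch (illustrative, not taken from this change-set) ---
// parseTSFB() above walks generic-NACK feedback entries: each 4-byte FCI carries
// a base sequence number plus a 16-bit "bitmask of following lost packets" (BLP)
// in which bit i flags sequence number (base + i + 1) as lost as well.  Expanding
// one entry into the full list of requested retransmissions:

#include <stdint.h>
#include <vector>

static std::vector<uint16_t> expandNACK(uint16_t baseSeqNo, uint16_t blp) {
    std::vector<uint16_t> lost;
    lost.push_back(baseSeqNo);
    for (int i = 0; i < 16; ++i) {
        if (blp & (1u << i)) {
            lost.push_back((uint16_t)((baseSeqNo + i + 1) & 0xffff));
        }
    }
    return lost;
}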
- return ERROR_MALFORMED; - } - - switch (data[1]) { - case 200: - case 201: // RR - case 202: // SDES - case 203: - case 204: // APP - break; - -#if ENABLE_RETRANSMISSION - case 205: // TSFB (transport layer specific feedback) - parseTSFB(data, headerLength); - break; -#endif - - case 206: // PSFB (payload specific feedback) - hexdump(data, headerLength); - break; - - default: - { - ALOGW("Unknown RTCP packet type %u of size %d", - (unsigned)data[1], headerLength); - break; - } - } - - data += headerLength; - size -= headerLength; - } - - return OK; -} - -status_t Sender::sendPacket( - int32_t sessionID, const void *data, size_t size) { - return mNetSession->sendRequest(sessionID, data, size); -} - -void Sender::notifyInitDone() { - sp<AMessage> notify = mNotify->dup(); - notify->setInt32("what", kWhatInitDone); - notify->post(); -} - -void Sender::notifySessionDead() { - sp<AMessage> notify = mNotify->dup(); - notify->setInt32("what", kWhatSessionDead); - notify->post(); -} - -void Sender::onDrainQueue(const sp<ABuffer> &udpPackets) { - static const size_t kFullRTPPacketSize = - 12 + 188 * kMaxNumTSPacketsPerRTPPacket; - - size_t srcOffset = 0; - while (srcOffset < udpPackets->size()) { - uint8_t *rtp = udpPackets->data() + srcOffset; - - size_t rtpPacketSize = udpPackets->size() - srcOffset; - if (rtpPacketSize > kFullRTPPacketSize) { - rtpPacketSize = kFullRTPPacketSize; - } - - int64_t nowUs = ALooper::GetNowUs(); - mLastNTPTime = GetNowNTP(); - - // 90kHz time scale - uint32_t rtpTime = (nowUs * 9ll) / 100ll; - - rtp[4] = rtpTime >> 24; - rtp[5] = (rtpTime >> 16) & 0xff; - rtp[6] = (rtpTime >> 8) & 0xff; - rtp[7] = rtpTime & 0xff; - - ++mNumRTPSent; - mNumRTPOctetsSent += rtpPacketSize - 12; - - mLastRTPTime = rtpTime; - - if (mTransportMode == TRANSPORT_TCP_INTERLEAVED) { - sp<AMessage> notify = mNotify->dup(); - notify->setInt32("what", kWhatBinaryData); - - sp<ABuffer> data = new ABuffer(rtpPacketSize); - memcpy(data->data(), rtp, rtpPacketSize); - - notify->setInt32("channel", mRTPChannel); - notify->setBuffer("data", data); - notify->post(); - } else { - sendPacket(mRTPSessionID, rtp, rtpPacketSize); - -#if TRACK_BANDWIDTH - mTotalBytesSent += rtpPacketSize->size(); - int64_t delayUs = ALooper::GetNowUs() - mFirstPacketTimeUs; - - if (delayUs > 0ll) { - ALOGI("approx. 
net bandwidth used: %.2f Mbit/sec", - mTotalBytesSent * 8.0 / delayUs); - } -#endif - } - -#if ENABLE_RETRANSMISSION - addToHistory(rtp, rtpPacketSize); -#endif - - srcOffset += rtpPacketSize; - } - -#if 0 - int64_t timeUs; - CHECK(udpPackets->meta()->findInt64("timeUs", &timeUs)); - - ALOGI("dTimeUs = %lld us", ALooper::GetNowUs() - timeUs); -#endif -} - -#if ENABLE_RETRANSMISSION -void Sender::addToHistory(const uint8_t *rtp, size_t rtpPacketSize) { - sp<ABuffer> packet = new ABuffer(rtpPacketSize); - memcpy(packet->data(), rtp, rtpPacketSize); - - unsigned rtpSeqNo = U16_AT(&rtp[2]); - packet->setInt32Data(rtpSeqNo); - - mHistory.push_back(packet); - ++mHistoryLength; - - if (mHistoryLength > kMaxHistoryLength) { - mHistory.erase(mHistory.begin()); - --mHistoryLength; - } -} -#endif - -} // namespace android - diff --git a/media/libstagefright/wifi-display/source/Sender.h b/media/libstagefright/wifi-display/source/Sender.h deleted file mode 100644 index 66951f7..0000000 --- a/media/libstagefright/wifi-display/source/Sender.h +++ /dev/null @@ -1,169 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef SENDER_H_ - -#define SENDER_H_ - -#include <media/stagefright/foundation/AHandler.h> - -namespace android { - -#define LOG_TRANSPORT_STREAM 0 -#define TRACK_BANDWIDTH 0 - -#define ENABLE_RETRANSMISSION 1 - -// If retransmission is enabled the following define determines what -// kind we support, if RETRANSMISSION_ACCORDING_TO_RFC_XXXX is 0 -// we'll send NACKs on the original RTCP channel and retransmit packets -// on the original RTP channel, otherwise a separate channel pair is used -// for this purpose. 
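// --- Editorial sketch (illustrative, not taken from this change-set) ---
// onDrainQueue() above stamps each RTP packet with a 90 kHz media clock derived
// from the microsecond wall clock as rtpTime = (nowUs * 9) / 100, since
// 90,000 ticks per second over 1,000,000 microseconds per second reduces to
// 9/100.  As a tiny helper:

#include <stdint.h>

static inline uint32_t microsTo90kHz(int64_t nowUs) {
    // 90 kHz ticks = us * 90000 / 1000000 = us * 9 / 100
    return (uint32_t)((nowUs * 9ll) / 100ll);
}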
-#define RETRANSMISSION_ACCORDING_TO_RFC_XXXX 0 - -struct ABuffer; -struct ANetworkSession; - -struct Sender : public AHandler { - Sender(const sp<ANetworkSession> &netSession, const sp<AMessage> ¬ify); - - enum { - kWhatInitDone, - kWhatSessionDead, - kWhatBinaryData, - }; - - enum TransportMode { - TRANSPORT_UDP, - TRANSPORT_TCP_INTERLEAVED, - TRANSPORT_TCP, - }; - status_t init( - const char *clientIP, int32_t clientRtp, int32_t clientRtcp, - TransportMode transportMode); - - status_t finishInit(); - - int32_t getRTPPort() const; - - void queuePackets(int64_t timeUs, const sp<ABuffer> &tsPackets); - void scheduleSendSR(); - -protected: - virtual ~Sender(); - virtual void onMessageReceived(const sp<AMessage> &msg); - -private: - enum { - kWhatDrainQueue, - kWhatSendSR, - kWhatRTPNotify, - kWhatRTCPNotify, -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - kWhatRTPRetransmissionNotify, - kWhatRTCPRetransmissionNotify, -#endif - }; - - static const int64_t kSendSRIntervalUs = 10000000ll; - - static const uint32_t kSourceID = 0xdeadbeef; - static const size_t kMaxHistoryLength = 128; - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - static const size_t kRetransmissionPortOffset = 120; -#endif - - sp<ANetworkSession> mNetSession; - sp<AMessage> mNotify; - - TransportMode mTransportMode; - AString mClientIP; - - // in TCP mode - int32_t mRTPChannel; - int32_t mRTCPChannel; - - // in UDP mode - int32_t mRTPPort; - int32_t mRTPSessionID; - int32_t mRTCPSessionID; - -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - int32_t mRTPRetransmissionSessionID; - int32_t mRTCPRetransmissionSessionID; -#endif - - int32_t mClientRTPPort; - int32_t mClientRTCPPort; - bool mRTPConnected; - bool mRTCPConnected; - - int64_t mFirstOutputBufferReadyTimeUs; - int64_t mFirstOutputBufferSentTimeUs; - - uint32_t mRTPSeqNo; -#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX - uint32_t mRTPRetransmissionSeqNo; -#endif - - uint64_t mLastNTPTime; - uint32_t mLastRTPTime; - uint32_t mNumRTPSent; - uint32_t mNumRTPOctetsSent; - uint32_t mNumSRsSent; - - bool mSendSRPending; - -#if ENABLE_RETRANSMISSION - List<sp<ABuffer> > mHistory; - size_t mHistoryLength; -#endif - -#if TRACK_BANDWIDTH - int64_t mFirstPacketTimeUs; - uint64_t mTotalBytesSent; -#endif - -#if LOG_TRANSPORT_STREAM - FILE *mLogFile; -#endif - - void onSendSR(); - void addSR(const sp<ABuffer> &buffer); - void addSDES(const sp<ABuffer> &buffer); - static uint64_t GetNowNTP(); - -#if ENABLE_RETRANSMISSION - status_t parseTSFB(const uint8_t *data, size_t size); - void addToHistory(const uint8_t *rtp, size_t rtpPacketSize); -#endif - - status_t parseRTCP(const sp<ABuffer> &buffer); - - status_t sendPacket(int32_t sessionID, const void *data, size_t size); - - void notifyInitDone(); - void notifySessionDead(); - - void onDrainQueue(const sp<ABuffer> &udpPackets); - - DISALLOW_EVIL_CONSTRUCTORS(Sender); -}; - -} // namespace android - -#endif // SENDER_H_ diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp index ef57a4d..2c4a373 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp +++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp @@ -58,6 +58,7 @@ struct TSPacketizer::Track : public RefBase { sp<ABuffer> descriptorAt(size_t index) const; void finalize(); + void extractCSDIfNecessary(); protected: virtual ~Track(); @@ -77,6 +78,7 @@ private: bool mAudioLacksATDSHeaders; bool 
mFinalized; + bool mExtractedCSD; DISALLOW_EVIL_CONSTRUCTORS(Track); }; @@ -90,14 +92,21 @@ TSPacketizer::Track::Track( mStreamID(streamID), mContinuityCounter(0), mAudioLacksATDSHeaders(false), - mFinalized(false) { + mFinalized(false), + mExtractedCSD(false) { CHECK(format->findString("mime", &mMIME)); +} + +void TSPacketizer::Track::extractCSDIfNecessary() { + if (mExtractedCSD) { + return; + } if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC) || !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) { for (size_t i = 0;; ++i) { sp<ABuffer> csd; - if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) { + if (!mFormat->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) { break; } @@ -111,6 +120,8 @@ TSPacketizer::Track::Track( } } } + + mExtractedCSD = true; } TSPacketizer::Track::~Track() { @@ -250,7 +261,7 @@ void TSPacketizer::Track::finalize() { data[0] = 40; // descriptor_tag data[1] = 4; // descriptor_length - CHECK_EQ(mCSD.size(), 1u); + CHECK_GE(mCSD.size(), 1u); const sp<ABuffer> &sps = mCSD.itemAt(0); CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4)); CHECK_GE(sps->size(), 7u); @@ -314,12 +325,31 @@ void TSPacketizer::Track::finalize() { mDescriptors.push_back(descriptor); } - int32_t hdcpVersion; - if (mFormat->findInt32("hdcp-version", &hdcpVersion)) { - // HDCP descriptor + mFinalized = true; +} + +//////////////////////////////////////////////////////////////////////////////// - CHECK(hdcpVersion == 0x20 || hdcpVersion == 0x21); +TSPacketizer::TSPacketizer(uint32_t flags) + : mFlags(flags), + mPATContinuityCounter(0), + mPMTContinuityCounter(0) { + initCrcTable(); + if (flags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR)) { + int32_t hdcpVersion; + if (flags & EMIT_HDCP20_DESCRIPTOR) { + CHECK(!(flags & EMIT_HDCP21_DESCRIPTOR)); + hdcpVersion = 0x20; + } else { + CHECK(!(flags & EMIT_HDCP20_DESCRIPTOR)); + + // HDCP2.0 _and_ HDCP 2.1 specs say to set the version + // inside the HDCP descriptor to 0x20!!! + hdcpVersion = 0x20; + } + + // HDCP descriptor sp<ABuffer> descriptor = new ABuffer(7); uint8_t *data = descriptor->data(); data[0] = 0x05; // descriptor_tag @@ -330,18 +360,8 @@ void TSPacketizer::Track::finalize() { data[5] = 'P'; data[6] = hdcpVersion; - mDescriptors.push_back(descriptor); + mProgramInfoDescriptors.push_back(descriptor); } - - mFinalized = true; -} - -//////////////////////////////////////////////////////////////////////////////// - -TSPacketizer::TSPacketizer() - : mPATContinuityCounter(0), - mPMTContinuityCounter(0) { - initCrcTable(); } TSPacketizer::~TSPacketizer() { @@ -407,6 +427,17 @@ ssize_t TSPacketizer::addTrack(const sp<AMessage> &format) { return mTracks.add(track); } +status_t TSPacketizer::extractCSDIfNecessary(size_t trackIndex) { + if (trackIndex >= mTracks.size()) { + return -ERANGE; + } + + const sp<Track> &track = mTracks.itemAt(trackIndex); + track->extractCSDIfNecessary(); + + return OK; +} + status_t TSPacketizer::packetize( size_t trackIndex, const sp<ABuffer> &_accessUnit, @@ -471,16 +502,121 @@ status_t TSPacketizer::packetize( // reserved = b1 // the first fragment of "buffer" follows + // Each transport packet (except for the last one contributing to the PES + // payload) must contain a multiple of 16 bytes of payload per HDCP spec. + bool alignPayload = + (mFlags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR)); + + /* + a) The very first PES transport stream packet contains + + 4 bytes of TS header + ... 
padding + 14 bytes of static PES header + PES_private_data_len + 1 bytes (only if PES_private_data_len > 0) + numStuffingBytes bytes + + followed by the payload + + b) Subsequent PES transport stream packets contain + + 4 bytes of TS header + ... padding + + followed by the payload + */ + size_t PES_packet_length = accessUnit->size() + 8 + numStuffingBytes; if (PES_private_data_len > 0) { PES_packet_length += PES_private_data_len + 1; } - size_t numTSPackets; - if (PES_packet_length <= 178) { - numTSPackets = 1; - } else { - numTSPackets = 1 + ((PES_packet_length - 178) + 183) / 184; + size_t numTSPackets = 1; + + { + // Make sure the PES header fits into a single TS packet: + size_t PES_header_size = 14 + numStuffingBytes; + if (PES_private_data_len > 0) { + PES_header_size += PES_private_data_len + 1; + } + + CHECK_LE(PES_header_size, 188u - 4u); + + size_t sizeAvailableForPayload = 188 - 4 - PES_header_size; + size_t numBytesOfPayload = accessUnit->size(); + + if (numBytesOfPayload > sizeAvailableForPayload) { + numBytesOfPayload = sizeAvailableForPayload; + + if (alignPayload && numBytesOfPayload > 16) { + numBytesOfPayload -= (numBytesOfPayload % 16); + } + } + + // size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload; + ALOGV("packet 1 contains %zd padding bytes and %zd bytes of payload", + numPaddingBytes, numBytesOfPayload); + + size_t numBytesOfPayloadRemaining = accessUnit->size() - numBytesOfPayload; + +#if 0 + // The following hopefully illustrates the logic that led to the + // more efficient computation in the #else block... + + while (numBytesOfPayloadRemaining > 0) { + size_t sizeAvailableForPayload = 188 - 4; + + size_t numBytesOfPayload = numBytesOfPayloadRemaining; + + if (numBytesOfPayload > sizeAvailableForPayload) { + numBytesOfPayload = sizeAvailableForPayload; + + if (alignPayload && numBytesOfPayload > 16) { + numBytesOfPayload -= (numBytesOfPayload % 16); + } + } + + size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload; + ALOGI("packet %zd contains %zd padding bytes and %zd bytes of payload", + numTSPackets + 1, numPaddingBytes, numBytesOfPayload); + + numBytesOfPayloadRemaining -= numBytesOfPayload; + ++numTSPackets; + } +#else + // This is how many bytes of payload each subsequent TS packet + // can contain at most. + sizeAvailableForPayload = 188 - 4; + size_t sizeAvailableForAlignedPayload = sizeAvailableForPayload; + if (alignPayload) { + // We're only going to use a subset of the available space + // since we need to make each fragment a multiple of 16 in size. + sizeAvailableForAlignedPayload -= + (sizeAvailableForAlignedPayload % 16); + } + + size_t numFullTSPackets = + numBytesOfPayloadRemaining / sizeAvailableForAlignedPayload; + + numTSPackets += numFullTSPackets; + + numBytesOfPayloadRemaining -= + numFullTSPackets * sizeAvailableForAlignedPayload; + + // numBytesOfPayloadRemaining < sizeAvailableForAlignedPayload + if (numFullTSPackets == 0 && numBytesOfPayloadRemaining > 0) { + // There wasn't enough payload left to form a full aligned payload, + // the last packet doesn't have to be aligned. + ++numTSPackets; + } else if (numFullTSPackets > 0 + && numBytesOfPayloadRemaining + + sizeAvailableForAlignedPayload > sizeAvailableForPayload) { + // The last packet emitted had a full aligned payload and together + // with the bytes remaining does exceed the unaligned payload + // size, so we need another packet. 
+ ++numTSPackets; + } +#endif } if (flags & EMIT_PAT_AND_PMT) { @@ -583,8 +719,9 @@ status_t TSPacketizer::packetize( // reserved = b111 // PCR_PID = kPCR_PID (13 bits) // reserved = b1111 - // program_info_length = 0x000 - // one or more elementary stream descriptions follow: + // program_info_length = 0x??? + // program_info_descriptors follow + // one or more elementary stream descriptions follow: // stream_type = 0x?? // reserved = b111 // elementary_PID = b? ???? ???? ???? (13 bits) @@ -616,8 +753,21 @@ status_t TSPacketizer::packetize( *ptr++ = 0x00; *ptr++ = 0xe0 | (kPID_PCR >> 8); *ptr++ = kPID_PCR & 0xff; - *ptr++ = 0xf0; - *ptr++ = 0x00; + + size_t program_info_length = 0; + for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) { + program_info_length += mProgramInfoDescriptors.itemAt(i)->size(); + } + + CHECK_LT(program_info_length, 0x400); + *ptr++ = 0xf0 | (program_info_length >> 8); + *ptr++ = (program_info_length & 0xff); + + for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) { + const sp<ABuffer> &desc = mProgramInfoDescriptors.itemAt(i); + memcpy(ptr, desc->data(), desc->size()); + ptr += desc->size(); + } for (size_t i = 0; i < mTracks.size(); ++i) { const sp<Track> &track = mTracks.itemAt(i); @@ -710,8 +860,6 @@ status_t TSPacketizer::packetize( uint64_t PTS = (timeUs * 9ll) / 100ll; - bool padding = (PES_packet_length < (188 - 10)); - if (PES_packet_length >= 65536) { // This really should only happen for video. CHECK(track->isVideo()); @@ -720,19 +868,37 @@ status_t TSPacketizer::packetize( PES_packet_length = 0; } + size_t sizeAvailableForPayload = 188 - 4 - 14 - numStuffingBytes; + if (PES_private_data_len > 0) { + sizeAvailableForPayload -= PES_private_data_len + 1; + } + + size_t copy = accessUnit->size(); + + if (copy > sizeAvailableForPayload) { + copy = sizeAvailableForPayload; + + if (alignPayload && copy > 16) { + copy -= (copy % 16); + } + } + + size_t numPaddingBytes = sizeAvailableForPayload - copy; + uint8_t *ptr = packetDataStart; *ptr++ = 0x47; *ptr++ = 0x40 | (track->PID() >> 8); *ptr++ = track->PID() & 0xff; - *ptr++ = (padding ? 0x30 : 0x10) | track->incrementContinuityCounter(); - if (padding) { - size_t paddingSize = 188 - 10 - PES_packet_length; - *ptr++ = paddingSize - 1; - if (paddingSize >= 2) { + *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10) + | track->incrementContinuityCounter(); + + if (numPaddingBytes > 0) { + *ptr++ = numPaddingBytes - 1; + if (numPaddingBytes >= 2) { *ptr++ = 0x00; - memset(ptr, 0xff, paddingSize - 2); - ptr += paddingSize - 2; + memset(ptr, 0xff, numPaddingBytes - 2); + ptr += numPaddingBytes - 2; } } @@ -768,25 +934,14 @@ status_t TSPacketizer::packetize( *ptr++ = 0xff; } - // 18 bytes of TS/PES header leave 188 - 18 = 170 bytes for the payload - - size_t sizeLeft = packetDataStart + 188 - ptr; - size_t copy = accessUnit->size(); - if (copy > sizeLeft) { - copy = sizeLeft; - } - memcpy(ptr, accessUnit->data(), copy); ptr += copy; - CHECK_EQ(sizeLeft, copy); - memset(ptr, 0xff, sizeLeft - copy); + CHECK_EQ(ptr, packetDataStart + 188); packetDataStart += 188; size_t offset = copy; while (offset < accessUnit->size()) { - bool padding = (accessUnit->size() - offset) < (188 - 4); - // for subsequent fragments of "buffer": // 0x47 // transport_error_indicator = b0 @@ -798,35 +953,40 @@ status_t TSPacketizer::packetize( // continuity_counter = b???? // the fragment of "buffer" follows. 
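The packet-count arithmetic added a few hunks above (the #else branch of the numTSPackets computation) is easier to follow outside patch form. Below is a minimal standalone sketch of the same computation, assuming the 188-byte TS packet size and the 16-byte HDCP alignment rule quoted in the patch; the function name and the pesHeaderSize parameter are illustrative, not part of the patch.

    #include <cstddef>

    // Standalone sketch of the numTSPackets computation. "pesHeaderSize" stands for
    // 14 + numStuffingBytes (+ PES_private_data_len + 1 when private data is present)
    // and is assumed to fit into the first 188-byte packet alongside the 4-byte TS header.
    static size_t countTSPackets(size_t payloadSize, size_t pesHeaderSize, bool alignPayload) {
        size_t avail = 188 - 4 - pesHeaderSize;      // payload room in packet 1
        size_t firstChunk = payloadSize;
        if (firstChunk > avail) {
            firstChunk = avail;
            if (alignPayload && firstChunk > 16) {
                firstChunk -= firstChunk % 16;       // HDCP: multiples of 16 bytes
            }
        }

        size_t remaining = payloadSize - firstChunk;
        size_t numPackets = 1;

        size_t perPacket = 188 - 4;                                       // 184
        size_t alignedPerPacket =
                alignPayload ? perPacket - (perPacket % 16) : perPacket;  // 176 or 184

        size_t fullPackets = remaining / alignedPerPacket;
        numPackets += fullPackets;
        remaining -= fullPackets * alignedPerPacket;

        if (remaining > 0) {
            // A short tail ends up in the same packet as the last aligned chunk when
            // the two together still fit in 184 bytes; otherwise it needs its own packet.
            if (fullPackets == 0 || remaining + alignedPerPacket > perPacket) {
                ++numPackets;
            }
        }
        return numPackets;
    }

For example, a 1000-byte access unit with a plain 14-byte PES header and alignment enabled works out to 6 TS packets, with the payload split 160 + 4x176 + 136.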
+ size_t sizeAvailableForPayload = 188 - 4; + + size_t copy = accessUnit->size() - offset; + + if (copy > sizeAvailableForPayload) { + copy = sizeAvailableForPayload; + + if (alignPayload && copy > 16) { + copy -= (copy % 16); + } + } + + size_t numPaddingBytes = sizeAvailableForPayload - copy; + uint8_t *ptr = packetDataStart; *ptr++ = 0x47; *ptr++ = 0x00 | (track->PID() >> 8); *ptr++ = track->PID() & 0xff; - *ptr++ = (padding ? 0x30 : 0x10) | track->incrementContinuityCounter(); + *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10) + | track->incrementContinuityCounter(); - if (padding) { - size_t paddingSize = 188 - 4 - (accessUnit->size() - offset); - *ptr++ = paddingSize - 1; - if (paddingSize >= 2) { + if (numPaddingBytes > 0) { + *ptr++ = numPaddingBytes - 1; + if (numPaddingBytes >= 2) { *ptr++ = 0x00; - memset(ptr, 0xff, paddingSize - 2); - ptr += paddingSize - 2; + memset(ptr, 0xff, numPaddingBytes - 2); + ptr += numPaddingBytes - 2; } } - // 4 bytes of TS header leave 188 - 4 = 184 bytes for the payload - - size_t sizeLeft = packetDataStart + 188 - ptr; - size_t copy = accessUnit->size() - offset; - if (copy > sizeLeft) { - copy = sizeLeft; - } - memcpy(ptr, accessUnit->data() + offset, copy); ptr += copy; - CHECK_EQ(sizeLeft, copy); - memset(ptr, 0xff, sizeLeft - copy); + CHECK_EQ(ptr, packetDataStart + 188); offset += copy; packetDataStart += 188; diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.h b/media/libstagefright/wifi-display/source/TSPacketizer.h index a37917d..4a664ee 100644 --- a/media/libstagefright/wifi-display/source/TSPacketizer.h +++ b/media/libstagefright/wifi-display/source/TSPacketizer.h @@ -32,7 +32,11 @@ struct AMessage; // Emits metadata tables (PAT and PMT) and timestamp stream (PCR) based // on flags. struct TSPacketizer : public RefBase { - TSPacketizer(); + enum { + EMIT_HDCP20_DESCRIPTOR = 1, + EMIT_HDCP21_DESCRIPTOR = 2, + }; + TSPacketizer(uint32_t flags); // Returns trackIndex or error. ssize_t addTrack(const sp<AMessage> &format); @@ -50,6 +54,8 @@ struct TSPacketizer : public RefBase { const uint8_t *PES_private_data, size_t PES_private_data_len, size_t numStuffingBytes = 0); + status_t extractCSDIfNecessary(size_t trackIndex); + // XXX to be removed once encoder config option takes care of this for // encrypted mode. 
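The PMT changes earlier in this file's diff move the HDCP descriptor out of the per-track descriptor list and into program-level descriptors written after program_info_length. The sketch below shows how those bytes are laid out, assuming the same 7-byte registration descriptor and 12-bit program_info_length field that appear in the patch; the helper names and the use of std::vector are illustrative simplifications.

    #include <stdint.h>
    #include <string.h>
    #include <vector>

    // Sketch: the 7-byte HDCP registration descriptor emitted by the patch
    // (descriptor_tag 0x05, format_identifier "HDCP", then the version byte).
    static std::vector<uint8_t> makeHDCPDescriptor(uint8_t hdcpVersion) {
        std::vector<uint8_t> desc(7);
        desc[0] = 0x05;           // descriptor_tag (registration_descriptor)
        desc[1] = 5;              // descriptor_length
        memcpy(&desc[2], "HDCP", 4);
        desc[6] = hdcpVersion;    // 0x20 for both HDCP 2.0 and 2.1, per the comment in the patch
        return desc;
    }

    // Sketch: reserved bits plus the 12-bit program_info_length, followed by the
    // program-level descriptors, as in the PMT section of TSPacketizer::packetize().
    static uint8_t *writeProgramInfo(
            uint8_t *ptr, const std::vector<std::vector<uint8_t>> &descriptors) {
        size_t program_info_length = 0;
        for (const auto &d : descriptors) {
            program_info_length += d.size();
        }
        *ptr++ = 0xf0 | ((program_info_length >> 8) & 0x0f);  // reserved bits + high nibble
        *ptr++ = program_info_length & 0xff;
        for (const auto &d : descriptors) {
            memcpy(ptr, d.data(), d.size());
            ptr += d.size();
        }
        return ptr;
    }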
sp<ABuffer> prependCSD( @@ -66,8 +72,11 @@ private: struct Track; + uint32_t mFlags; Vector<sp<Track> > mTracks; + Vector<sp<ABuffer> > mProgramInfoDescriptors; + unsigned mPATContinuityCounter; unsigned mPMTContinuityCounter; diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 08f67f9..22dd0b1 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -22,10 +22,10 @@ #include "PlaybackSession.h" #include "Parameters.h" #include "ParsedMessage.h" -#include "Sender.h" +#include "rtp/RTPSender.h" #include <binder/IServiceManager.h> -#include <gui/ISurfaceTexture.h> +#include <gui/IGraphicBufferProducer.h> #include <media/IHDCP.h> #include <media/IMediaPlayerService.h> #include <media/IRemoteDisplayClient.h> @@ -33,6 +33,7 @@ #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/Utils.h> #include <arpa/inet.h> #include <cutils/properties.h> @@ -41,9 +42,13 @@ namespace android { +// static +const AString WifiDisplaySource::sUserAgent = MakeUserAgent(); + WifiDisplaySource::WifiDisplaySource( const sp<ANetworkSession> &netSession, - const sp<IRemoteDisplayClient> &client) + const sp<IRemoteDisplayClient> &client, + const char *path) : mState(INITIALIZED), mNetSession(netSession), mClient(client), @@ -58,8 +63,16 @@ WifiDisplaySource::WifiDisplaySource( mIsHDCP2_0(false), mHDCPPort(0), mHDCPInitializationComplete(false), - mSetupTriggerDeferred(false) -{ + mSetupTriggerDeferred(false), + mPlaybackSessionEstablished(false) { + if (path != NULL) { + mMediaPath.setTo(path); + } + + mSupportedSourceVideoFormats.disableAll(); + + mSupportedSourceVideoFormats.setNativeResolution( + VideoFormats::RESOLUTION_CEA, 5); // 1280x720 p30 } WifiDisplaySource::~WifiDisplaySource() { @@ -151,9 +164,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { } else { err = -EINVAL; } - } - if (err == OK) { mState = AWAITING_CLIENT_CONNECTION; } @@ -253,7 +264,8 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { if (!strcasecmp(val, "pause") && mState == PLAYING) { mState = PLAYING_TO_PAUSED; sendTrigger(mClientSessionID, TRIGGER_PAUSE); - } else if (!strcasecmp(val, "play") && mState == PAUSED) { + } else if (!strcasecmp(val, "play") + && mState == PAUSED) { mState = PAUSED_TO_PLAYING; sendTrigger(mClientSessionID, TRIGGER_PLAY); } @@ -262,6 +274,11 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { break; } + case ANetworkSession::kWhatNetworkStall: + { + break; + } + default: TRESPASS(); } @@ -374,16 +391,41 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { mClient->onDisplayError( IRemoteDisplayClient::kDisplayErrorUnknown); } else if (what == PlaybackSession::kWhatSessionEstablished) { + mPlaybackSessionEstablished = true; + if (mClient != NULL) { - mClient->onDisplayConnected( - mClientInfo.mPlaybackSession->getSurfaceTexture(), - mClientInfo.mPlaybackSession->width(), - mClientInfo.mPlaybackSession->height(), - mUsingHDCP - ? IRemoteDisplayClient::kDisplayFlagSecure - : 0); + if (!mSinkSupportsVideo) { + mClient->onDisplayConnected( + NULL, // SurfaceTexture + 0, // width, + 0, // height, + mUsingHDCP + ? 
IRemoteDisplayClient::kDisplayFlagSecure + : 0); + } else { + size_t width, height; + + CHECK(VideoFormats::GetConfiguration( + mChosenVideoResolutionType, + mChosenVideoResolutionIndex, + &width, + &height, + NULL /* framesPerSecond */, + NULL /* interlaced */)); + + mClient->onDisplayConnected( + mClientInfo.mPlaybackSession + ->getSurfaceTexture(), + width, + height, + mUsingHDCP + ? IRemoteDisplayClient::kDisplayFlagSecure + : 0); + } } + finishPlay(); + if (mState == ABOUT_TO_PLAY) { mState = PLAYING; } @@ -564,55 +606,38 @@ status_t WifiDisplaySource::sendM3(int32_t sessionID) { } status_t WifiDisplaySource::sendM4(int32_t sessionID) { - // wfd_video_formats: - // 1 byte "native" - // 1 byte "preferred-display-mode-supported" 0 or 1 - // one or more avc codec structures - // 1 byte profile - // 1 byte level - // 4 byte CEA mask - // 4 byte VESA mask - // 4 byte HH mask - // 1 byte latency - // 2 byte min-slice-slice - // 2 byte slice-enc-params - // 1 byte framerate-control-support - // max-hres (none or 2 byte) - // max-vres (none or 2 byte) - CHECK_EQ(sessionID, mClientSessionID); - AString transportString = "UDP"; - - char val[PROPERTY_VALUE_MAX]; - if (property_get("media.wfd.enable-tcp", val, NULL) - && (!strcasecmp("true", val) || !strcmp("1", val))) { - ALOGI("Using TCP transport."); - transportString = "TCP"; - } - - // For 720p60: - // use "30 00 02 02 00000040 00000000 00000000 00 0000 0000 00 none none\r\n" - // For 720p30: - // use "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n" - // For 720p24: - // use "78 00 02 02 00008000 00000000 00000000 00 0000 0000 00 none none\r\n" - // For 1080p30: - // use "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n" - AString body = StringPrintf( - "wfd_video_formats: " -#if USE_1080P - "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n" -#else - "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n" -#endif - "wfd_audio_codecs: %s\r\n" - "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n" - "wfd_client_rtp_ports: RTP/AVP/%s;unicast %d 0 mode=play\r\n", - (mUsingPCMAudio - ? "LPCM 00000002 00" // 2 ch PCM 48kHz - : "AAC 00000001 00"), // 2 ch AAC 48kHz - mClientInfo.mLocalIP.c_str(), transportString.c_str(), mChosenRTPPort); + AString body; + + if (mSinkSupportsVideo) { + body.append("wfd_video_formats: "); + + VideoFormats chosenVideoFormat; + chosenVideoFormat.disableAll(); + chosenVideoFormat.setNativeResolution( + mChosenVideoResolutionType, mChosenVideoResolutionIndex); + + body.append(chosenVideoFormat.getFormatSpec(true /* forM4Message */)); + body.append("\r\n"); + } + + if (mSinkSupportsAudio) { + body.append( + StringPrintf("wfd_audio_codecs: %s\r\n", + (mUsingPCMAudio + ? 
"LPCM 00000002 00" // 2 ch PCM 48kHz + : "AAC 00000001 00"))); // 2 ch AAC 48kHz + } + + body.append( + StringPrintf( + "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n", + mClientInfo.mLocalIP.c_str())); + + body.append( + StringPrintf( + "wfd_client_rtp_ports: %s\r\n", mWfdClientRtpPorts.c_str())); AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n"; AppendCommonResponse(&request, mNextCSeq); @@ -775,53 +800,115 @@ status_t WifiDisplaySource::onReceiveM3Response( return ERROR_MALFORMED; } - unsigned port0, port1; + unsigned port0 = 0, port1 = 0; if (sscanf(value.c_str(), "RTP/AVP/UDP;unicast %u %u mode=play", &port0, - &port1) != 2 - || port0 == 0 || port0 > 65535 || port1 != 0) { - ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)", + &port1) == 2 + || sscanf(value.c_str(), + "RTP/AVP/TCP;unicast %u %u mode=play", + &port0, + &port1) == 2) { + if (port0 == 0 || port0 > 65535 || port1 != 0) { + ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)", + value.c_str()); + + return ERROR_MALFORMED; + } + } else if (strcmp(value.c_str(), "RTP/AVP/TCP;interleaved mode=play")) { + ALOGE("Unsupported value for wfd_client_rtp_ports (%s)", value.c_str()); - return ERROR_MALFORMED; + return ERROR_UNSUPPORTED; } + mWfdClientRtpPorts = value; mChosenRTPPort = port0; + if (!params->findParameter("wfd_video_formats", &value)) { + ALOGE("Sink doesn't report its choice of wfd_video_formats."); + return ERROR_MALFORMED; + } + + mSinkSupportsVideo = false; + + if (!(value == "none")) { + mSinkSupportsVideo = true; + if (!mSupportedSinkVideoFormats.parseFormatSpec(value.c_str())) { + ALOGE("Failed to parse sink provided wfd_video_formats (%s)", + value.c_str()); + + return ERROR_MALFORMED; + } + + if (!VideoFormats::PickBestFormat( + mSupportedSinkVideoFormats, + mSupportedSourceVideoFormats, + &mChosenVideoResolutionType, + &mChosenVideoResolutionIndex)) { + ALOGE("Sink and source share no commonly supported video " + "formats."); + + return ERROR_UNSUPPORTED; + } + + size_t width, height, framesPerSecond; + bool interlaced; + CHECK(VideoFormats::GetConfiguration( + mChosenVideoResolutionType, + mChosenVideoResolutionIndex, + &width, + &height, + &framesPerSecond, + &interlaced)); + + ALOGI("Picked video resolution %u x %u %c%u", + width, height, interlaced ? 
'i' : 'p', framesPerSecond); + } else { + ALOGI("Sink doesn't support video at all."); + } + if (!params->findParameter("wfd_audio_codecs", &value)) { ALOGE("Sink doesn't report its choice of wfd_audio_codecs."); return ERROR_MALFORMED; } - if (value == "none") { - ALOGE("Sink doesn't support audio at all."); - return ERROR_UNSUPPORTED; - } + mSinkSupportsAudio = false; - uint32_t modes; - GetAudioModes(value.c_str(), "AAC", &modes); + if (!(value == "none")) { + mSinkSupportsAudio = true; - bool supportsAAC = (modes & 1) != 0; // AAC 2ch 48kHz + uint32_t modes; + GetAudioModes(value.c_str(), "AAC", &modes); - GetAudioModes(value.c_str(), "LPCM", &modes); + bool supportsAAC = (modes & 1) != 0; // AAC 2ch 48kHz - bool supportsPCM = (modes & 2) != 0; // LPCM 2ch 48kHz + GetAudioModes(value.c_str(), "LPCM", &modes); - char val[PROPERTY_VALUE_MAX]; - if (supportsPCM - && property_get("media.wfd.use-pcm-audio", val, NULL) - && (!strcasecmp("true", val) || !strcmp("1", val))) { - ALOGI("Using PCM audio."); - mUsingPCMAudio = true; - } else if (supportsAAC) { - ALOGI("Using AAC audio."); - mUsingPCMAudio = false; - } else if (supportsPCM) { - ALOGI("Using PCM audio."); - mUsingPCMAudio = true; + bool supportsPCM = (modes & 2) != 0; // LPCM 2ch 48kHz + + char val[PROPERTY_VALUE_MAX]; + if (supportsPCM + && property_get("media.wfd.use-pcm-audio", val, NULL) + && (!strcasecmp("true", val) || !strcmp("1", val))) { + ALOGI("Using PCM audio."); + mUsingPCMAudio = true; + } else if (supportsAAC) { + ALOGI("Using AAC audio."); + mUsingPCMAudio = false; + } else if (supportsPCM) { + ALOGI("Using PCM audio."); + mUsingPCMAudio = true; + } else { + ALOGI("Sink doesn't support an audio format we do."); + return ERROR_UNSUPPORTED; + } } else { - ALOGI("Sink doesn't support an audio format we do."); + ALOGI("Sink doesn't support audio at all."); + } + + if (!mSinkSupportsVideo && !mSinkSupportsAudio) { + ALOGE("Sink supports neither video nor audio..."); return ERROR_UNSUPPORTED; } @@ -1065,7 +1152,7 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } - Sender::TransportMode transportMode = Sender::TRANSPORT_UDP; + RTPSender::TransportMode rtpMode = RTPSender::TRANSPORT_UDP; int clientRtp, clientRtcp; if (transport.startsWith("RTP/AVP/TCP;")) { @@ -1074,7 +1161,7 @@ status_t WifiDisplaySource::onSetupRequest( transport.c_str(), "interleaved", &interleaved) && sscanf(interleaved.c_str(), "%d-%d", &clientRtp, &clientRtcp) == 2) { - transportMode = Sender::TRANSPORT_TCP_INTERLEAVED; + rtpMode = RTPSender::TRANSPORT_TCP_INTERLEAVED; } else { bool badRequest = false; @@ -1096,7 +1183,7 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } - transportMode = Sender::TRANSPORT_TCP; + rtpMode = RTPSender::TRANSPORT_TCP; } } else if (transport.startsWith("RTP/AVP;unicast;") || transport.startsWith("RTP/AVP/UDP;unicast;")) { @@ -1138,7 +1225,7 @@ status_t WifiDisplaySource::onSetupRequest( sp<PlaybackSession> playbackSession = new PlaybackSession( - mNetSession, notify, mInterfaceAddr, mHDCP); + mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str()); looper()->registerHandler(playbackSession); @@ -1155,12 +1242,22 @@ status_t WifiDisplaySource::onSetupRequest( return ERROR_MALFORMED; } + RTPSender::TransportMode rtcpMode = RTPSender::TRANSPORT_UDP; + if (clientRtcp < 0) { + rtcpMode = RTPSender::TRANSPORT_NONE; + } + status_t err = playbackSession->init( mClientInfo.mRemoteIP.c_str(), clientRtp, + rtpMode, clientRtcp, - transportMode, - mUsingPCMAudio); + rtcpMode, + 
mSinkSupportsAudio, + mUsingPCMAudio, + mSinkSupportsVideo, + mChosenVideoResolutionType, + mChosenVideoResolutionIndex); if (err != OK) { looper()->unregisterHandler(playbackSession->id()); @@ -1184,7 +1281,7 @@ status_t WifiDisplaySource::onSetupRequest( AString response = "RTSP/1.0 200 OK\r\n"; AppendCommonResponse(&response, cseq, playbackSessionID); - if (transportMode == Sender::TRANSPORT_TCP_INTERLEAVED) { + if (rtpMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) { response.append( StringPrintf( "Transport: RTP/AVP/TCP;interleaved=%d-%d;", @@ -1193,7 +1290,7 @@ status_t WifiDisplaySource::onSetupRequest( int32_t serverRtp = playbackSession->getRTPPort(); AString transportString = "UDP"; - if (transportMode == Sender::TRANSPORT_TCP) { + if (rtpMode == RTPSender::TRANSPORT_TCP) { transportString = "TCP"; } @@ -1243,17 +1340,28 @@ status_t WifiDisplaySource::onPlayRequest( return ERROR_MALFORMED; } - ALOGI("Received PLAY request."); + if (mState != AWAITING_CLIENT_PLAY) { + ALOGW("Received PLAY request but we're in state %d", mState); - status_t err = playbackSession->play(); - CHECK_EQ(err, (status_t)OK); + sendErrorResponse( + sessionID, "455 Method Not Valid in This State", cseq); + + return INVALID_OPERATION; + } + + ALOGI("Received PLAY request."); + if (mPlaybackSessionEstablished) { + finishPlay(); + } else { + ALOGI("deferring PLAY request until session established."); + } AString response = "RTSP/1.0 200 OK\r\n"; AppendCommonResponse(&response, cseq, playbackSessionID); response.append("Range: npt=now-\r\n"); response.append("\r\n"); - err = mNetSession->sendRequest(sessionID, response.c_str()); + status_t err = mNetSession->sendRequest(sessionID, response.c_str()); if (err != OK) { return err; @@ -1264,14 +1372,20 @@ status_t WifiDisplaySource::onPlayRequest( return OK; } - playbackSession->finishPlay(); - CHECK_EQ(mState, AWAITING_CLIENT_PLAY); mState = ABOUT_TO_PLAY; return OK; } +void WifiDisplaySource::finishPlay() { + const sp<PlaybackSession> &playbackSession = + mClientInfo.mPlaybackSession; + + status_t err = playbackSession->play(); + CHECK_EQ(err, (status_t)OK); +} + status_t WifiDisplaySource::onPauseRequest( int32_t sessionID, int32_t cseq, @@ -1447,7 +1561,7 @@ void WifiDisplaySource::AppendCommonResponse( response->append(buf); response->append("\r\n"); - response->append("Server: Mine/1.0\r\n"); + response->append(StringPrintf("Server: %s\r\n", sUserAgent.c_str())); if (cseq >= 0) { response->append(StringPrintf("CSeq: %d\r\n", cseq)); @@ -1557,10 +1671,13 @@ void WifiDisplaySource::HDCPObserver::notify( status_t WifiDisplaySource::makeHDCP() { sp<IServiceManager> sm = defaultServiceManager(); sp<IBinder> binder = sm->getService(String16("media.player")); - sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder); + + sp<IMediaPlayerService> service = + interface_cast<IMediaPlayerService>(binder); + CHECK(service != NULL); - mHDCP = service->makeHDCP(); + mHDCP = service->makeHDCP(true /* createEncryptionModule */); if (mHDCP == NULL) { return ERROR_UNSUPPORTED; diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 974e070..44d3e4d 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -19,6 +19,7 @@ #define WIFI_DISPLAY_SOURCE_H_ #include "ANetworkSession.h" +#include "VideoFormats.h" #include <media/stagefright/foundation/AHandler.h> @@ -26,8 +27,6 @@ namespace 
android { -#define USE_1080P 0 - struct IHDCP; struct IRemoteDisplayClient; struct ParsedMessage; @@ -39,7 +38,8 @@ struct WifiDisplaySource : public AHandler { WifiDisplaySource( const sp<ANetworkSession> &netSession, - const sp<IRemoteDisplayClient> &client); + const sp<IRemoteDisplayClient> &client, + const char *path = NULL); status_t start(const char *iface); status_t stop(); @@ -111,16 +111,29 @@ private: static const int64_t kPlaybackSessionTimeoutUs = kPlaybackSessionTimeoutSecs * 1000000ll; + static const AString sUserAgent; + State mState; + VideoFormats mSupportedSourceVideoFormats; sp<ANetworkSession> mNetSession; sp<IRemoteDisplayClient> mClient; + AString mMediaPath; struct in_addr mInterfaceAddr; int32_t mSessionID; uint32_t mStopReplyID; + AString mWfdClientRtpPorts; int32_t mChosenRTPPort; // extracted from "wfd_client_rtp_ports" + bool mSinkSupportsVideo; + VideoFormats mSupportedSinkVideoFormats; + + VideoFormats::ResolutionType mChosenVideoResolutionType; + size_t mChosenVideoResolutionIndex; + + bool mSinkSupportsAudio; + bool mUsingPCMAudio; int32_t mClientSessionID; @@ -149,6 +162,8 @@ private: bool mHDCPInitializationComplete; bool mSetupTriggerDeferred; + bool mPlaybackSessionEstablished; + status_t makeHDCP(); // <<<< HDCP specific section @@ -245,6 +260,8 @@ private: void finishStopAfterDisconnectingClient(); void finishStop2(); + void finishPlay(); + DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySource); }; diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp deleted file mode 100644 index 1cd82c3..0000000 --- a/media/libstagefright/wifi-display/udptest.cpp +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Copyright 2012, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NEBUG 0 -#define LOG_TAG "udptest" -#include <utils/Log.h> - -#include "ANetworkSession.h" - -#include <binder/ProcessState.h> -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/AHandler.h> -#include <media/stagefright/foundation/ALooper.h> -#include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/Utils.h> - -namespace android { - -struct TestHandler : public AHandler { - TestHandler(const sp<ANetworkSession> &netSession); - - void startServer(unsigned localPort); - void startClient(const char *remoteHost, unsigned remotePort); - -protected: - virtual ~TestHandler(); - - virtual void onMessageReceived(const sp<AMessage> &msg); - -private: - enum { - kWhatStartServer, - kWhatStartClient, - kWhatUDPNotify, - kWhatSendPacket, - }; - - sp<ANetworkSession> mNetSession; - - bool mIsServer; - bool mConnected; - int32_t mUDPSession; - uint32_t mSeqNo; - double mTotalTimeUs; - int32_t mCount; - - void postSendPacket(int64_t delayUs = 0ll); - - DISALLOW_EVIL_CONSTRUCTORS(TestHandler); -}; - -TestHandler::TestHandler(const sp<ANetworkSession> &netSession) - : mNetSession(netSession), - mIsServer(false), - mConnected(false), - mUDPSession(0), - mSeqNo(0), - mTotalTimeUs(0.0), - mCount(0) { -} - -TestHandler::~TestHandler() { -} - -void TestHandler::startServer(unsigned localPort) { - sp<AMessage> msg = new AMessage(kWhatStartServer, id()); - msg->setInt32("localPort", localPort); - msg->post(); -} - -void TestHandler::startClient(const char *remoteHost, unsigned remotePort) { - sp<AMessage> msg = new AMessage(kWhatStartClient, id()); - msg->setString("remoteHost", remoteHost); - msg->setInt32("remotePort", remotePort); - msg->post(); -} - -void TestHandler::onMessageReceived(const sp<AMessage> &msg) { - switch (msg->what()) { - case kWhatStartClient: - { - AString remoteHost; - CHECK(msg->findString("remoteHost", &remoteHost)); - - int32_t remotePort; - CHECK(msg->findInt32("remotePort", &remotePort)); - - sp<AMessage> notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - 0 /* localPort */, - remoteHost.c_str(), - remotePort, - notify, - &mUDPSession)); - - postSendPacket(); - break; - } - - case kWhatStartServer: - { - mIsServer = true; - - int32_t localPort; - CHECK(msg->findInt32("localPort", &localPort)); - - sp<AMessage> notify = new AMessage(kWhatUDPNotify, id()); - - CHECK_EQ((status_t)OK, - mNetSession->createUDPSession( - localPort, notify, &mUDPSession)); - - break; - } - - case kWhatSendPacket: - { - char buffer[12]; - memset(buffer, 0, sizeof(buffer)); - - buffer[0] = mSeqNo >> 24; - buffer[1] = (mSeqNo >> 16) & 0xff; - buffer[2] = (mSeqNo >> 8) & 0xff; - buffer[3] = mSeqNo & 0xff; - ++mSeqNo; - - int64_t nowUs = ALooper::GetNowUs(); - buffer[4] = nowUs >> 56; - buffer[5] = (nowUs >> 48) & 0xff; - buffer[6] = (nowUs >> 40) & 0xff; - buffer[7] = (nowUs >> 32) & 0xff; - buffer[8] = (nowUs >> 24) & 0xff; - buffer[9] = (nowUs >> 16) & 0xff; - buffer[10] = (nowUs >> 8) & 0xff; - buffer[11] = nowUs & 0xff; - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mUDPSession, buffer, sizeof(buffer))); - - postSendPacket(20000ll); - break; - } - - case kWhatUDPNotify: - { - int32_t reason; - CHECK(msg->findInt32("reason", &reason)); - - switch (reason) { - case ANetworkSession::kWhatError: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - int32_t err; - CHECK(msg->findInt32("err", 
&err)); - - AString detail; - CHECK(msg->findString("detail", &detail)); - - ALOGE("An error occurred in session %d (%d, '%s/%s').", - sessionID, - err, - detail.c_str(), - strerror(-err)); - - mNetSession->destroySession(sessionID); - break; - } - - case ANetworkSession::kWhatDatagram: - { - int32_t sessionID; - CHECK(msg->findInt32("sessionID", &sessionID)); - - sp<ABuffer> data; - CHECK(msg->findBuffer("data", &data)); - - if (mIsServer) { - if (!mConnected) { - AString fromAddr; - CHECK(msg->findString("fromAddr", &fromAddr)); - - int32_t fromPort; - CHECK(msg->findInt32("fromPort", &fromPort)); - - CHECK_EQ((status_t)OK, - mNetSession->connectUDPSession( - mUDPSession, fromAddr.c_str(), fromPort)); - - mConnected = true; - } - - int64_t nowUs = ALooper::GetNowUs(); - - sp<ABuffer> buffer = new ABuffer(data->size() + 8); - memcpy(buffer->data(), data->data(), data->size()); - - uint8_t *ptr = buffer->data() + data->size(); - - *ptr++ = nowUs >> 56; - *ptr++ = (nowUs >> 48) & 0xff; - *ptr++ = (nowUs >> 40) & 0xff; - *ptr++ = (nowUs >> 32) & 0xff; - *ptr++ = (nowUs >> 24) & 0xff; - *ptr++ = (nowUs >> 16) & 0xff; - *ptr++ = (nowUs >> 8) & 0xff; - *ptr++ = nowUs & 0xff; - - CHECK_EQ((status_t)OK, - mNetSession->sendRequest( - mUDPSession, buffer->data(), buffer->size())); - } else { - CHECK_EQ(data->size(), 20u); - - uint32_t seqNo = U32_AT(data->data()); - int64_t t1 = U64_AT(data->data() + 4); - int64_t t2 = U64_AT(data->data() + 12); - - int64_t t3; - CHECK(data->meta()->findInt64("arrivalTimeUs", &t3)); - -#if 0 - printf("roundtrip seqNo %u, time = %lld us\n", - seqNo, t3 - t1); -#else - mTotalTimeUs += t3 - t1; - ++mCount; - printf("avg. roundtrip time %.2f us\n", mTotalTimeUs / mCount); -#endif - } - break; - } - - default: - TRESPASS(); - } - - break; - } - - default: - TRESPASS(); - } -} - -void TestHandler::postSendPacket(int64_t delayUs) { - (new AMessage(kWhatSendPacket, id()))->post(delayUs); -} - -} // namespace android - -static void usage(const char *me) { - fprintf(stderr, - "usage: %s -c host[:port]\tconnect to test server\n" - " -l \tcreate a test server\n", - me); -} - -int main(int argc, char **argv) { - using namespace android; - - ProcessState::self()->startThreadPool(); - - int32_t localPort = -1; - int32_t connectToPort = -1; - AString connectToHost; - - int res; - while ((res = getopt(argc, argv, "hc:l:")) >= 0) { - switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - connectToHost = optarg; - connectToPort = 49152; - } else { - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - } - break; - } - - case 'l': - { - char *end; - localPort = strtol(optarg, &end, 10); - - if (*end != '\0' || end == optarg - || localPort < 1 || localPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - break; - } - - case '?': - case 'h': - usage(argv[0]); - exit(1); - } - } - - if (localPort < 0 && connectToPort < 0) { - fprintf(stderr, - "You need to select either client or server mode.\n"); - exit(1); - } - - sp<ANetworkSession> netSession = new ANetworkSession; - netSession->start(); - - sp<ALooper> looper = new ALooper; - - sp<TestHandler> handler = new TestHandler(netSession); - looper->registerHandler(handler); - - if (localPort >= 0) { - handler->startServer(localPort); - } else { 
- handler->startClient(connectToHost.c_str(), connectToPort); - } - - looper->start(true /* runOnCallingThread */); - - return 0; -} - diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp index 03a1123..c947765 100644 --- a/media/libstagefright/wifi-display/wfd.cpp +++ b/media/libstagefright/wifi-display/wfd.cpp @@ -18,11 +18,11 @@ #define LOG_TAG "wfd" #include <utils/Log.h> -#include "sink/WifiDisplaySink.h" #include "source/WifiDisplaySource.h" #include <binder/ProcessState.h> #include <binder/IServiceManager.h> +#include <gui/ISurfaceComposer.h> #include <gui/SurfaceComposerClient.h> #include <media/AudioSystem.h> #include <media/IMediaPlayerService.h> @@ -30,16 +30,16 @@ #include <media/IRemoteDisplayClient.h> #include <media/stagefright/DataSource.h> #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <ui/DisplayInfo.h> namespace android { static void usage(const char *me) { fprintf(stderr, "usage:\n" - " %s -c host[:port]\tconnect to wifi source\n" - " -u uri \tconnect to an rtsp uri\n" - " -l ip[:port] \tlisten on the specified port " - "(create a sink)\n", + " %s -l iface[:port]\tcreate a wifi display source\n" + " -f(ilename) \tstream media\n", me); } @@ -47,7 +47,7 @@ struct RemoteDisplayClient : public BnRemoteDisplayClient { RemoteDisplayClient(); virtual void onDisplayConnected( - const sp<ISurfaceTexture> &surfaceTexture, + const sp<IGraphicBufferProducer> &bufferProducer, uint32_t width, uint32_t height, uint32_t flags); @@ -67,7 +67,7 @@ private: bool mDone; sp<SurfaceComposerClient> mComposerClient; - sp<ISurfaceTexture> mSurfaceTexture; + sp<IGraphicBufferProducer> mSurfaceTexture; sp<IBinder> mDisplayBinder; DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplayClient); @@ -83,29 +83,31 @@ RemoteDisplayClient::~RemoteDisplayClient() { } void RemoteDisplayClient::onDisplayConnected( - const sp<ISurfaceTexture> &surfaceTexture, + const sp<IGraphicBufferProducer> &bufferProducer, uint32_t width, uint32_t height, uint32_t flags) { ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x", width, height, flags); - mSurfaceTexture = surfaceTexture; - mDisplayBinder = mComposerClient->createDisplay( - String8("foo"), false /* secure */); + if (bufferProducer != NULL) { + mSurfaceTexture = bufferProducer; + mDisplayBinder = mComposerClient->createDisplay( + String8("foo"), false /* secure */); - SurfaceComposerClient::openGlobalTransaction(); - mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture); + SurfaceComposerClient::openGlobalTransaction(); + mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture); - Rect layerStackRect(1280, 720); // XXX fix this. - Rect displayRect(1280, 720); + Rect layerStackRect(1280, 720); // XXX fix this. 
+ Rect displayRect(1280, 720); - mComposerClient->setDisplayProjection( - mDisplayBinder, 0 /* 0 degree rotation */, - layerStackRect, - displayRect); + mComposerClient->setDisplayProjection( + mDisplayBinder, 0 /* 0 degree rotation */, + layerStackRect, + displayRect); - SurfaceComposerClient::closeGlobalTransaction(); + SurfaceComposerClient::closeGlobalTransaction(); + } } void RemoteDisplayClient::onDisplayDisconnected() { @@ -178,6 +180,26 @@ static void createSource(const AString &addr, int32_t port) { enableAudioSubmix(false /* enable */); } +static void createFileSource( + const AString &addr, int32_t port, const char *path) { + sp<ANetworkSession> session = new ANetworkSession; + session->start(); + + sp<ALooper> looper = new ALooper; + looper->start(); + + sp<RemoteDisplayClient> client = new RemoteDisplayClient; + sp<WifiDisplaySource> source = new WifiDisplaySource(session, client, path); + looper->registerHandler(source); + + AString iface = StringPrintf("%s:%d", addr.c_str(), port); + CHECK_EQ((status_t)OK, source->start(iface.c_str())); + + client->waitUntilDone(); + + source->stop(); +} + } // namespace android int main(int argc, char **argv) { @@ -187,41 +209,17 @@ int main(int argc, char **argv) { DataSource::RegisterDefaultSniffers(); - AString connectToHost; - int32_t connectToPort = -1; - AString uri; - AString listenOnAddr; int32_t listenOnPort = -1; + AString path; + int res; - while ((res = getopt(argc, argv, "hc:l:u:")) >= 0) { + while ((res = getopt(argc, argv, "hl:f:")) >= 0) { switch (res) { - case 'c': - { - const char *colonPos = strrchr(optarg, ':'); - - if (colonPos == NULL) { - connectToHost = optarg; - connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort; - } else { - connectToHost.setTo(optarg, colonPos - optarg); - - char *end; - connectToPort = strtol(colonPos + 1, &end, 10); - - if (*end != '\0' || end == colonPos + 1 - || connectToPort < 1 || connectToPort > 65535) { - fprintf(stderr, "Illegal port specified.\n"); - exit(1); - } - } - break; - } - - case 'u': + case 'f': { - uri = optarg; + path = optarg; break; } @@ -255,47 +253,17 @@ int main(int argc, char **argv) { } } - if (connectToPort >= 0 && listenOnPort >= 0) { - fprintf(stderr, - "You can connect to a source or create one, " - "but not both at the same time.\n"); - exit(1); - } - if (listenOnPort >= 0) { - createSource(listenOnAddr, listenOnPort); - exit(0); - } - - if (connectToPort < 0 && uri.empty()) { - fprintf(stderr, - "You need to select either source host or uri.\n"); - - exit(1); - } - - if (connectToPort >= 0 && !uri.empty()) { - fprintf(stderr, - "You need to either connect to a wfd host or an rtsp url, " - "not both.\n"); - exit(1); - } - - sp<ANetworkSession> session = new ANetworkSession; - session->start(); - - sp<ALooper> looper = new ALooper; - - sp<WifiDisplaySink> sink = new WifiDisplaySink(session); - looper->registerHandler(sink); + if (path.empty()) { + createSource(listenOnAddr, listenOnPort); + } else { + createFileSource(listenOnAddr, listenOnPort, path.c_str()); + } - if (connectToPort >= 0) { - sink->start(connectToHost.c_str(), connectToPort); - } else { - sink->start(uri.c_str()); + exit(0); } - looper->start(true /* runOnCallingThread */); + usage(argv[0]); return 0; } diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk index a4253f6..b3f7b1b 100644 --- a/media/libstagefright/yuv/Android.mk +++ b/media/libstagefright/yuv/Android.mk @@ -6,7 +6,8 @@ LOCAL_SRC_FILES:= \ YUVCanvas.cpp LOCAL_SHARED_LIBRARIES := \ - libcutils 
+ libcutils \ + liblog LOCAL_MODULE:= libstagefright_yuv diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index 5a73cdd..1ac647a 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -1,4 +1,13 @@ LOCAL_PATH:= $(call my-dir) + +ifneq ($(BOARD_USE_CUSTOM_MEDIASERVEREXTENSIONS),true) +include $(CLEAR_VARS) +LOCAL_SRC_FILES := register.cpp +LOCAL_MODULE := libregistermsext +LOCAL_MODULE_TAGS := optional +include $(BUILD_STATIC_LIBRARY) +endif + include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ @@ -7,16 +16,23 @@ LOCAL_SRC_FILES:= \ LOCAL_SHARED_LIBRARIES := \ libaudioflinger \ libcameraservice \ + libmedialogservice \ + libcutils \ + libnbaio \ + libmedia \ libmediaplayerservice \ libutils \ + liblog \ libbinder -# FIXME The duplicate audioflinger is temporary +LOCAL_STATIC_LIBRARIES := \ + libregistermsext + LOCAL_C_INCLUDES := \ frameworks/av/media/libmediaplayerservice \ + frameworks/av/services/medialog \ frameworks/av/services/audioflinger \ - frameworks/av/services/camera/libcameraservice \ - frameworks/native/services/audioflinger + frameworks/av/services/camera/libcameraservice LOCAL_MODULE:= mediaserver diff --git a/media/mediaserver/RegisterExtensions.h b/media/mediaserver/RegisterExtensions.h new file mode 100644 index 0000000..9a8c03c --- /dev/null +++ b/media/mediaserver/RegisterExtensions.h @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef REGISTER_EXTENSIONS_H +#define REGISTER_EXTENSIONS_H + +extern void registerExtensions(); + +#endif // REGISTER_EXTENSIONS_H diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp index ddd5b84..d5207d5 100644 --- a/media/mediaserver/main_mediaserver.cpp +++ b/media/mediaserver/main_mediaserver.cpp @@ -18,14 +18,20 @@ #define LOG_TAG "mediaserver" //#define LOG_NDEBUG 0 +#include <fcntl.h> +#include <sys/prctl.h> +#include <sys/wait.h> #include <binder/IPCThreadState.h> #include <binder/ProcessState.h> #include <binder/IServiceManager.h> +#include <cutils/properties.h> #include <utils/Log.h> +#include "RegisterExtensions.h" // from LOCAL_C_INCLUDES #include "AudioFlinger.h" #include "CameraService.h" +#include "MediaLogService.h" #include "MediaPlayerService.h" #include "AudioPolicyService.h" @@ -34,13 +40,96 @@ using namespace android; int main(int argc, char** argv) { signal(SIGPIPE, SIG_IGN); - sp<ProcessState> proc(ProcessState::self()); - sp<IServiceManager> sm = defaultServiceManager(); - ALOGI("ServiceManager: %p", sm.get()); - AudioFlinger::instantiate(); - MediaPlayerService::instantiate(); - CameraService::instantiate(); - AudioPolicyService::instantiate(); - ProcessState::self()->startThreadPool(); - IPCThreadState::self()->joinThreadPool(); + char value[PROPERTY_VALUE_MAX]; + bool doLog = (property_get("ro.test_harness", value, "0") > 0) && (atoi(value) == 1); + pid_t childPid; + // FIXME The advantage of making the process containing media.log service the parent process of + // the process that contains all the other real services, is that it allows us to collect more + // detailed information such as signal numbers, stop and continue, resource usage, etc. + // But it is also more complex. Consider replacing this by independent processes, and using + // binder on death notification instead. 
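The FIXME comment above sketches the design that the following hunk implements: the process holding the media.log service forks, keeps supervising the child that runs the real services, and the child arranges to die along with its parent. A rough, self-contained sketch of that supervision pattern, stripped of the Android service setup (everything below is illustrative rather than the patch's exact code):

    #include <errno.h>
    #include <signal.h>
    #include <stdio.h>
    #include <sys/prctl.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main() {
        pid_t childPid = fork();
        if (childPid != 0) {
            // Parent: acts as the logging/supervisor process and watches the child.
            for (;;) {
                siginfo_t info;
                int ret = waitid(P_PID, childPid, &info,
                                 WEXITED | WSTOPPED | WCONTINUED);
                if (ret < 0 && errno == EINTR) {
                    continue;               // interrupted by a signal, keep waiting
                }
                if (ret < 0) {
                    break;
                }
                fprintf(stderr, "child pid %d si_code %d status %d\n",
                        info.si_pid, info.si_code, info.si_status);
                if (info.si_code == CLD_EXITED || info.si_code == CLD_KILLED
                        || info.si_code == CLD_DUMPED) {
                    _exit(0);               // child is gone for good; nothing left to supervise
                }
            }
        } else {
            // Child: if the supervising parent dies first, take this process down too,
            // but detach into our own process group so our death doesn't kill the parent.
            prctl(PR_SET_PDEATHSIG, SIGKILL);
            setpgid(0, 0);
            // ... start the real services here ...
            pause();
        }
        return 0;
    }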
+ if (doLog && (childPid = fork()) != 0) { + // media.log service + //prctl(PR_SET_NAME, (unsigned long) "media.log", 0, 0, 0); + // unfortunately ps ignores PR_SET_NAME for the main thread, so use this ugly hack + strcpy(argv[0], "media.log"); + sp<ProcessState> proc(ProcessState::self()); + MediaLogService::instantiate(); + ProcessState::self()->startThreadPool(); + for (;;) { + siginfo_t info; + int ret = waitid(P_PID, childPid, &info, WEXITED | WSTOPPED | WCONTINUED); + if (ret == EINTR) { + continue; + } + if (ret < 0) { + break; + } + char buffer[32]; + const char *code; + switch (info.si_code) { + case CLD_EXITED: + code = "CLD_EXITED"; + break; + case CLD_KILLED: + code = "CLD_KILLED"; + break; + case CLD_DUMPED: + code = "CLD_DUMPED"; + break; + case CLD_STOPPED: + code = "CLD_STOPPED"; + break; + case CLD_TRAPPED: + code = "CLD_TRAPPED"; + break; + case CLD_CONTINUED: + code = "CLD_CONTINUED"; + break; + default: + snprintf(buffer, sizeof(buffer), "unknown (%d)", info.si_code); + code = buffer; + break; + } + struct rusage usage; + getrusage(RUSAGE_CHILDREN, &usage); + ALOG(LOG_ERROR, "media.log", "pid %d status %d code %s user %ld.%03lds sys %ld.%03lds", + info.si_pid, info.si_status, code, + usage.ru_utime.tv_sec, usage.ru_utime.tv_usec / 1000, + usage.ru_stime.tv_sec, usage.ru_stime.tv_usec / 1000); + sp<IServiceManager> sm = defaultServiceManager(); + sp<IBinder> binder = sm->getService(String16("media.log")); + if (binder != 0) { + Vector<String16> args; + binder->dump(-1, args); + } + switch (info.si_code) { + case CLD_EXITED: + case CLD_KILLED: + case CLD_DUMPED: { + ALOG(LOG_INFO, "media.log", "exiting"); + _exit(0); + // not reached + } + default: + break; + } + } + } else { + // all other services + if (doLog) { + prctl(PR_SET_PDEATHSIG, SIGKILL); // if parent media.log dies before me, kill me also + setpgid(0, 0); // but if I die first, don't kill my parent + } + sp<ProcessState> proc(ProcessState::self()); + sp<IServiceManager> sm = defaultServiceManager(); + ALOGI("ServiceManager: %p", sm.get()); + AudioFlinger::instantiate(); + MediaPlayerService::instantiate(); + CameraService::instantiate(); + AudioPolicyService::instantiate(); + registerExtensions(); + ProcessState::self()->startThreadPool(); + IPCThreadState::self()->joinThreadPool(); + } } diff --git a/media/mediaserver/register.cpp b/media/mediaserver/register.cpp new file mode 100644 index 0000000..4ffb2ba --- /dev/null +++ b/media/mediaserver/register.cpp @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "RegisterExtensions.h" + +void registerExtensions() +{ +} diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk index bee28d4..ac608a1 100644 --- a/media/mtp/Android.mk +++ b/media/mtp/Android.mk @@ -42,6 +42,6 @@ LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST # Needed for <bionic_time.h> LOCAL_C_INCLUDES := bionic/libc/private -LOCAL_SHARED_LIBRARIES := libutils libcutils libusbhost libbinder +LOCAL_SHARED_LIBRARIES := libutils libcutils liblog libusbhost libbinder include $(BUILD_SHARED_LIBRARY) diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp index 662a93d..df87db4 100644 --- a/media/mtp/MtpServer.cpp +++ b/media/mtp/MtpServer.cpp @@ -704,7 +704,8 @@ MtpResponseCode MtpServer::doGetObjectInfo() { mData.putUInt32(info.mAssociationDesc); mData.putUInt32(info.mSequenceNumber); mData.putString(info.mName); - mData.putEmptyString(); // date created + formatDateTime(info.mDateCreated, date, sizeof(date)); + mData.putString(date); // date created formatDateTime(info.mDateModified, date, sizeof(date)); mData.putString(date); // date modified mData.putEmptyString(); // keywords @@ -1118,7 +1119,7 @@ MtpResponseCode MtpServer::doSendPartialObject() { int initialData = ret - MTP_CONTAINER_HEADER_SIZE; if (initialData > 0) { - ret = write(edit->mFD, mData.getData(), initialData); + ret = pwrite(edit->mFD, mData.getData(), initialData, offset); offset += initialData; length -= initialData; }
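On the final MtpServer hunk: doSendPartialObject() must place the first chunk of request data at the offset the initiator asked for, so the plain write() (which lands at whatever the fd's current file position happens to be) is replaced by pwrite() at that explicit offset. A minimal illustration of the difference, assuming an already-open writable fd; the helper name is hypothetical.

    #include <fcntl.h>
    #include <stdint.h>
    #include <unistd.h>

    // Sketch: put "len" bytes at byte position "offset" of an object file,
    // without depending on (or disturbing) the fd's current file position.
    static ssize_t writeChunkAtOffset(int fd, const void *data, size_t len, uint64_t offset) {
        // write() would append at the current position; pwrite() takes the
        // target offset explicitly and leaves the position unchanged.
        return pwrite(fd, data, len, (off_t)offset);
    }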