diff options
Diffstat (limited to 'media')
296 files changed, 22525 insertions, 7067 deletions
diff --git a/media/img_utils/include/img_utils/DngUtils.h b/media/img_utils/include/img_utils/DngUtils.h index 4389b02..3dcedc5 100644 --- a/media/img_utils/include/img_utils/DngUtils.h +++ b/media/img_utils/include/img_utils/DngUtils.h @@ -31,6 +31,7 @@ namespace android {  namespace img_utils {  #define NELEMS(x) ((int) (sizeof(x) / sizeof((x)[0]))) +#define CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x)))  /**   * Utility class for building values for the OpcodeList tags specified @@ -107,13 +108,49 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {                                      uint32_t mapPlanes,                                      const float* mapGains); +        /** +         * Add WarpRectilinear opcode for the given metadata parameters. +         * +         * Returns OK on success, or a negative error code. +         */ +        virtual status_t addWarpRectilinearForMetadata(const float* kCoeffs, +                                                       uint32_t activeArrayWidth, +                                                       uint32_t activeArrayHeight, +                                                       float opticalCenterX, +                                                       float opticalCenterY); + +        /** +         * Add a WarpRectilinear opcode. +         * +         * numPlanes - Number of planes included in this opcode. +         * opticalCenterX, opticalCenterY - Normalized x,y coordinates of the sensor optical +         *          center relative to the top,left pixel of the produced images (e.g. [0.5, 0.5] +         *          gives a sensor optical center in the image center. +         * kCoeffs - A list of coefficients for the polynomial equation representing the distortion +         *          correction.  For each plane, 6 coefficients must be included: +         *          {k_r0, k_r1, k_r2, k_r3, k_t0, k_t1}.  
See the DNG 1.4 specification for an +         *          outline of the polynomial used here. +         * +         * Returns OK on success, or a negative error code. +         */ +        virtual status_t addWarpRectilinear(uint32_t numPlanes, +                                            double opticalCenterX, +                                            double opticalCenterY, +                                            const double* kCoeffs); +          // TODO: Add other Opcode methods      protected:          static const uint32_t FLAG_OPTIONAL = 0x1u;          static const uint32_t FLAG_OPTIONAL_FOR_PREVIEW = 0x2u; +        // Opcode IDs          enum { +            WARP_RECTILINEAR_ID = 1,              GAIN_MAP_ID = 9, +        }; + +        // LSM mosaic indices +        enum {              LSM_R_IND = 0,              LSM_GE_IND = 1,              LSM_GO_IND = 2, diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp index d3b4a35..b213403 100644 --- a/media/img_utils/src/DngUtils.cpp +++ b/media/img_utils/src/DngUtils.cpp @@ -16,6 +16,10 @@  #include <img_utils/DngUtils.h> +#include <inttypes.h> + +#include <math.h> +  namespace android {  namespace img_utils { @@ -229,7 +233,7 @@ status_t OpcodeListBuilder::addGainMap(uint32_t top,      err = mEndianOut.write(version, 0, NELEMS(version));      if (err != OK) return err; -    // Do not include optional flag for preview, as this can have a large effect on the output. 
+    // Allow this opcode to be skipped if not supported      uint32_t flags = FLAG_OPTIONAL;      err = mEndianOut.write(&flags, 0, 1); @@ -278,5 +282,96 @@ status_t OpcodeListBuilder::addGainMap(uint32_t top,      return OK;  } +status_t OpcodeListBuilder::addWarpRectilinearForMetadata(const float* kCoeffs, +                                                          uint32_t activeArrayWidth, +                                                          uint32_t activeArrayHeight, +                                                          float opticalCenterX, +                                                          float opticalCenterY) { +    if (activeArrayWidth <= 1 || activeArrayHeight <= 1) { +        ALOGE("%s: Cannot add opcode for active array with dimensions w=%" PRIu32 ", h=%" PRIu32, +                __FUNCTION__, activeArrayWidth, activeArrayHeight); +        return BAD_VALUE; +    } + +    double normalizedOCX = opticalCenterX / static_cast<double>(activeArrayWidth - 1); +    double normalizedOCY = opticalCenterY / static_cast<double>(activeArrayHeight - 1); + +    normalizedOCX = CLAMP(normalizedOCX, 0, 1); +    normalizedOCY = CLAMP(normalizedOCY, 0, 1); + +    // Conversion factors from Camera2 K factors to DNG spec. K factors: +    // +    //      Note: these are necessary because our unit system assumes a +    //      normalized max radius of sqrt(2), whereas the DNG spec's +    //      WarpRectilinear opcode assumes a normalized max radius of 1. +    //      Thus, each K coefficient must include the domain scaling +    //      factor (the DNG domain is scaled by sqrt(2) to emulate the +    //      domain used by the Camera2 specification). 
+ +    const double c_0 = sqrt(2); +    const double c_1 = 2 * sqrt(2); +    const double c_2 = 4 * sqrt(2); +    const double c_3 = 8 * sqrt(2); +    const double c_4 = 2; +    const double c_5 = 2; + +    const double coeffs[] = { c_0 * kCoeffs[0], +                              c_1 * kCoeffs[1], +                              c_2 * kCoeffs[2], +                              c_3 * kCoeffs[3], +                              c_4 * kCoeffs[4], +                              c_5 * kCoeffs[5] }; + + +    return addWarpRectilinear(/*numPlanes*/1, +                              /*opticalCenterX*/normalizedOCX, +                              /*opticalCenterY*/normalizedOCY, +                              coeffs); +} + +status_t OpcodeListBuilder::addWarpRectilinear(uint32_t numPlanes, +                                               double opticalCenterX, +                                               double opticalCenterY, +                                               const double* kCoeffs) { + +    uint32_t opcodeId = WARP_RECTILINEAR_ID; + +    status_t err = mEndianOut.write(&opcodeId, 0, 1); +    if (err != OK) return err; + +    uint8_t version[] = {1, 3, 0, 0}; +    err = mEndianOut.write(version, 0, NELEMS(version)); +    if (err != OK) return err; + +    // Allow this opcode to be skipped if not supported +    uint32_t flags = FLAG_OPTIONAL; + +    err = mEndianOut.write(&flags, 0, 1); +    if (err != OK) return err; + +    const uint32_t NUMBER_CENTER_ARGS = 2; +    const uint32_t NUMBER_COEFFS = numPlanes * 6; +    uint32_t totalSize = (NUMBER_CENTER_ARGS + NUMBER_COEFFS) * sizeof(double) + sizeof(uint32_t); + +    err = mEndianOut.write(&totalSize, 0, 1); +    if (err != OK) return err; + +    err = mEndianOut.write(&numPlanes, 0, 1); +    if (err != OK) return err; + +    err = mEndianOut.write(kCoeffs, 0, NUMBER_COEFFS); +    if (err != OK) return err; + +    err = mEndianOut.write(&opticalCenterX, 0, 1); +    if (err != OK) return err; + +    err = 
mEndianOut.write(&opticalCenterY, 0, 1); +    if (err != OK) return err; + +    mCount++; + +    return OK; +} +  } /*namespace img_utils*/  } /*namespace android*/ diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c index 6d30d64..db7865a 100644 --- a/media/libeffects/factory/EffectsFactory.c +++ b/media/libeffects/factory/EffectsFactory.c @@ -24,10 +24,12 @@  #include <cutils/misc.h>  #include <cutils/config_utils.h> +#include <cutils/properties.h>  #include <audio_effects/audio_effects_conf.h>  static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects  static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries +static list_elem_t *gSkippedEffects; // list of effects skipped because of duplicate uuid  // list of effect_descriptor and list of sub effects : all currently loaded  // It does not contain effects without sub effects.  static list_sub_elem_t *gSubEffectList; @@ -63,10 +65,10 @@ static int findEffect(const effect_uuid_t *type,                 lib_entry_t **lib,                 effect_descriptor_t **desc);  // To search a subeffect in the gSubEffectList -int findSubEffect(const effect_uuid_t *uuid, +static int findSubEffect(const effect_uuid_t *uuid,                 lib_entry_t **lib,                 effect_descriptor_t **desc); -static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len); +static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent);  static int stringToUuid(const char *str, effect_uuid_t *uuid);  static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen); @@ -237,8 +239,8 @@ int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor)      }  #if (LOG_NDEBUG == 0) -    char str[256]; -    dumpEffectDescriptor(pDescriptor, str, 256); +    char str[512]; +    dumpEffectDescriptor(pDescriptor, str, sizeof(str), 0 /* indent */);      
ALOGV("EffectQueryEffect() desc:%s", str);  #endif      pthread_mutex_unlock(&gLibLock); @@ -446,12 +448,19 @@ int init() {          return 0;      } +    // ignore effects or not? +    const bool ignoreFxConfFiles = property_get_bool(PROPERTY_IGNORE_EFFECTS, false); +      pthread_mutex_init(&gLibLock, NULL); -    if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) { -        loadEffectConfigFile(AUDIO_EFFECT_VENDOR_CONFIG_FILE); -    } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) { -        loadEffectConfigFile(AUDIO_EFFECT_DEFAULT_CONFIG_FILE); +    if (ignoreFxConfFiles) { +        ALOGI("Audio effects in configuration files will be ignored"); +    } else { +        if (access(AUDIO_EFFECT_VENDOR_CONFIG_FILE, R_OK) == 0) { +            loadEffectConfigFile(AUDIO_EFFECT_VENDOR_CONFIG_FILE); +        } else if (access(AUDIO_EFFECT_DEFAULT_CONFIG_FILE, R_OK) == 0) { +            loadEffectConfigFile(AUDIO_EFFECT_DEFAULT_CONFIG_FILE); +        }      }      updateNumEffects(); @@ -503,15 +512,31 @@ int loadLibrary(cnode *root, const char *name)      audio_effect_library_t *desc;      list_elem_t *e;      lib_entry_t *l; +    char path[PATH_MAX]; +    char *str; +    size_t len;      node = config_find(root, PATH_TAG);      if (node == NULL) {          return -EINVAL;      } +    // audio_effects.conf always specifies 32 bit lib path: convert to 64 bit path if needed +    strlcpy(path, node->value, PATH_MAX); +#ifdef __LP64__ +    str = strstr(path, "/lib/"); +    if (str == NULL) +        return -EINVAL; +    len = str - path; +    path[len] = '\0'; +    strlcat(path, "/lib64/", PATH_MAX); +    strlcat(path, node->value + len + strlen("/lib/"), PATH_MAX); +#endif +    if (strlen(path) >= PATH_MAX - 1) +        return -EINVAL; -    hdl = dlopen(node->value, RTLD_NOW); +    hdl = dlopen(path, RTLD_NOW);      if (hdl == NULL) { -        ALOGW("loadLibrary() failed to open %s", node->value); +        ALOGW("loadLibrary() failed to open %s", path);  
        goto error;      } @@ -535,7 +560,7 @@ int loadLibrary(cnode *root, const char *name)      // add entry for library in gLibraryList      l = malloc(sizeof(lib_entry_t));      l->name = strndup(name, PATH_MAX); -    l->path = strndup(node->value, PATH_MAX); +    l->path = strndup(path, PATH_MAX);      l->handle = hdl;      l->desc = desc;      l->effects = NULL; @@ -547,7 +572,7 @@ int loadLibrary(cnode *root, const char *name)      e->next = gLibraryList;      gLibraryList = e;      pthread_mutex_unlock(&gLibLock); -    ALOGV("getLibrary() linked library %p for path %s", l, node->value); +    ALOGV("getLibrary() linked library %p for path %s", l, path);      return 0; @@ -595,8 +620,8 @@ int addSubEffect(cnode *root)          return -EINVAL;      }  #if (LOG_NDEBUG==0) -    char s[256]; -    dumpEffectDescriptor(d, s, 256); +    char s[512]; +    dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);      ALOGV("addSubEffect() read descriptor %p:%s",d, s);  #endif      if (EFFECT_API_VERSION_MAJOR(d->apiVersion) != @@ -660,6 +685,13 @@ int loadEffect(cnode *root)          ALOGW("loadEffect() invalid uuid %s", node->value);          return -EINVAL;      } +    lib_entry_t *tmp; +    bool skip = false; +    if (findEffect(NULL, &uuid, &tmp, NULL) == 0) { +        ALOGW("skipping duplicate uuid %s %s", node->value, +                node->next ? 
"and its sub-effects" : ""); +        skip = true; +    }      d = malloc(sizeof(effect_descriptor_t));      if (l->desc->get_descriptor(&uuid, d) != 0) { @@ -670,8 +702,8 @@ int loadEffect(cnode *root)          return -EINVAL;      }  #if (LOG_NDEBUG==0) -    char s[256]; -    dumpEffectDescriptor(d, s, 256); +    char s[512]; +    dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);      ALOGV("loadEffect() read descriptor %p:%s",d, s);  #endif      if (EFFECT_API_VERSION_MAJOR(d->apiVersion) != @@ -682,8 +714,14 @@ int loadEffect(cnode *root)      }      e = malloc(sizeof(list_elem_t));      e->object = d; -    e->next = l->effects; -    l->effects = e; +    if (skip) { +        e->next = gSkippedEffects; +        gSkippedEffects = e; +        return -EINVAL; +    } else { +        e->next = l->effects; +        l->effects = e; +    }      // After the UUID node in the config_tree, if node->next is valid,      // that would be sub effect node. @@ -876,22 +914,30 @@ int findEffect(const effect_uuid_t *type,      return ret;  } -void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len) { +void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent) {      char s[256]; +    char ss[256]; +    char idt[indent + 1]; + +    memset(idt, ' ', indent); +    idt[indent] = 0; + +    str[0] = 0; -    snprintf(str, len, "\nEffect Descriptor %p:\n", desc); -    strncat(str, "- TYPE: ", len); -    uuidToString(&desc->uuid, s, 256); -    snprintf(str, len, "- UUID: %s\n", s); -    uuidToString(&desc->type, s, 256); -    snprintf(str, len, "- TYPE: %s\n", s); -    sprintf(s, "- apiVersion: %08X\n- flags: %08X\n", -            desc->apiVersion, desc->flags); -    strncat(str, s, len); -    sprintf(s, "- name: %s\n", desc->name); -    strncat(str, s, len); -    sprintf(s, "- implementor: %s\n", desc->implementor); -    strncat(str, s, len); +    snprintf(s, sizeof(s), "%s%s / %s\n", idt, desc->name, desc->implementor); +    
strlcat(str, s, len); + +    uuidToString(&desc->uuid, s, sizeof(s)); +    snprintf(ss, sizeof(ss), "%s  UUID: %s\n", idt, s); +    strlcat(str, ss, len); + +    uuidToString(&desc->type, s, sizeof(s)); +    snprintf(ss, sizeof(ss), "%s  TYPE: %s\n", idt, s); +    strlcat(str, ss, len); + +    sprintf(s, "%s  apiVersion: %08X\n%s  flags: %08X\n", idt, +            desc->apiVersion, idt, desc->flags); +    strlcat(str, s, len);  }  int stringToUuid(const char *str, effect_uuid_t *uuid) @@ -934,3 +980,40 @@ int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen)      return 0;  } +int EffectDumpEffects(int fd) { +    char s[512]; +    list_elem_t *e = gLibraryList; +    lib_entry_t *l = NULL; +    effect_descriptor_t *d = NULL; +    int found = 0; +    int ret = 0; + +    while (e) { +        l = (lib_entry_t *)e->object; +        list_elem_t *efx = l->effects; +        dprintf(fd, "Library %s\n", l->name); +        if (!efx) { +            dprintf(fd, "  (no effects)\n"); +        } +        while (efx) { +            d = (effect_descriptor_t *)efx->object; +            dumpEffectDescriptor(d, s, sizeof(s), 2); +            dprintf(fd, "%s", s); +            efx = efx->next; +        } +        e = e->next; +    } + +    e = gSkippedEffects; +    if (e) { +        dprintf(fd, "Skipped effects\n"); +        while(e) { +            d = (effect_descriptor_t *)e->object; +            dumpEffectDescriptor(d, s, sizeof(s), 2 /* indent */); +            dprintf(fd, "%s", s); +            e = e->next; +        } +    } +    return ret; +} + diff --git a/media/libeffects/factory/EffectsFactory.h b/media/libeffects/factory/EffectsFactory.h index 560b485..518800d 100644 --- a/media/libeffects/factory/EffectsFactory.h +++ b/media/libeffects/factory/EffectsFactory.h @@ -26,6 +26,7 @@  extern "C" {  #endif +#define PROPERTY_IGNORE_EFFECTS "ro.audio.ignore_effects"  typedef struct list_elem_s {      void *object; diff --git 
a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp index 40c7fef..af904a6 100644 --- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp +++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp @@ -27,6 +27,7 @@  #include <cutils/log.h>  #include "EffectBundle.h" +#include "math.h"  // effect_handle_t interface implementation for bass boost @@ -830,32 +831,69 @@ void LvmEffect_limitLevel(EffectContext *pContext) {      int gainCorrection = 0;      //Count the energy contribution per band for EQ and BassBoost only if they are active.      float energyContribution = 0; +    float energyCross = 0; +    float energyBassBoost = 0; +    float crossCorrection = 0;      //EQ contribution      if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) {          for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { -            float bandEnergy = (pContext->pBundledContext->bandGaindB[i] * -                    LimitLevel_bandEnergyContribution[i])/15.0; +            float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0; +            float bandCoefficient = LimitLevel_bandEnergyCoefficient[i]; +            float bandEnergy = bandFactor * bandCoefficient * bandCoefficient;              if (bandEnergy > 0)                  energyContribution += bandEnergy;          } + +        //cross EQ coefficients +        float bandFactorSum = 0; +        for (int i = 0; i < FIVEBAND_NUMBANDS-1; i++) { +            float bandFactor1 = pContext->pBundledContext->bandGaindB[i]/15.0; +            float bandFactor2 = pContext->pBundledContext->bandGaindB[i+1]/15.0; + +            if (bandFactor1 > 0 && bandFactor2 > 0) { +                float crossEnergy = bandFactor1 * bandFactor2 * +                        LimitLevel_bandEnergyCrossCoefficient[i]; +                bandFactorSum += bandFactor1 * bandFactor2; + +                if (crossEnergy > 0) +                    energyCross += crossEnergy; +            } +        } 
+        bandFactorSum -= 1.0; +        if (bandFactorSum > 0) +            crossCorrection = bandFactorSum * 0.7;      }      //BassBoost contribution      if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) { -        float bandEnergy = (pContext->pBundledContext->BassStrengthSaved * -                LimitLevel_bassBoostEnergyContribution)/1000.0; -        if (bandEnergy > 0) -            energyContribution += bandEnergy; +        float boostFactor = (pContext->pBundledContext->BassStrengthSaved)/1000.0; +        float boostCoefficient = LimitLevel_bassBoostEnergyCoefficient; + +        energyContribution += boostFactor * boostCoefficient * boostCoefficient; + +        for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { +            float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0; +            float bandCrossCoefficient = LimitLevel_bassBoostEnergyCrossCoefficient[i]; +            float bandEnergy = boostFactor * bandFactor * +                    bandCrossCoefficient; +            if (bandEnergy > 0) +                energyBassBoost += bandEnergy; +        }      }      //Virtualizer contribution      if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) { -                   energyContribution += LimitLevel_virtualizerContribution; -        } +        energyContribution += LimitLevel_virtualizerContribution * +                LimitLevel_virtualizerContribution; +    } + +    double totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) - +            crossCorrection; +    ALOGV(" TOTAL energy estimation: %0.2f", totalEnergyEstimation);      //roundoff -    int maxLevelRound = (int)(energyContribution + 0.99); +    int maxLevelRound = (int)(totalEnergyEstimation + 0.99);      if (maxLevelRound + pContext->pBundledContext->volume > 0) {          gainCorrection = maxLevelRound + pContext->pBundledContext->volume;      } diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h 
b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h index b3071f4..9459b87 100644 --- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h +++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.h @@ -200,10 +200,16 @@ static const PresetConfig gEqualizerPresets[] = {   * updated.   */ -static const float LimitLevel_bandEnergyContribution[FIVEBAND_NUMBANDS] = { -        5.0, 6.5, 6.45, 4.8, 1.7 }; +static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = { +        7.56, 9.69, 9.59, 7.37, 2.88}; -static const float LimitLevel_bassBoostEnergyContribution = 6.7; +static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS-1] = { +        126.0, 115.0, 125.0, 104.0 }; + +static const float LimitLevel_bassBoostEnergyCrossCoefficient[FIVEBAND_NUMBANDS] = { +        221.21, 208.10, 28.16, 0.0, 0.0 }; + +static const float LimitLevel_bassBoostEnergyCoefficient = 7.12;  static const float LimitLevel_virtualizerContribution = 1.9; diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 6c585fb..a3c3d3c 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -7,6 +7,9 @@ LOCAL_SRC_FILES:= \  LOCAL_MODULE:= libmedia_helper  LOCAL_MODULE_TAGS := optional +LOCAL_C_FLAGS += -Werror -Wno-error=deprecated-declarations -Wall +LOCAL_CLANG := true +  include $(BUILD_STATIC_LIBRARY)  include $(CLEAR_VARS) @@ -19,6 +22,7 @@ LOCAL_SRC_FILES:= \      IAudioTrack.cpp \      IAudioRecord.cpp \      ICrypto.cpp \ +    IDataSource.cpp \      IDrm.cpp \      IDrmClient.cpp \      IHDCP.cpp \ @@ -36,6 +40,8 @@ LOCAL_SRC_FILES:= \      IMediaRecorder.cpp \      IRemoteDisplay.cpp \      IRemoteDisplayClient.cpp \ +    IResourceManagerClient.cpp \ +    IResourceManagerService.cpp \      IStreamSource.cpp \      MediaCodecInfo.cpp \      Metadata.cpp \ @@ -53,6 +59,8 @@ LOCAL_SRC_FILES:= \      CharacterEncodingDetector.cpp \      IMediaDeathNotifier.cpp \      MediaProfiles.cpp \ +    MediaResource.cpp \ +    
MediaResourcePolicy.cpp \      IEffect.cpp \      IEffectClient.cpp \      AudioEffect.cpp \ @@ -61,15 +69,11 @@ LOCAL_SRC_FILES:= \      StringArray.cpp \      AudioPolicy.cpp -LOCAL_SRC_FILES += ../libnbaio/roundup.c -  LOCAL_SHARED_LIBRARIES := \  	libui liblog libcutils libutils libbinder libsonivox libicuuc libicui18n libexpat \          libcamera_client libstagefright_foundation \          libgui libdl libaudioutils libnbaio -LOCAL_STATIC_LIBRARIES += libinstantssq -  LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper  LOCAL_MODULE:= libmedia @@ -83,14 +87,8 @@ LOCAL_C_INCLUDES := \      $(call include-path-for, audio-effects) \      $(call include-path-for, audio-utils) -include $(BUILD_SHARED_LIBRARY) - -include $(CLEAR_VARS) - -LOCAL_SRC_FILES += SingleStateQueue.cpp -LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"' +LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall +LOCAL_CLANG := true -LOCAL_MODULE := libinstantssq -LOCAL_MODULE_TAGS := optional +include $(BUILD_SHARED_LIBRARY) -include $(BUILD_STATIC_LIBRARY) diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp index af103c1..ff82544 100644 --- a/media/libmedia/AudioEffect.cpp +++ b/media/libmedia/AudioEffect.cpp @@ -35,13 +35,14 @@ namespace android {  // --------------------------------------------------------------------------- -AudioEffect::AudioEffect() -    : mStatus(NO_INIT) +AudioEffect::AudioEffect(const String16& opPackageName) +    : mStatus(NO_INIT), mOpPackageName(opPackageName)  {  }  AudioEffect::AudioEffect(const effect_uuid_t *type, +                const String16& opPackageName,                  const effect_uuid_t *uuid,                  int32_t priority,                  effect_callback_t cbf, @@ -49,12 +50,13 @@ AudioEffect::AudioEffect(const effect_uuid_t *type,                  int sessionId,                  audio_io_handle_t io                  ) -    : mStatus(NO_INIT) +    : mStatus(NO_INIT), 
mOpPackageName(opPackageName)  {      mStatus = set(type, uuid, priority, cbf, user, sessionId, io);  }  AudioEffect::AudioEffect(const char *typeStr, +                const String16& opPackageName,                  const char *uuidStr,                  int32_t priority,                  effect_callback_t cbf, @@ -62,7 +64,7 @@ AudioEffect::AudioEffect(const char *typeStr,                  int sessionId,                  audio_io_handle_t io                  ) -    : mStatus(NO_INIT) +    : mStatus(NO_INIT), mOpPackageName(opPackageName)  {      effect_uuid_t type;      effect_uuid_t *pType = NULL; @@ -128,16 +130,18 @@ status_t AudioEffect::set(const effect_uuid_t *type,      mIEffectClient = new EffectClient(this);      iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor, -            mIEffectClient, priority, io, mSessionId, &mStatus, &mId, &enabled); +            mIEffectClient, priority, io, mSessionId, mOpPackageName, &mStatus, &mId, &enabled);      if (iEffect == 0 || (mStatus != NO_ERROR && mStatus != ALREADY_EXISTS)) {          ALOGE("set(): AudioFlinger could not create effect, status: %d", mStatus); +        if (iEffect == 0) { +            mStatus = NO_INIT; +        }          return mStatus;      }      mEnabled = (volatile int32_t)enabled; -    mIEffect = iEffect;      cblk = iEffect->getCblk();      if (cblk == 0) {          mStatus = NO_INIT; @@ -145,6 +149,7 @@ status_t AudioEffect::set(const effect_uuid_t *type,          return mStatus;      } +    mIEffect = iEffect;      mCblkMemory = cblk;      mCblk = static_cast<effect_param_cblk_t*>(cblk->pointer());      int bufOffset = ((sizeof(effect_param_cblk_t) - 1) / sizeof(int) + 1) * sizeof(int); @@ -175,11 +180,11 @@ AudioEffect::~AudioEffect()              mIEffect->disconnect();              IInterface::asBinder(mIEffect)->unlinkToDeath(mIEffectClient);          } +        mIEffect.clear(); +        mCblkMemory.clear(); +        mIEffectClient.clear();          
IPCThreadState::self()->flushCommands();      } -    mIEffect.clear(); -    mIEffectClient.clear(); -    mCblkMemory.clear();  } @@ -486,4 +491,4 @@ status_t AudioEffect::guidToString(const effect_uuid_t *guid, char *str, size_t  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioParameter.cpp b/media/libmedia/AudioParameter.cpp index 33dbf0b..8c8cf45 100644 --- a/media/libmedia/AudioParameter.cpp +++ b/media/libmedia/AudioParameter.cpp @@ -180,4 +180,4 @@ status_t AudioParameter::getAt(size_t index, String8& key, String8& value)      }  } -};  // namespace android +} // namespace android diff --git a/media/libmedia/AudioPolicy.cpp b/media/libmedia/AudioPolicy.cpp index d2d0971..9d07011 100644 --- a/media/libmedia/AudioPolicy.cpp +++ b/media/libmedia/AudioPolicy.cpp @@ -68,6 +68,7 @@ status_t AudioMix::readFromParcel(Parcel *parcel)      mFormat.format = (audio_format_t)parcel->readInt32();      mRouteFlags = parcel->readInt32();      mRegistrationId = parcel->readString8(); +    mCbFlags = (uint32_t)parcel->readInt32();      size_t size = (size_t)parcel->readInt32();      if (size > MAX_CRITERIA_PER_MIX) {          size = MAX_CRITERIA_PER_MIX; @@ -89,6 +90,7 @@ status_t AudioMix::writeToParcel(Parcel *parcel) const      parcel->writeInt32(mFormat.format);      parcel->writeInt32(mRouteFlags);      parcel->writeString8(mRegistrationId); +    parcel->writeInt32(mCbFlags);      size_t size = mCriteria.size();      if (size > MAX_CRITERIA_PER_MIX) {          size = MAX_CRITERIA_PER_MIX; @@ -112,4 +114,4 @@ status_t AudioMix::writeToParcel(Parcel *parcel) const      return NO_ERROR;  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 07ca14f..011b31f 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -65,9 +65,10 @@ status_t AudioRecord::getMinFrameCount(  // 
--------------------------------------------------------------------------- -AudioRecord::AudioRecord() -    : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE), -      mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT) +AudioRecord::AudioRecord(const String16 &opPackageName) +    : mStatus(NO_INIT), mOpPackageName(opPackageName), mSessionId(AUDIO_SESSION_ALLOCATE), +      mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT), +      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE)  {  } @@ -76,6 +77,7 @@ AudioRecord::AudioRecord(          uint32_t sampleRate,          audio_format_t format,          audio_channel_mask_t channelMask, +        const String16& opPackageName,          size_t frameCount,          callback_t cbf,          void* user, @@ -83,15 +85,20 @@ AudioRecord::AudioRecord(          int sessionId,          transfer_type transferType,          audio_input_flags_t flags, +        int uid, +        pid_t pid,          const audio_attributes_t* pAttributes) -    : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE), +    : mStatus(NO_INIT), +      mOpPackageName(opPackageName), +      mSessionId(AUDIO_SESSION_ALLOCATE),        mPreviousPriority(ANDROID_PRIORITY_NORMAL),        mPreviousSchedulingGroup(SP_DEFAULT), -      mProxy(NULL) +      mProxy(NULL), +      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE)  {      mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,              notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags, -            pAttributes); +            uid, pid, pAttributes);  }  AudioRecord::~AudioRecord() @@ -107,12 +114,18 @@ AudioRecord::~AudioRecord()              mAudioRecordThread->requestExitAndWait();              mAudioRecordThread.clear();          } +        // No lock here: worst case we remove a NULL callback which will be a nop +        if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) { +            
AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput); +        }          IInterface::asBinder(mAudioRecord)->unlinkToDeath(mDeathNotifier, this);          mAudioRecord.clear();          mCblkMemory.clear();          mBufferMemory.clear();          IPCThreadState::self()->flushCommands(); -        AudioSystem::releaseAudioSessionId(mSessionId, -1); +        ALOGV("~AudioRecord, releasing session id %d", +                mSessionId); +        AudioSystem::releaseAudioSessionId(mSessionId, -1 /*pid*/);      }  } @@ -129,12 +142,15 @@ status_t AudioRecord::set(          int sessionId,          transfer_type transferType,          audio_input_flags_t flags, +        int uid, +        pid_t pid,          const audio_attributes_t* pAttributes)  {      ALOGV("set(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, " -          "notificationFrames %u, sessionId %d, transferType %d, flags %#x", +          "notificationFrames %u, sessionId %d, transferType %d, flags %#x, opPackageName %s " +          "uid %d, pid %d",            inputSource, sampleRate, format, channelMask, frameCount, notificationFrames, -          sessionId, transferType, flags); +          sessionId, transferType, flags, String8(mOpPackageName).string(), uid, pid);      switch (transferType) {      case TRANSFER_DEFAULT: @@ -159,8 +175,6 @@ status_t AudioRecord::set(      }      mTransfer = transferType; -    AutoMutex lock(mLock); -      // invariant that mAudioRecord != 0 is true only after set() returns successfully      if (mAudioRecord != 0) {          ALOGE("Track already in use"); @@ -189,13 +203,9 @@ status_t AudioRecord::set(      }      // validate parameters -    if (!audio_is_valid_format(format)) { -        ALOGE("Invalid format %#x", format); -        return BAD_VALUE; -    } -    // Temporary restriction: AudioFlinger currently supports 16-bit PCM only -    if (format != AUDIO_FORMAT_PCM_16_BIT) { -        ALOGE("Format %#x is not supported", format); + 
   // AudioFlinger capture only supports linear PCM +    if (!audio_is_valid_format(format) || !audio_is_linear_pcm(format)) { +        ALOGE("Format %#x is not linear pcm", format);          return BAD_VALUE;      }      mFormat = format; @@ -227,16 +237,30 @@ status_t AudioRecord::set(      }      ALOGV("set(): mSessionId %d", mSessionId); +    int callingpid = IPCThreadState::self()->getCallingPid(); +    int mypid = getpid(); +    if (uid == -1 || (callingpid != mypid)) { +        mClientUid = IPCThreadState::self()->getCallingUid(); +    } else { +        mClientUid = uid; +    } +    if (pid == -1 || (callingpid != mypid)) { +        mClientPid = callingpid; +    } else { +        mClientPid = pid; +    } +      mFlags = flags;      mCbf = cbf;      if (cbf != NULL) {          mAudioRecordThread = new AudioRecordThread(*this, threadCanCallJava);          mAudioRecordThread->run("AudioRecord", ANDROID_PRIORITY_AUDIO); +        // thread begins in paused state, and will not reference us until start()      }      // create the IAudioRecord -    status_t status = openRecord_l(0 /*epoch*/); +    status_t status = openRecord_l(0 /*epoch*/, mOpPackageName);      if (status != NO_ERROR) {          if (mAudioRecordThread != 0) { @@ -284,9 +308,10 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession)      mNewPosition = mProxy->getPosition() + mUpdatePeriod;      int32_t flags = android_atomic_acquire_load(&mCblk->mFlags); +    mActive = true; +      status_t status = NO_ERROR;      if (!(flags & CBLK_INVALID)) { -        ALOGV("mAudioRecord->start()");          status = mAudioRecord->start(event, triggerSession);          if (status == DEAD_OBJECT) {              flags |= CBLK_INVALID; @@ -297,9 +322,9 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession)      }      if (status != NO_ERROR) { +        mActive = false;          ALOGE("start() status %d", status);      } else { -        mActive = true;          
sp<AudioRecordThread> t = mAudioRecordThread;          if (t != 0) {              t->resume(); @@ -352,6 +377,10 @@ status_t AudioRecord::setMarkerPosition(uint32_t marker)      mMarkerPosition = marker;      mMarkerReached = false; +    sp<AudioRecordThread> t = mAudioRecordThread; +    if (t != 0) { +        t->wake(); +    }      return NO_ERROR;  } @@ -378,6 +407,10 @@ status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod)      mNewPosition = mProxy->getPosition() + updatePeriod;      mUpdatePeriod = updatePeriod; +    sp<AudioRecordThread> t = mAudioRecordThread; +    if (t != 0) { +        t->wake(); +    }      return NO_ERROR;  } @@ -408,15 +441,42 @@ status_t AudioRecord::getPosition(uint32_t *position) const  uint32_t AudioRecord::getInputFramesLost() const  {      // no need to check mActive, because if inactive this will return 0, which is what we want -    return AudioSystem::getInputFramesLost(getInput()); +    return AudioSystem::getInputFramesLost(getInputPrivate()); +} + +// ---- Explicit Routing --------------------------------------------------- +status_t AudioRecord::setInputDevice(audio_port_handle_t deviceId) { +    AutoMutex lock(mLock); +    if (mSelectedDeviceId != deviceId) { +        mSelectedDeviceId = deviceId; +        // stop capture so that audio policy manager does not reject the new instance start request +        // as only one capture can be active at a time. 
+        if (mAudioRecord != 0 && mActive) { +            mAudioRecord->stop(); +        } +        android_atomic_or(CBLK_INVALID, &mCblk->mFlags); +    } +    return NO_ERROR; +} + +audio_port_handle_t AudioRecord::getInputDevice() { +    AutoMutex lock(mLock); +    return mSelectedDeviceId; +} + +audio_port_handle_t AudioRecord::getRoutedDeviceId() { +    AutoMutex lock(mLock); +    if (mInput == AUDIO_IO_HANDLE_NONE) { +        return AUDIO_PORT_HANDLE_NONE; +    } +    return AudioSystem::getDeviceIdForIo(mInput);  }  // -------------------------------------------------------------------------  // must be called with mLock held -status_t AudioRecord::openRecord_l(size_t epoch) +status_t AudioRecord::openRecord_l(size_t epoch, const String16& opPackageName)  { -    status_t status;      const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();      if (audioFlinger == 0) {          ALOGE("Could not get audioflinger"); @@ -431,12 +491,16 @@ status_t AudioRecord::openRecord_l(size_t epoch)      }      // Client can only express a preference for FAST.  Server will perform additional tests. 
-    if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !( -            // use case: callback transfer mode -            (mTransfer == TRANSFER_CALLBACK) && +    if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !(( +            // either of these use cases: +            // use case 1: callback transfer mode +            (mTransfer == TRANSFER_CALLBACK) || +            // use case 2: obtain/release mode +            (mTransfer == TRANSFER_OBTAIN)) &&              // matching sample rate              (mSampleRate == afSampleRate))) { -        ALOGW("AUDIO_INPUT_FLAG_FAST denied by client"); +        ALOGW("AUDIO_INPUT_FLAG_FAST denied by client; transfer %d, track %u Hz, primary %u Hz", +                mTransfer, mSampleRate, afSampleRate);          // once denied, do not request again if IAudioRecord is re-created          mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);      } @@ -451,9 +515,16 @@ status_t AudioRecord::openRecord_l(size_t epoch)          }      } +    if (mDeviceCallback != 0 && mInput != AUDIO_IO_HANDLE_NONE) { +        AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput); +    } +      audio_io_handle_t input; -    status = AudioSystem::getInputForAttr(&mAttributes, &input, (audio_session_t)mSessionId, -                                        mSampleRate, mFormat, mChannelMask, mFlags); +    status_t status = AudioSystem::getInputForAttr(&mAttributes, &input, +                                        (audio_session_t)mSessionId, +                                        IPCThreadState::self()->getCallingUid(), +                                        mSampleRate, mFormat, mChannelMask, +                                        mFlags, mSelectedDeviceId);      if (status != NO_ERROR) {          ALOGE("Could not get audio input for record source %d, sample rate %u, format %#x, " @@ -476,11 +547,14 @@ status_t AudioRecord::openRecord_l(size_t epoch)      sp<IMemory> iMem;           // for cblk      sp<IMemory> bufferMem;      sp<IAudioRecord> 
record = audioFlinger->openRecord(input, -                                                       mSampleRate, mFormat, +                                                       mSampleRate, +                                                       mFormat,                                                         mChannelMask, +                                                       opPackageName,                                                         &temp,                                                         &trackFlags,                                                         tid, +                                                       mClientUid,                                                         &mSessionId,                                                         ¬ificationFrames,                                                         iMem, @@ -577,6 +651,10 @@ status_t AudioRecord::openRecord_l(size_t epoch)      mDeathNotifier = new DeathNotifier(this);      IInterface::asBinder(mAudioRecord)->linkToDeath(mDeathNotifier, this); +    if (mDeviceCallback != 0) { +        AudioSystem::addAudioDeviceCallback(mDeviceCallback, mInput); +    } +      return NO_ERROR;      } @@ -588,15 +666,21 @@ release:      return status;  } -status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) +status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount, size_t *nonContig)  {      if (audioBuffer == NULL) { +        if (nonContig != NULL) { +            *nonContig = 0; +        }          return BAD_VALUE;      }      if (mTransfer != TRANSFER_OBTAIN) {          audioBuffer->frameCount = 0;          audioBuffer->size = 0;          audioBuffer->raw = NULL; +        if (nonContig != NULL) { +            *nonContig = 0; +        }          return INVALID_OPERATION;      } @@ -615,7 +699,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)          ALOGE("%s invalid waitCount %d", __func__, waitCount);          
requested = NULL;      } -    return obtainBuffer(audioBuffer, requested); +    return obtainBuffer(audioBuffer, requested, NULL /*elapsed*/, nonContig);  }  status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *requested, @@ -684,9 +768,9 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r      return status;  } -void AudioRecord::releaseBuffer(Buffer* audioBuffer) +void AudioRecord::releaseBuffer(const Buffer* audioBuffer)  { -    // all TRANSFER_* are valid +    // FIXME add error checking on mode, by adding an internal version      size_t stepCount = audioBuffer->size / mFrameSize;      if (stepCount == 0) { @@ -704,7 +788,7 @@ void AudioRecord::releaseBuffer(Buffer* audioBuffer)      // the server does not automatically disable recorder on overrun, so no need to restart  } -audio_io_handle_t AudioRecord::getInput() const +audio_io_handle_t AudioRecord::getInputPrivate() const  {      AutoMutex lock(mLock);      return mInput; @@ -712,7 +796,7 @@ audio_io_handle_t AudioRecord::getInput() const  // ------------------------------------------------------------------------- -ssize_t AudioRecord::read(void* buffer, size_t userSize) +ssize_t AudioRecord::read(void* buffer, size_t userSize, bool blocking)  {      if (mTransfer != TRANSFER_SYNC) {          return INVALID_OPERATION; @@ -731,7 +815,8 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)      while (userSize >= mFrameSize) {          audioBuffer.frameCount = userSize / mFrameSize; -        status_t err = obtainBuffer(&audioBuffer, &ClientProxy::kForever); +        status_t err = obtainBuffer(&audioBuffer, +                blocking ? 
&ClientProxy::kForever : &ClientProxy::kNonBlocking);          if (err < 0) {              if (read > 0) {                  break; @@ -863,8 +948,11 @@ nsecs_t AudioRecord::processAudioBuffer()      if (!markerReached && position < markerPosition) {          minFrames = markerPosition - position;      } -    if (updatePeriod > 0 && updatePeriod < minFrames) { -        minFrames = updatePeriod; +    if (updatePeriod > 0) { +        uint32_t remaining = newPosition - position; +        if (remaining < minFrames) { +            minFrames = remaining; +        }      }      // If > 0, poll periodically to recover from a stuck server.  A good value is 2. @@ -990,14 +1078,13 @@ status_t AudioRecord::restoreRecord_l(const char *from)  {      ALOGW("dead IAudioRecord, creating a new one from %s()", from);      ++mSequence; -    status_t result;      // if the new IAudioRecord is created, openRecord_l() will modify the      // following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory.      
// It will also delete the strong references on previous IAudioRecord and IMemory      size_t position = mProxy->getPosition();      mNewPosition = position + mUpdatePeriod; -    result = openRecord_l(position); +    status_t result = openRecord_l(position, mOpPackageName);      if (result == NO_ERROR) {          if (mActive) {              // callback thread or sync event hasn't changed @@ -1013,6 +1100,48 @@ status_t AudioRecord::restoreRecord_l(const char *from)      return result;  } +status_t AudioRecord::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback) +{ +    if (callback == 0) { +        ALOGW("%s adding NULL callback!", __FUNCTION__); +        return BAD_VALUE; +    } +    AutoMutex lock(mLock); +    if (mDeviceCallback == callback) { +        ALOGW("%s adding same callback!", __FUNCTION__); +        return INVALID_OPERATION; +    } +    status_t status = NO_ERROR; +    if (mInput != AUDIO_IO_HANDLE_NONE) { +        if (mDeviceCallback != 0) { +            ALOGW("%s callback already present!", __FUNCTION__); +            AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput); +        } +        status = AudioSystem::addAudioDeviceCallback(callback, mInput); +    } +    mDeviceCallback = callback; +    return status; +} + +status_t AudioRecord::removeAudioDeviceCallback( +        const sp<AudioSystem::AudioDeviceCallback>& callback) +{ +    if (callback == 0) { +        ALOGW("%s removing NULL callback!", __FUNCTION__); +        return BAD_VALUE; +    } +    AutoMutex lock(mLock); +    if (mDeviceCallback != callback) { +        ALOGW("%s removing different callback!", __FUNCTION__); +        return INVALID_OPERATION; +    } +    if (mInput != AUDIO_IO_HANDLE_NONE) { +        AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mInput); +    } +    mDeviceCallback = 0; +    return NO_ERROR; +} +  // =========================================================================  void 
AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused) @@ -1069,8 +1198,8 @@ bool AudioRecord::AudioRecordThread::threadLoop()      case NS_NEVER:          return false;      case NS_WHENEVER: -        // FIXME increase poll interval, or make event-driven -        ns = 1000000000LL; +        // Event driven: call wake() when callback notifications conditions change. +        ns = INT64_MAX;          // fall through      default:          LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns); @@ -1103,6 +1232,21 @@ void AudioRecord::AudioRecordThread::resume()      }  } +void AudioRecord::AudioRecordThread::wake() +{ +    AutoMutex _l(mMyLock); +    if (!mPaused) { +        // wake() might be called while servicing a callback - ignore the next +        // pause time and call processAudioBuffer. +        mIgnoreNextPausedInt = true; +        if (mPausedInt && mPausedNs > 0) { +            // audio record is active and internally paused with timeout. +            mPausedInt = false; +            mMyCond.signal(); +        } +    } +} +  void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns)  {      AutoMutex _l(mMyLock); @@ -1112,4 +1256,4 @@ void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns)  // ------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 9cae21c..3bfb09a 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -32,23 +32,12 @@ namespace android {  // client singleton for AudioFlinger binder interface  Mutex AudioSystem::gLock; -Mutex AudioSystem::gLockCache;  Mutex AudioSystem::gLockAPS; -Mutex AudioSystem::gLockAPC;  sp<IAudioFlinger> AudioSystem::gAudioFlinger;  sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;  audio_error_callback AudioSystem::gAudioErrorCallback = NULL; +dynamic_policy_callback 
AudioSystem::gDynPolicyCallback = NULL; -// Cached values for output handles -DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(NULL); - -// Cached values for recording queries, all protected by gLock -uint32_t AudioSystem::gPrevInSamplingRate; -audio_format_t AudioSystem::gPrevInFormat; -audio_channel_mask_t AudioSystem::gPrevInChannelMask; -size_t AudioSystem::gInBuffSize = 0;    // zero indicates cache is invalid - -sp<AudioSystem::AudioPortCallback> AudioSystem::gAudioPortCallback;  // establish binder interface to AudioFlinger service  const sp<IAudioFlinger> AudioSystem::get_audio_flinger() @@ -87,6 +76,25 @@ const sp<IAudioFlinger> AudioSystem::get_audio_flinger()      return af;  } +const sp<AudioSystem::AudioFlingerClient> AudioSystem::getAudioFlingerClient() +{ +    // calling get_audio_flinger() will initialize gAudioFlingerClient if needed +    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); +    if (af == 0) return 0; +    Mutex::Autolock _l(gLock); +    return gAudioFlingerClient; +} + +sp<AudioIoDescriptor> AudioSystem::getIoDescriptor(audio_io_handle_t ioHandle) +{ +    sp<AudioIoDescriptor> desc; +    const sp<AudioFlingerClient> afc = getAudioFlingerClient(); +    if (afc != 0) { +        desc = afc->getIoDescriptor(ioHandle); +    } +    return desc; +} +  /* static */ status_t AudioSystem::checkAudioFlinger()  {      if (defaultServiceManager()->checkService(String16("media.audio_flinger")) != 0) { @@ -260,18 +268,13 @@ status_t AudioSystem::getSamplingRate(audio_io_handle_t output,  {      const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();      if (af == 0) return PERMISSION_DENIED; - -    Mutex::Autolock _l(gLockCache); - -    OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output); -    if (outputDesc == NULL) { +    sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output); +    if (outputDesc == 0) {          ALOGV("getOutputSamplingRate() no output 
descriptor for output %d in gOutputs", output); -        gLockCache.unlock();          *samplingRate = af->sampleRate(output); -        gLockCache.lock();      } else {          ALOGV("getOutputSamplingRate() reading from output desc"); -        *samplingRate = outputDesc->samplingRate; +        *samplingRate = outputDesc->mSamplingRate;      }      if (*samplingRate == 0) {          ALOGE("AudioSystem::getSamplingRate failed for output %d", output); @@ -304,16 +307,11 @@ status_t AudioSystem::getFrameCount(audio_io_handle_t output,  {      const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();      if (af == 0) return PERMISSION_DENIED; - -    Mutex::Autolock _l(gLockCache); - -    OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output); -    if (outputDesc == NULL) { -        gLockCache.unlock(); +    sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output); +    if (outputDesc == 0) {          *frameCount = af->frameCount(output); -        gLockCache.lock();      } else { -        *frameCount = outputDesc->frameCount; +        *frameCount = outputDesc->mFrameCount;      }      if (*frameCount == 0) {          ALOGE("AudioSystem::getFrameCount failed for output %d", output); @@ -346,16 +344,11 @@ status_t AudioSystem::getLatency(audio_io_handle_t output,  {      const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();      if (af == 0) return PERMISSION_DENIED; - -    Mutex::Autolock _l(gLockCache); - -    OutputDescriptor *outputDesc = AudioSystem::gOutputs.valueFor(output); -    if (outputDesc == NULL) { -        gLockCache.unlock(); +    sp<AudioIoDescriptor> outputDesc = getIoDescriptor(output); +    if (outputDesc == 0) {          *latency = af->latency(output); -        gLockCache.lock();      } else { -        *latency = outputDesc->latency; +        *latency = outputDesc->mLatency;      }      ALOGV("getLatency() output %d, latency %d", output, *latency); @@ -366,34 +359,11 @@ status_t 
AudioSystem::getLatency(audio_io_handle_t output,  status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t format,          audio_channel_mask_t channelMask, size_t* buffSize)  { -    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); -    if (af == 0) { -        return PERMISSION_DENIED; -    } -    Mutex::Autolock _l(gLockCache); -    // Do we have a stale gInBufferSize or are we requesting the input buffer size for new values -    size_t inBuffSize = gInBuffSize; -    if ((inBuffSize == 0) || (sampleRate != gPrevInSamplingRate) || (format != gPrevInFormat) -        || (channelMask != gPrevInChannelMask)) { -        gLockCache.unlock(); -        inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask); -        gLockCache.lock(); -        if (inBuffSize == 0) { -            ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x", -                    sampleRate, format, channelMask); -            return BAD_VALUE; -        } -        // A benign race is possible here: we could overwrite a fresher cache entry -        // save the request params -        gPrevInSamplingRate = sampleRate; -        gPrevInFormat = format; -        gPrevInChannelMask = channelMask; - -        gInBuffSize = inBuffSize; +    const sp<AudioFlingerClient> afc = getAudioFlingerClient(); +    if (afc == 0) { +        return NO_INIT;      } -    *buffSize = inBuffSize; - -    return NO_ERROR; +    return afc->getInputBufferSize(sampleRate, format, channelMask, buffSize);  }  status_t AudioSystem::setVoiceVolume(float value) @@ -453,8 +423,26 @@ audio_hw_sync_t AudioSystem::getAudioHwSyncForSession(audio_session_t sessionId)      return af->getAudioHwSyncForSession(sessionId);  } +status_t AudioSystem::systemReady() +{ +    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); +    if (af == 0) return NO_INIT; +    return af->systemReady(); +} +  // 
--------------------------------------------------------------------------- + +void AudioSystem::AudioFlingerClient::clearIoCache() +{ +    Mutex::Autolock _l(mLock); +    mIoDescriptors.clear(); +    mInBuffSize = 0; +    mInSamplingRate = 0; +    mInFormat = AUDIO_FORMAT_DEFAULT; +    mInChannelMask = AUDIO_CHANNEL_NONE; +} +  void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)  {      audio_error_callback cb = NULL; @@ -464,11 +452,8 @@ void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused          cb = gAudioErrorCallback;      } -    { -        // clear output handles and stream to output map caches -        Mutex::Autolock _l(gLockCache); -        AudioSystem::gOutputs.clear(); -    } +    // clear output handles and stream to output map caches +    clearIoCache();      if (cb) {          cb(DEAD_OBJECT); @@ -476,76 +461,191 @@ void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused      ALOGW("AudioFlinger server died!");  } -void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle_t ioHandle, -        const void *param2) { +void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event event, +                                                      const sp<AudioIoDescriptor>& ioDesc) {      ALOGV("ioConfigChanged() event %d", event); -    const OutputDescriptor *desc; -    audio_stream_type_t stream; -    if (ioHandle == AUDIO_IO_HANDLE_NONE) return; +    if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return; -    Mutex::Autolock _l(AudioSystem::gLockCache); +    audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE; +    Vector < sp<AudioDeviceCallback> > callbacks; + +    { +        Mutex::Autolock _l(mLock); + +        switch (event) { +        case AUDIO_OUTPUT_OPENED: +        case AUDIO_INPUT_OPENED: { +            sp<AudioIoDescriptor> oldDesc = getIoDescriptor(ioDesc->mIoHandle); +            if (oldDesc == 0) { +      
          mIoDescriptors.add(ioDesc->mIoHandle, ioDesc); +            } else { +                deviceId = oldDesc->getDeviceId(); +                mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc); +            } + +            if (ioDesc->getDeviceId() != AUDIO_PORT_HANDLE_NONE) { +                deviceId = ioDesc->getDeviceId(); +                ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(ioDesc->mIoHandle); +                if (ioIndex >= 0) { +                    callbacks = mAudioDeviceCallbacks.valueAt(ioIndex); +                } +            } +            ALOGV("ioConfigChanged() new %s opened %d samplingRate %u, format %#x channel mask %#x " +                    "frameCount %zu deviceId %d", event == AUDIO_OUTPUT_OPENED ? "output" : "input", +                    ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat, ioDesc->mChannelMask, +                    ioDesc->mFrameCount, ioDesc->getDeviceId()); +            } break; +        case AUDIO_OUTPUT_CLOSED: +        case AUDIO_INPUT_CLOSED: { +            if (getIoDescriptor(ioDesc->mIoHandle) == 0) { +                ALOGW("ioConfigChanged() closing unknown %s %d", +                      event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle); +                break; +            } +            ALOGV("ioConfigChanged() %s %d closed", +                  event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle); + +            mIoDescriptors.removeItem(ioDesc->mIoHandle); +            mAudioDeviceCallbacks.removeItem(ioDesc->mIoHandle); +            } break; + +        case AUDIO_OUTPUT_CONFIG_CHANGED: +        case AUDIO_INPUT_CONFIG_CHANGED: { +            sp<AudioIoDescriptor> oldDesc = getIoDescriptor(ioDesc->mIoHandle); +            if (oldDesc == 0) { +                ALOGW("ioConfigChanged() modifying unknown output! 
%d", ioDesc->mIoHandle); +                break; +            } + +            deviceId = oldDesc->getDeviceId(); +            mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc); + +            if (deviceId != ioDesc->getDeviceId()) { +                deviceId = ioDesc->getDeviceId(); +                ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(ioDesc->mIoHandle); +                if (ioIndex >= 0) { +                    callbacks = mAudioDeviceCallbacks.valueAt(ioIndex); +                } +            } +            ALOGV("ioConfigChanged() new config for %s %d samplingRate %u, format %#x " +                    "channel mask %#x frameCount %zu deviceId %d", +                    event == AUDIO_OUTPUT_CONFIG_CHANGED ? "output" : "input", +                    ioDesc->mIoHandle, ioDesc->mSamplingRate, ioDesc->mFormat, +                    ioDesc->mChannelMask, ioDesc->mFrameCount, ioDesc->getDeviceId()); -    switch (event) { -    case STREAM_CONFIG_CHANGED: -        break; -    case OUTPUT_OPENED: { -        if (gOutputs.indexOfKey(ioHandle) >= 0) { -            ALOGV("ioConfigChanged() opening already existing output! %d", ioHandle); -            break; -        } -        if (param2 == NULL) break; -        desc = (const OutputDescriptor *)param2; - -        OutputDescriptor *outputDesc =  new OutputDescriptor(*desc); -        gOutputs.add(ioHandle, outputDesc); -        ALOGV("ioConfigChanged() new output samplingRate %u, format %#x channel mask %#x frameCount %zu " -                "latency %d", -                outputDesc->samplingRate, outputDesc->format, outputDesc->channelMask, -                outputDesc->frameCount, outputDesc->latency);          } break; -    case OUTPUT_CLOSED: { -        if (gOutputs.indexOfKey(ioHandle) < 0) { -            ALOGW("ioConfigChanged() closing unknown output! 
%d", ioHandle); -            break;          } -        ALOGV("ioConfigChanged() output %d closed", ioHandle); +    } +    // callbacks.size() != 0 =>  ioDesc->mIoHandle and deviceId are valid +    for (size_t i = 0; i < callbacks.size(); i++) { +        callbacks[i]->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId); +    } +} -        gOutputs.removeItem(ioHandle); -        } break; +status_t AudioSystem::AudioFlingerClient::getInputBufferSize( +                                                uint32_t sampleRate, audio_format_t format, +                                                audio_channel_mask_t channelMask, size_t* buffSize) +{ +    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); +    if (af == 0) { +        return PERMISSION_DENIED; +    } +    Mutex::Autolock _l(mLock); +    // Do we have a stale mInBuffSize or are we requesting the input buffer size for new values +    if ((mInBuffSize == 0) || (sampleRate != mInSamplingRate) || (format != mInFormat) +        || (channelMask != mInChannelMask)) { +        size_t inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask); +        if (inBuffSize == 0) { +            ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x", +                    sampleRate, format, channelMask); +            return BAD_VALUE; +        } +        // A benign race is possible here: we could overwrite a fresher cache entry +        // save the request params +        mInSamplingRate = sampleRate; +        mInFormat = format; +        mInChannelMask = channelMask; -    case OUTPUT_CONFIG_CHANGED: { -        int index = gOutputs.indexOfKey(ioHandle); -        if (index < 0) { -            ALOGW("ioConfigChanged() modifying unknown output! 
%d", ioHandle); -            break; +        mInBuffSize = inBuffSize; +    } + +    *buffSize = mInBuffSize; + +    return NO_ERROR; +} + +sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle) +{ +    sp<AudioIoDescriptor> desc; +    ssize_t index = mIoDescriptors.indexOfKey(ioHandle); +    if (index >= 0) { +        desc = mIoDescriptors.valueAt(index); +    } +    return desc; +} + +status_t AudioSystem::AudioFlingerClient::addAudioDeviceCallback( +        const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo) +{ +    Mutex::Autolock _l(mLock); +    Vector < sp<AudioDeviceCallback> > callbacks; +    ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(audioIo); +    if (ioIndex >= 0) { +        callbacks = mAudioDeviceCallbacks.valueAt(ioIndex); +    } + +    for (size_t cbIndex = 0; cbIndex < callbacks.size(); cbIndex++) { +        if (callbacks[cbIndex] == callback) { +            return INVALID_OPERATION;          } -        if (param2 == NULL) break; -        desc = (const OutputDescriptor *)param2; +    } +    callbacks.add(callback); + +    mAudioDeviceCallbacks.replaceValueFor(audioIo, callbacks); +    return NO_ERROR; +} -        ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x channel mask %#x " -                "frameCount %zu latency %d", -                ioHandle, desc->samplingRate, desc->format, -                desc->channelMask, desc->frameCount, desc->latency); -        OutputDescriptor *outputDesc = gOutputs.valueAt(index); -        delete outputDesc; -        outputDesc =  new OutputDescriptor(*desc); -        gOutputs.replaceValueFor(ioHandle, outputDesc); -    } break; -    case INPUT_OPENED: -    case INPUT_CLOSED: -    case INPUT_CONFIG_CHANGED: -        break; +status_t AudioSystem::AudioFlingerClient::removeAudioDeviceCallback( +        const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo) +{ +    Mutex::Autolock _l(mLock); +    ssize_t 
ioIndex = mAudioDeviceCallbacks.indexOfKey(audioIo); +    if (ioIndex < 0) { +        return INVALID_OPERATION; +    } +    Vector < sp<AudioDeviceCallback> > callbacks = mAudioDeviceCallbacks.valueAt(ioIndex); +    size_t cbIndex; +    for (cbIndex = 0; cbIndex < callbacks.size(); cbIndex++) { +        if (callbacks[cbIndex] == callback) { +            break; +        } +    } +    if (cbIndex == callbacks.size()) { +        return INVALID_OPERATION;      } +    callbacks.removeAt(cbIndex); +    if (callbacks.size() != 0) { +        mAudioDeviceCallbacks.replaceValueFor(audioIo, callbacks); +    } else { +        mAudioDeviceCallbacks.removeItem(audioIo); +    } +    return NO_ERROR;  } -void AudioSystem::setErrorCallback(audio_error_callback cb) +/* static */ void AudioSystem::setErrorCallback(audio_error_callback cb)  {      Mutex::Autolock _l(gLock);      gAudioErrorCallback = cb;  } +/*static*/ void AudioSystem::setDynPolicyCallback(dynamic_policy_callback cb) +{ +    Mutex::Autolock _l(gLock); +    gDynPolicyCallback = cb; +} +  // client singleton for AudioPolicyService binder interface  // protected by gLockAPS  sp<IAudioPolicyService> AudioSystem::gAudioPolicyService; @@ -590,18 +690,22 @@ const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service()  status_t AudioSystem::setDeviceConnectionState(audio_devices_t device,                                                 audio_policy_dev_state_t state, -                                               const char *device_address) +                                               const char *device_address, +                                               const char *device_name)  {      const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();      const char *address = ""; +    const char *name = "";      if (aps == 0) return PERMISSION_DENIED;      if (device_address != NULL) {          address = device_address;      } - -    return aps->setDeviceConnectionState(device, state, 
address); +    if (device_name != NULL) { +        name = device_name; +    } +    return aps->setDeviceConnectionState(device, state, address, name);  }  audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device, @@ -653,17 +757,19 @@ status_t AudioSystem::getOutputForAttr(const audio_attributes_t *attr,                                          audio_io_handle_t *output,                                          audio_session_t session,                                          audio_stream_type_t *stream, +                                        uid_t uid,                                          uint32_t samplingRate,                                          audio_format_t format,                                          audio_channel_mask_t channelMask,                                          audio_output_flags_t flags, +                                        audio_port_handle_t selectedDeviceId,                                          const audio_offload_info_t *offloadInfo)  {      const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();      if (aps == 0) return NO_INIT; -    return aps->getOutputForAttr(attr, output, session, stream, +    return aps->getOutputForAttr(attr, output, session, stream, uid,                                   samplingRate, format, channelMask, -                                 flags, offloadInfo); +                                 flags, selectedDeviceId, offloadInfo);  }  status_t AudioSystem::startOutput(audio_io_handle_t output, @@ -696,14 +802,17 @@ void AudioSystem::releaseOutput(audio_io_handle_t output,  status_t AudioSystem::getInputForAttr(const audio_attributes_t *attr,                                  audio_io_handle_t *input,                                  audio_session_t session, +                                uid_t uid,                                  uint32_t samplingRate,                                  audio_format_t format,                                  
audio_channel_mask_t channelMask, -                                audio_input_flags_t flags) +                                audio_input_flags_t flags, +                                audio_port_handle_t selectedDeviceId)  {      const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();      if (aps == 0) return NO_INIT; -    return aps->getInputForAttr(attr, input, session, samplingRate, format, channelMask, flags); +    return aps->getInputForAttr( +            attr, input, session, uid, samplingRate, format, channelMask, flags, selectedDeviceId);  }  status_t AudioSystem::startInput(audio_io_handle_t input, @@ -858,18 +967,16 @@ void AudioSystem::clearAudioConfigCache()      // called by restoreTrack_l(), which needs new IAudioFlinger and IAudioPolicyService instances      ALOGV("clearAudioConfigCache()");      { -        Mutex::Autolock _l(gLockCache); -        gOutputs.clear(); -    } -    {          Mutex::Autolock _l(gLock); +        if (gAudioFlingerClient != 0) { +            gAudioFlingerClient->clearIoCache(); +        }          gAudioFlinger.clear();      }      {          Mutex::Autolock _l(gLockAPS);          gAudioPolicyService.clear();      } -    // Do not clear gAudioPortCallback  }  bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info) @@ -929,10 +1036,75 @@ status_t AudioSystem::setAudioPortConfig(const struct audio_port_config *config)      return aps->setAudioPortConfig(config);  } -void AudioSystem::setAudioPortCallback(sp<AudioPortCallback> callBack) +status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callback) +{ +    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); +    if (aps == 0) return PERMISSION_DENIED; + +    Mutex::Autolock _l(gLockAPS); +    if (gAudioPolicyServiceClient == 0) { +        return NO_INIT; +    } +    int ret = gAudioPolicyServiceClient->addAudioPortCallback(callback); +    if (ret == 1) { +        
aps->setAudioPortCallbacksEnabled(true); +    } +    return (ret < 0) ? INVALID_OPERATION : NO_ERROR; +} + +/*static*/ +status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callback) +{ +    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); +    if (aps == 0) return PERMISSION_DENIED; + +    Mutex::Autolock _l(gLockAPS); +    if (gAudioPolicyServiceClient == 0) { +        return NO_INIT; +    } +    int ret = gAudioPolicyServiceClient->removeAudioPortCallback(callback); +    if (ret == 0) { +        aps->setAudioPortCallbacksEnabled(false); +    } +    return (ret < 0) ? INVALID_OPERATION : NO_ERROR; +} + +status_t AudioSystem::addAudioDeviceCallback( +        const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)  { -    Mutex::Autolock _l(gLockAPC); -    gAudioPortCallback = callBack; +    const sp<AudioFlingerClient> afc = getAudioFlingerClient(); +    if (afc == 0) { +        return NO_INIT; +    } +    status_t status = afc->addAudioDeviceCallback(callback, audioIo); +    if (status == NO_ERROR) { +        const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); +        if (af != 0) { +            af->registerClient(afc); +        } +    } +    return status; +} + +status_t AudioSystem::removeAudioDeviceCallback( +        const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo) +{ +    const sp<AudioFlingerClient> afc = getAudioFlingerClient(); +    if (afc == 0) { +        return NO_INIT; +    } +    return afc->removeAudioDeviceCallback(callback, audioIo); +} + +audio_port_handle_t AudioSystem::getDeviceIdForIo(audio_io_handle_t audioIo) +{ +    const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger(); +    if (af == 0) return PERMISSION_DENIED; +    const sp<AudioIoDescriptor> desc = getIoDescriptor(audioIo); +    if (desc == 0) { +        return AUDIO_PORT_HANDLE_NONE; +    } +    return desc->getDeviceId();  }  status_t 
AudioSystem::acquireSoundTriggerSession(audio_session_t *session, @@ -965,38 +1137,100 @@ status_t AudioSystem::registerPolicyMixes(Vector<AudioMix> mixes, bool registrat      return aps->registerPolicyMixes(mixes, registration);  } +status_t AudioSystem::startAudioSource(const struct audio_port_config *source, +                                       const audio_attributes_t *attributes, +                                       audio_io_handle_t *handle) +{ +    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); +    if (aps == 0) return PERMISSION_DENIED; +    return aps->startAudioSource(source, attributes, handle); +} + +status_t AudioSystem::stopAudioSource(audio_io_handle_t handle) +{ +    const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); +    if (aps == 0) return PERMISSION_DENIED; +    return aps->stopAudioSource(handle); +} +  // --------------------------------------------------------------------------- -void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) +int AudioSystem::AudioPolicyServiceClient::addAudioPortCallback( +        const sp<AudioPortCallback>& callback)  { -    { -        Mutex::Autolock _l(gLockAPC); -        if (gAudioPortCallback != 0) { -            gAudioPortCallback->onServiceDied(); +    Mutex::Autolock _l(mLock); +    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { +        if (mAudioPortCallbacks[i] == callback) { +            return -1;          }      } -    { -        Mutex::Autolock _l(gLockAPS); -        AudioSystem::gAudioPolicyService.clear(); -    } +    mAudioPortCallbacks.add(callback); +    return mAudioPortCallbacks.size(); +} -    ALOGW("AudioPolicyService server died!"); +int AudioSystem::AudioPolicyServiceClient::removeAudioPortCallback( +        const sp<AudioPortCallback>& callback) +{ +    Mutex::Autolock _l(mLock); +    size_t i; +    for (i = 0; i < mAudioPortCallbacks.size(); i++) { +        if 
(mAudioPortCallbacks[i] == callback) { +            break; +        } +    } +    if (i == mAudioPortCallbacks.size()) { +        return -1; +    } +    mAudioPortCallbacks.removeAt(i); +    return mAudioPortCallbacks.size();  } +  void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()  { -    Mutex::Autolock _l(gLockAPC); -    if (gAudioPortCallback != 0) { -        gAudioPortCallback->onAudioPortListUpdate(); +    Mutex::Autolock _l(mLock); +    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { +        mAudioPortCallbacks[i]->onAudioPortListUpdate();      }  }  void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()  { -    Mutex::Autolock _l(gLockAPC); -    if (gAudioPortCallback != 0) { -        gAudioPortCallback->onAudioPatchListUpdate(); +    Mutex::Autolock _l(mLock); +    for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { +        mAudioPortCallbacks[i]->onAudioPatchListUpdate(); +    } +} + +void AudioSystem::AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate( +        String8 regId, int32_t state) +{ +    ALOGV("AudioPolicyServiceClient::onDynamicPolicyMixStateUpdate(%s, %d)", regId.string(), state); +    dynamic_policy_callback cb = NULL; +    { +        Mutex::Autolock _l(AudioSystem::gLock); +        cb = gDynPolicyCallback; +    } + +    if (cb != NULL) { +        cb(DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE, regId, state);      }  } -}; // namespace android +void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) +{ +    { +        Mutex::Autolock _l(mLock); +        for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { +            mAudioPortCallbacks[i]->onServiceDied(); +        } +    } +    { +        Mutex::Autolock _l(gLockAPS); +        AudioSystem::gAudioPolicyService.clear(); +    } + +    ALOGW("AudioPolicyService server died!"); +} + +} // namespace android diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 735db5c..444f4d8 100644 
--- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -33,11 +33,28 @@  #define WAIT_PERIOD_MS                  10  #define WAIT_STREAM_END_TIMEOUT_SEC     120 - +static const int kMaxLoopCountNotifications = 32;  namespace android {  // --------------------------------------------------------------------------- +// TODO: Move to a separate .h + +template <typename T> +static inline const T &min(const T &x, const T &y) { +    return x < y ? x : y; +} + +template <typename T> +static inline const T &max(const T &x, const T &y) { +    return x > y ? x : y; +} + +static inline nsecs_t framesToNanoseconds(ssize_t frames, uint32_t sampleRate, float speed) +{ +    return ((double)frames * 1000000000) / ((double)sampleRate * speed); +} +  static int64_t convertTimespecToUs(const struct timespec &tv)  {      return tv.tv_sec * 1000000ll + tv.tv_nsec / 1000; @@ -51,6 +68,42 @@ static int64_t getNowUs()      return convertTimespecToUs(tv);  } +// FIXME: we don't use the pitch setting in the time stretcher (not working); +// instead we emulate it using our sample rate converter. +static const bool kFixPitch = true; // enable pitch fix +static inline uint32_t adjustSampleRate(uint32_t sampleRate, float pitch) +{ +    return kFixPitch ? (sampleRate * pitch + 0.5) : sampleRate; +} + +static inline float adjustSpeed(float speed, float pitch) +{ +    return kFixPitch ? speed / max(pitch, AUDIO_TIMESTRETCH_PITCH_MIN_DELTA) : speed; +} + +static inline float adjustPitch(float pitch) +{ +    return kFixPitch ? AUDIO_TIMESTRETCH_PITCH_NORMAL : pitch; +} + +// Must match similar computation in createTrack_l in Threads.cpp. 
+// TODO: Move to a common library +static size_t calculateMinFrameCount( +        uint32_t afLatencyMs, uint32_t afFrameCount, uint32_t afSampleRate, +        uint32_t sampleRate, float speed) +{ +    // Ensure that buffer depth covers at least audio hardware latency +    uint32_t minBufCount = afLatencyMs / ((1000 * afFrameCount) / afSampleRate); +    if (minBufCount < 2) { +        minBufCount = 2; +    } +    ALOGV("calculateMinFrameCount afLatency %u  afFrameCount %u  afSampleRate %u  " +            "sampleRate %u  speed %f  minBufCount: %u", +            afLatencyMs, afFrameCount, afSampleRate, sampleRate, speed, minBufCount); +    return minBufCount * sourceFramesNeededWithTimestretch( +            sampleRate, afFrameCount, afSampleRate, speed); +} +  // static  status_t AudioTrack::getMinFrameCount(          size_t* frameCount, @@ -61,12 +114,11 @@ status_t AudioTrack::getMinFrameCount(          return BAD_VALUE;      } -    // FIXME merge with similar code in createTrack_l(), except we're missing -    //       some information here that is available in createTrack_l(): +    // FIXME handle in server, like createTrack_l(), possible missing info:      //          audio_io_handle_t output      //          audio_format_t format      //          audio_channel_mask_t channelMask -    //          audio_output_flags_t flags +    //          audio_output_flags_t flags (FAST)      uint32_t afSampleRate;      status_t status;      status = AudioSystem::getOutputSamplingRate(&afSampleRate, streamType); @@ -90,23 +142,20 @@ status_t AudioTrack::getMinFrameCount(          return status;      } -    // Ensure that buffer depth covers at least audio hardware latency -    uint32_t minBufCount = afLatency / ((1000 * afFrameCount) / afSampleRate); -    if (minBufCount < 2) { -        minBufCount = 2; -    } +    // When called from createTrack, speed is 1.0f (normal speed). +    // This is rechecked again on setting playback rate (TODO: on setting sample rate, too). 
+    *frameCount = calculateMinFrameCount(afLatency, afFrameCount, afSampleRate, sampleRate, 1.0f); -    *frameCount = (sampleRate == 0) ? afFrameCount * minBufCount : -            afFrameCount * minBufCount * uint64_t(sampleRate) / afSampleRate; -    // The formula above should always produce a non-zero value, but return an error -    // in the unlikely event that it does not, as that's part of the API contract. +    // The formula above should always produce a non-zero value under normal circumstances: +    // AudioTrack.SAMPLE_RATE_HZ_MIN <= sampleRate <= AudioTrack.SAMPLE_RATE_HZ_MAX. +    // Return error in the unlikely event that it does not, as that's part of the API contract.      if (*frameCount == 0) { -        ALOGE("AudioTrack::getMinFrameCount failed for streamType %d, sampleRate %d", +        ALOGE("AudioTrack::getMinFrameCount failed for streamType %d, sampleRate %u",                  streamType, sampleRate);          return BAD_VALUE;      } -    ALOGV("getMinFrameCount=%zu: afFrameCount=%zu, minBufCount=%d, afSampleRate=%d, afLatency=%d", -            *frameCount, afFrameCount, minBufCount, afSampleRate, afLatency); +    ALOGV("getMinFrameCount=%zu: afFrameCount=%zu, afSampleRate=%u, afLatency=%u", +            *frameCount, afFrameCount, afSampleRate, afLatency);      return NO_ERROR;  } @@ -117,7 +166,8 @@ AudioTrack::AudioTrack()        mIsTimed(false),        mPreviousPriority(ANDROID_PRIORITY_NORMAL),        mPreviousSchedulingGroup(SP_DEFAULT), -      mPausedPosition(0) +      mPausedPosition(0), +      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE)  {      mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;      mAttributes.usage = AUDIO_USAGE_UNKNOWN; @@ -140,17 +190,19 @@ AudioTrack::AudioTrack(          const audio_offload_info_t *offloadInfo,          int uid,          pid_t pid, -        const audio_attributes_t* pAttributes) +        const audio_attributes_t* pAttributes, +        bool doNotReconnect)      : mStatus(NO_INIT),        
mIsTimed(false),        mPreviousPriority(ANDROID_PRIORITY_NORMAL),        mPreviousSchedulingGroup(SP_DEFAULT), -      mPausedPosition(0) +      mPausedPosition(0), +      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE)  {      mStatus = set(streamType, sampleRate, format, channelMask,              frameCount, flags, cbf, user, notificationFrames,              0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType, -            offloadInfo, uid, pid, pAttributes); +            offloadInfo, uid, pid, pAttributes, doNotReconnect);  }  AudioTrack::AudioTrack( @@ -168,17 +220,19 @@ AudioTrack::AudioTrack(          const audio_offload_info_t *offloadInfo,          int uid,          pid_t pid, -        const audio_attributes_t* pAttributes) +        const audio_attributes_t* pAttributes, +        bool doNotReconnect)      : mStatus(NO_INIT),        mIsTimed(false),        mPreviousPriority(ANDROID_PRIORITY_NORMAL),        mPreviousSchedulingGroup(SP_DEFAULT), -      mPausedPosition(0) +      mPausedPosition(0), +      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE)  {      mStatus = set(streamType, sampleRate, format, channelMask,              0 /*frameCount*/, flags, cbf, user, notificationFrames,              sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo, -            uid, pid, pAttributes); +            uid, pid, pAttributes, doNotReconnect);  }  AudioTrack::~AudioTrack() @@ -194,13 +248,17 @@ AudioTrack::~AudioTrack()              mAudioTrackThread->requestExitAndWait();              mAudioTrackThread.clear();          } +        // No lock here: worst case we remove a NULL callback which will be a nop +        if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) { +            AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput); +        }          IInterface::asBinder(mAudioTrack)->unlinkToDeath(mDeathNotifier, this);          mAudioTrack.clear();          mCblkMemory.clear();          
mSharedBuffer.clear();          IPCThreadState::self()->flushCommands(); -        ALOGV("~AudioTrack, releasing session id from %d on behalf of %d", -                IPCThreadState::self()->getCallingPid(), mClientPid); +        ALOGV("~AudioTrack, releasing session id %d from %d on behalf of %d", +                mSessionId, IPCThreadState::self()->getCallingPid(), mClientPid);          AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);      }  } @@ -222,12 +280,13 @@ status_t AudioTrack::set(          const audio_offload_info_t *offloadInfo,          int uid,          pid_t pid, -        const audio_attributes_t* pAttributes) +        const audio_attributes_t* pAttributes, +        bool doNotReconnect)  {      ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, " -          "flags #%x, notificationFrames %u, sessionId %d, transferType %d", +          "flags #%x, notificationFrames %u, sessionId %d, transferType %d, uid %d, pid %d",            streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames, -          sessionId, transferType); +          sessionId, transferType, uid, pid);      switch (transferType) {      case TRANSFER_DEFAULT: @@ -264,14 +323,13 @@ status_t AudioTrack::set(      }      mSharedBuffer = sharedBuffer;      mTransfer = transferType; +    mDoNotReconnect = doNotReconnect; -    ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(), +    ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %zu", sharedBuffer->pointer(),              sharedBuffer->size());      ALOGV("set() streamType %d frameCount %zu flags %04x", streamType, frameCount, flags); -    AutoMutex lock(mLock); -      // invariant that mAudioTrack != 0 is true only after set() returns successfully      if (mAudioTrack != 0) {          ALOGE("Track already in use"); @@ -295,6 +353,9 @@ status_t AudioTrack::set(          ALOGV("Building AudioTrack with attributes: usage=%d content=%d 
flags=0x%x tags=[%s]",                  mAttributes.usage, mAttributes.content_type, mAttributes.flags, mAttributes.tags);          mStreamType = AUDIO_STREAM_DEFAULT; +        if ((mAttributes.flags & AUDIO_FLAG_HW_AV_SYNC) != 0) { +            flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_HW_AV_SYNC); +        }      }      // these below should probably come from the audioFlinger too... @@ -317,12 +378,6 @@ status_t AudioTrack::set(      uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);      mChannelCount = channelCount; -    // AudioFlinger does not currently support 8-bit data in shared memory -    if (format == AUDIO_FORMAT_PCM_8_BIT && sharedBuffer != 0) { -        ALOGE("8-bit data in shared memory is not supported"); -        return BAD_VALUE; -    } -      // force direct flag if format is not linear PCM      // or offload was requested      if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) @@ -346,12 +401,9 @@ status_t AudioTrack::set(          } else {              mFrameSize = sizeof(uint8_t);          } -        mFrameSizeAF = mFrameSize;      } else {          ALOG_ASSERT(audio_is_linear_pcm(format));          mFrameSize = channelCount * audio_bytes_per_sample(format); -        mFrameSizeAF = channelCount * audio_bytes_per_sample( -                format == AUDIO_FORMAT_PCM_8_BIT ? 
AUDIO_FORMAT_PCM_16_BIT : format);          // createTrack will return an error if PCM format is not supported by server,          // so no need to check for specific PCM formats here      } @@ -361,6 +413,8 @@ status_t AudioTrack::set(          return BAD_VALUE;      }      mSampleRate = sampleRate; +    mOriginalSampleRate = sampleRate; +    mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;      // Make copy of input parameter offloadInfo so that in the future:      //  (a) createTrack_l doesn't need it as an input parameter @@ -403,6 +457,7 @@ status_t AudioTrack::set(      if (cbf != NULL) {          mAudioTrackThread = new AudioTrackThread(*this, threadCanCallJava);          mAudioTrackThread->run("AudioTrack", ANDROID_PRIORITY_AUDIO, 0 /*stack*/); +        // thread begins in paused state, and will not reference us until start()      }      // create the IAudioTrack @@ -420,12 +475,14 @@ status_t AudioTrack::set(      mStatus = NO_ERROR;      mState = STATE_STOPPED;      mUserData = user; -    mLoopPeriod = 0; +    mLoopCount = 0; +    mLoopStart = 0; +    mLoopEnd = 0; +    mLoopCountNotified = 0;      mMarkerPosition = 0;      mMarkerReached = false;      mNewPosition = 0;      mUpdatePeriod = 0; -    mServer = 0;      mPosition = 0;      mReleased = 0;      mStartUs = 0; @@ -433,6 +490,9 @@ status_t AudioTrack::set(      mSequence = 1;      mObservedSequence = mSequence;      mInUnderrun = false; +    mPreviousTimestampValid = false; +    mTimestampStartupGlitchReported = false; +    mRetrogradeMotionReported = false;      return NO_ERROR;  } @@ -459,6 +519,10 @@ status_t AudioTrack::start()      if (previousState == STATE_STOPPED || previousState == STATE_FLUSHED) {          // reset current position as seen by client to 0          mPosition = 0; +        mPreviousTimestampValid = false; +        mTimestampStartupGlitchReported = false; +        mRetrogradeMotionReported = false; +          // For offloaded tracks, we don't know if the hardware counters are 
really zero here,          // since the flush is asynchronous and stop may not fully drain.          // We save the time when the track is started to later verify whether @@ -531,14 +595,12 @@ void AudioTrack::stop()      // the playback head position will reset to 0, so if a marker is set, we need      // to activate it again      mMarkerReached = false; -#if 0 -    // Force flush if a shared buffer is used otherwise audioflinger -    // will not stop before end of buffer is reached. -    // It may be needed to make sure that we stop playback, likely in case looping is on. +      if (mSharedBuffer != 0) { -        flush_l(); +        // clear buffer position and loop count. +        mStaticProxy->setBufferPositionAndLoop(0 /* position */, +                0 /* loopStart */, 0 /* loopEnd */, 0 /* loopCount */);      } -#endif      sp<AudioTrackThread> t = mAudioTrackThread;      if (t != 0) { @@ -669,24 +731,31 @@ void AudioTrack::getAuxEffectSendLevel(float* level) const  status_t AudioTrack::setSampleRate(uint32_t rate)  { -    if (mIsTimed || isOffloadedOrDirect()) { +    AutoMutex lock(mLock); +    if (rate == mSampleRate) { +        return NO_ERROR; +    } +    if (mIsTimed || isOffloadedOrDirect_l() || (mFlags & AUDIO_OUTPUT_FLAG_FAST)) {          return INVALID_OPERATION;      } - -    AutoMutex lock(mLock);      if (mOutput == AUDIO_IO_HANDLE_NONE) {          return NO_INIT;      } +    // NOTE: it is theoretically possible, but highly unlikely, that a device change +    // could mean a previously allowed sampling rate is no longer allowed.      
uint32_t afSamplingRate;      if (AudioSystem::getSamplingRate(mOutput, &afSamplingRate) != NO_ERROR) {          return NO_INIT;      } -    if (rate == 0 || rate > afSamplingRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) { +    // pitch is emulated by adjusting speed and sampleRate +    const uint32_t effectiveSampleRate = adjustSampleRate(rate, mPlaybackRate.mPitch); +    if (rate == 0 || effectiveSampleRate > afSamplingRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) {          return BAD_VALUE;      } +    // TODO: Should we also check if the buffer size is compatible?      mSampleRate = rate; -    mProxy->setSampleRate(rate); +    mProxy->setSampleRate(effectiveSampleRate);      return NO_ERROR;  } @@ -714,6 +783,69 @@ uint32_t AudioTrack::getSampleRate() const      return mSampleRate;  } +uint32_t AudioTrack::getOriginalSampleRate() const +{ +    if (mIsTimed) { +        return 0; +    } + +    return mOriginalSampleRate; +} + +status_t AudioTrack::setPlaybackRate(const AudioPlaybackRate &playbackRate) +{ +    AutoMutex lock(mLock); +    if (isAudioPlaybackRateEqual(playbackRate, mPlaybackRate)) { +        return NO_ERROR; +    } +    if (mIsTimed || isOffloadedOrDirect_l()) { +        return INVALID_OPERATION; +    } +    if (mFlags & AUDIO_OUTPUT_FLAG_FAST) { +        return INVALID_OPERATION; +    } +    // pitch is emulated by adjusting speed and sampleRate +    const uint32_t effectiveRate = adjustSampleRate(mSampleRate, playbackRate.mPitch); +    const float effectiveSpeed = adjustSpeed(playbackRate.mSpeed, playbackRate.mPitch); +    const float effectivePitch = adjustPitch(playbackRate.mPitch); +    AudioPlaybackRate playbackRateTemp = playbackRate; +    playbackRateTemp.mSpeed = effectiveSpeed; +    playbackRateTemp.mPitch = effectivePitch; + +    if (!isAudioPlaybackRateValid(playbackRateTemp)) { +        return BAD_VALUE; +    } +    // Check if the buffer size is compatible. 
+    if (!isSampleRateSpeedAllowed_l(effectiveRate, effectiveSpeed)) { +        ALOGV("setPlaybackRate(%f, %f) failed", playbackRate.mSpeed, playbackRate.mPitch); +        return BAD_VALUE; +    } + +    // Check resampler ratios are within bounds +    if (effectiveRate > mSampleRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX) { +        ALOGV("setPlaybackRate(%f, %f) failed. Resample rate exceeds max accepted value", +                playbackRate.mSpeed, playbackRate.mPitch); +        return BAD_VALUE; +    } + +    if (effectiveRate * AUDIO_RESAMPLER_UP_RATIO_MAX < mSampleRate) { +        ALOGV("setPlaybackRate(%f, %f) failed. Resample rate below min accepted value", +                        playbackRate.mSpeed, playbackRate.mPitch); +        return BAD_VALUE; +    } +    mPlaybackRate = playbackRate; +    //set effective rates +    mProxy->setPlaybackRate(playbackRateTemp); +    mProxy->setSampleRate(effectiveRate); // FIXME: not quite "atomic" with setPlaybackRate +    return NO_ERROR; +} + +const AudioPlaybackRate& AudioTrack::getPlaybackRate() const +{ +    AutoMutex lock(mLock); +    return mPlaybackRate; +} +  status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)  {      if (mSharedBuffer == 0 || mIsTimed || isOffloadedOrDirect()) { @@ -740,10 +872,15 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount  void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount)  { -    // Setting the loop will reset next notification update period (like setPosition). -    mNewPosition = updateAndGetPosition_l() + mUpdatePeriod; -    mLoopPeriod = loopCount != 0 ? loopEnd - loopStart : 0; +    // We do not update the periodic notification point. 
+    // mNewPosition = updateAndGetPosition_l() + mUpdatePeriod; +    mLoopCount = loopCount; +    mLoopEnd = loopEnd; +    mLoopStart = loopStart; +    mLoopCountNotified = loopCount;      mStaticProxy->setLoop(loopStart, loopEnd, loopCount); + +    // Waking the AudioTrackThread is not needed as this cannot be called when active.  }  status_t AudioTrack::setMarkerPosition(uint32_t marker) @@ -757,6 +894,10 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker)      mMarkerPosition = marker;      mMarkerReached = false; +    sp<AudioTrackThread> t = mAudioTrackThread; +    if (t != 0) { +        t->wake(); +    }      return NO_ERROR;  } @@ -786,6 +927,10 @@ status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod)      mNewPosition = updateAndGetPosition_l() + updatePeriod;      mUpdatePeriod = updatePeriod; +    sp<AudioTrackThread> t = mAudioTrackThread; +    if (t != 0) { +        t->wake(); +    }      return NO_ERROR;  } @@ -823,12 +968,11 @@ status_t AudioTrack::setPosition(uint32_t position)      if (mState == STATE_ACTIVE) {          return INVALID_OPERATION;      } +    // After setting the position, use full update period before notification.      mNewPosition = updateAndGetPosition_l() + mUpdatePeriod; -    mLoopPeriod = 0; -    // FIXME Check whether loops and setting position are incompatible in old code. -    // If we use setLoop for both purposes we lose the capability to set the position while looping. -    mStaticProxy->setLoop(position, mFrameCount, 0); +    mStaticProxy->setBufferPosition(position); +    // Waking the AudioTrackThread is not needed as this cannot be called when active.      
return NO_ERROR;  } @@ -849,15 +993,18 @@ status_t AudioTrack::getPosition(uint32_t *position)          }          if (mOutput != AUDIO_IO_HANDLE_NONE) { -            uint32_t halFrames; -            AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames); +            uint32_t halFrames; // actually unused +            (void) AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames); +            // FIXME: on getRenderPosition() error, we return OK with frame position 0.          }          // FIXME: dspFrames may not be zero in (mState == STATE_STOPPED || mState == STATE_FLUSHED)          // due to hardware latency. We leave this behavior for now.          *position = dspFrames;      } else {          if (mCblk->mFlags & CBLK_INVALID) { -            restoreTrack_l("getPosition"); +            (void) restoreTrack_l("getPosition"); +            // FIXME: for compatibility with the Java API we ignore the restoreTrack_l() +            // error here (e.g. DEAD_OBJECT) and return OK with the last recorded server position.          }          // IAudioTrack::stop() isn't synchronous; we don't know when presentation completes @@ -893,10 +1040,19 @@ status_t AudioTrack::reload()          return INVALID_OPERATION;      }      mNewPosition = mUpdatePeriod; -    mLoopPeriod = 0; -    // FIXME The new code cannot reload while keeping a loop specified. -    // Need to check how the old code handled this, and whether it's a significant change. -    mStaticProxy->setLoop(0, mFrameCount, 0); +    (void) updateAndGetPosition_l(); +    mPosition = 0; +    mPreviousTimestampValid = false; +#if 0 +    // The documentation is not clear on the behavior of reload() and the restoration +    // of loop count. Historically we have not restored loop count, start, end, +    // but it makes sense if one desires to repeat playing a particular sound. 
+    if (mLoopCount != 0) { +        mLoopCountNotified = mLoopCount; +        mStaticProxy->setLoop(mLoopStart, mLoopEnd, mLoopCount); +    } +#endif +    mStaticProxy->setBufferPosition(0);      return NO_ERROR;  } @@ -906,6 +1062,28 @@ audio_io_handle_t AudioTrack::getOutput() const      return mOutput;  } +status_t AudioTrack::setOutputDevice(audio_port_handle_t deviceId) { +    AutoMutex lock(mLock); +    if (mSelectedDeviceId != deviceId) { +        mSelectedDeviceId = deviceId; +        android_atomic_or(CBLK_INVALID, &mCblk->mFlags); +    } +    return NO_ERROR; +} + +audio_port_handle_t AudioTrack::getOutputDevice() { +    AutoMutex lock(mLock); +    return mSelectedDeviceId; +} + +audio_port_handle_t AudioTrack::getRoutedDeviceId() { +    AutoMutex lock(mLock); +    if (mOutput == AUDIO_IO_HANDLE_NONE) { +        return AUDIO_PORT_HANDLE_NONE; +    } +    return AudioSystem::getDeviceIdForIo(mOutput); +} +  status_t AudioTrack::attachAuxEffect(int effectId)  {      AutoMutex lock(mLock); @@ -935,19 +1113,23 @@ status_t AudioTrack::createTrack_l()          return NO_INIT;      } +    if (mDeviceCallback != 0 && mOutput != AUDIO_IO_HANDLE_NONE) { +        AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput); +    }      audio_io_handle_t output;      audio_stream_type_t streamType = mStreamType;      audio_attributes_t *attr = (mStreamType == AUDIO_STREAM_DEFAULT) ? 
&mAttributes : NULL; -    status_t status = AudioSystem::getOutputForAttr(attr, &output, -                                                    (audio_session_t)mSessionId, &streamType, -                                                    mSampleRate, mFormat, mChannelMask, -                                                    mFlags, mOffloadInfo); +    status_t status; +    status = AudioSystem::getOutputForAttr(attr, &output, +                                           (audio_session_t)mSessionId, &streamType, mClientUid, +                                           mSampleRate, mFormat, mChannelMask, +                                           mFlags, mSelectedDeviceId, mOffloadInfo);      if (status != NO_ERROR || output == AUDIO_IO_HANDLE_NONE) { -        ALOGE("Could not get audio output for stream type %d, usage %d, sample rate %u, format %#x," +        ALOGE("Could not get audio output for session %d, stream type %d, usage %d, sample rate %u, format %#x,"                " channel mask %#x, flags %#x", -              streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags); +              mSessionId, streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags);          return BAD_VALUE;      }      { @@ -955,29 +1137,27 @@ status_t AudioTrack::createTrack_l()      // we must release it ourselves if anything goes wrong.      
// Not all of these values are needed under all conditions, but it is easier to get them all - -    uint32_t afLatency; -    status = AudioSystem::getLatency(output, &afLatency); +    status = AudioSystem::getLatency(output, &mAfLatency);      if (status != NO_ERROR) {          ALOGE("getLatency(%d) failed status %d", output, status);          goto release;      } +    ALOGV("createTrack_l() output %d afLatency %u", output, mAfLatency); -    size_t afFrameCount; -    status = AudioSystem::getFrameCount(output, &afFrameCount); +    status = AudioSystem::getFrameCount(output, &mAfFrameCount);      if (status != NO_ERROR) {          ALOGE("getFrameCount(output=%d) status %d", output, status);          goto release;      } -    uint32_t afSampleRate; -    status = AudioSystem::getSamplingRate(output, &afSampleRate); +    status = AudioSystem::getSamplingRate(output, &mAfSampleRate);      if (status != NO_ERROR) {          ALOGE("getSamplingRate(output=%d) status %d", output, status);          goto release;      }      if (mSampleRate == 0) { -        mSampleRate = afSampleRate; +        mSampleRate = mAfSampleRate; +        mOriginalSampleRate = mAfSampleRate;      }      // Client decides whether the track is TIMED (see below), but can only express a preference      // for FAST.  Server will perform additional tests. 
@@ -986,23 +1166,23 @@ status_t AudioTrack::createTrack_l()              // use case 1: shared buffer              (mSharedBuffer != 0) ||              // use case 2: callback transfer mode -            (mTransfer == TRANSFER_CALLBACK)) && +            (mTransfer == TRANSFER_CALLBACK) || +            // use case 3: obtain/release mode +            (mTransfer == TRANSFER_OBTAIN)) &&              // matching sample rate -            (mSampleRate == afSampleRate))) { -        ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client"); +            (mSampleRate == mAfSampleRate))) { +        ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client; transfer %d, track %u Hz, output %u Hz", +                mTransfer, mSampleRate, mAfSampleRate);          // once denied, do not request again if IAudioTrack is re-created          mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);      } -    ALOGV("createTrack_l() output %d afLatency %d", output, afLatency);      // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where      //  n = 1   fast track with single buffering; nBuffering is ignored      //  n = 2   fast track with double buffering -    //  n = 2   normal track, no sample rate conversion -    //  n = 3   normal track, with sample rate conversion -    //          (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering) -    //  n > 3   very high latency or very small notification interval; nBuffering is ignored -    const uint32_t nBuffering = (mSampleRate == afSampleRate) ? 2 : 3; +    //  n = 2   normal track, (including those with sample rate conversion) +    //  n >= 3  very high latency or very small notification interval (unused). 
+    const uint32_t nBuffering = 2;      mNotificationFramesAct = mNotificationFramesReq; @@ -1013,18 +1193,18 @@ status_t AudioTrack::createTrack_l()              // Same comment as below about ignoring frameCount parameter for set()              frameCount = mSharedBuffer->size();          } else if (frameCount == 0) { -            frameCount = afFrameCount; +            frameCount = mAfFrameCount;          }          if (mNotificationFramesAct != frameCount) {              mNotificationFramesAct = frameCount;          }      } else if (mSharedBuffer != 0) { - -        // Ensure that buffer alignment matches channel count -        // 8-bit data in shared memory is not currently supported by AudioFlinger -        size_t alignment = audio_bytes_per_sample( -                mFormat == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : mFormat); +        // FIXME: Ensure client side memory buffers need +        // not have additional alignment beyond sample +        // (e.g. 16 bit stereo accessed as 32 bit frame). +        size_t alignment = audio_bytes_per_sample(mFormat);          if (alignment & 1) { +            // for AUDIO_FORMAT_PCM_24_BIT_PACKED (not exposed through Java).              alignment = 1;          }          if (mChannelCount > 1) { @@ -1042,40 +1222,19 @@ status_t AudioTrack::createTrack_l()          // there's no frameCount parameter.          // But when initializing a shared buffer AudioTrack via set(),          // there _is_ a frameCount parameter.  We silently ignore it. 
-        frameCount = mSharedBuffer->size() / mFrameSizeAF; - -    } else if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) { - -        // FIXME move these calculations and associated checks to server - -        // Ensure that buffer depth covers at least audio hardware latency -        uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); -        ALOGV("afFrameCount=%zu, minBufCount=%d, afSampleRate=%u, afLatency=%d", -                afFrameCount, minBufCount, afSampleRate, afLatency); -        if (minBufCount <= nBuffering) { -            minBufCount = nBuffering; -        } - -        size_t minFrameCount = afFrameCount * minBufCount * uint64_t(mSampleRate) / afSampleRate; -        ALOGV("minFrameCount: %zu, afFrameCount=%zu, minBufCount=%d, sampleRate=%u, afSampleRate=%u" -                ", afLatency=%d", -                minFrameCount, afFrameCount, minBufCount, mSampleRate, afSampleRate, afLatency); - -        if (frameCount == 0) { -            frameCount = minFrameCount; -        } else if (frameCount < minFrameCount) { -            // not ALOGW because it happens all the time when playing key clicks over A2DP -            ALOGV("Minimum buffer size corrected from %zu to %zu", -                     frameCount, minFrameCount); -            frameCount = minFrameCount; -        } -        // Make sure that application is notified with sufficient margin before underrun -        if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { -            mNotificationFramesAct = frameCount/nBuffering; -        } - +        frameCount = mSharedBuffer->size() / mFrameSize;      } else { -        // For fast tracks, the frame count calculations and checks are done by server +        // For fast tracks the frame count calculations and checks are done by server + +        if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) == 0) { +            // for normal tracks precompute the frame count based on speed. 
+            const size_t minFrameCount = calculateMinFrameCount( +                    mAfLatency, mAfFrameCount, mAfSampleRate, mSampleRate, +                    mPlaybackRate.mSpeed); +            if (frameCount < minFrameCount) { +                frameCount = minFrameCount; +            } +        }      }      IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT; @@ -1101,12 +1260,10 @@ status_t AudioTrack::createTrack_l()      size_t temp = frameCount;   // temp may be replaced by a revised value of frameCount,                                  // but we will still need the original value also +    int originalSessionId = mSessionId;      sp<IAudioTrack> track = audioFlinger->createTrack(streamType,                                                        mSampleRate, -                                                      // AudioFlinger only sees 16-bit PCM -                                                      mFormat == AUDIO_FORMAT_PCM_8_BIT && -                                                          !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT) ? 
-                                                              AUDIO_FORMAT_PCM_16_BIT : mFormat, +                                                      mFormat,                                                        mChannelMask,                                                        &temp,                                                        &trackFlags, @@ -1116,6 +1273,8 @@ status_t AudioTrack::createTrack_l()                                                        &mSessionId,                                                        mClientUid,                                                        &status); +    ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId, +            "session ID changed from %d to %d", originalSessionId, mSessionId);      if (status != NO_ERROR) {          ALOGE("AudioFlinger could not create track, status: %d", status); @@ -1161,23 +1320,10 @@ status_t AudioTrack::createTrack_l()          if (trackFlags & IAudioFlinger::TRACK_FAST) {              ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu", frameCount);              mAwaitBoost = true; -            if (mSharedBuffer == 0) { -                // Theoretically double-buffering is not required for fast tracks, -                // due to tighter scheduling.  But in practice, to accommodate kernels with -                // scheduling jitter, and apps with computation jitter, we use double-buffering. 
-                if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { -                    mNotificationFramesAct = frameCount/nBuffering; -                } -            }          } else {              ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);              // once denied, do not request again if IAudioTrack is re-created              mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST); -            if (mSharedBuffer == 0) { -                if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { -                    mNotificationFramesAct = frameCount/nBuffering; -                } -            }          }      }      if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) { @@ -1200,6 +1346,16 @@ status_t AudioTrack::createTrack_l()              //return NO_INIT;          }      } +    // Make sure that application is notified with sufficient margin before underrun +    if (mSharedBuffer == 0 && audio_is_linear_pcm(mFormat)) { +        // Theoretically double-buffering is not required for fast tracks, +        // due to tighter scheduling.  But in practice, to accommodate kernels with +        // scheduling jitter, and apps with computation jitter, we use double-buffering +        // for fast tracks just like normal streaming tracks. +        if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount / nBuffering) { +            mNotificationFramesAct = frameCount / nBuffering; +        } +    }      // We retain a copy of the I/O handle, but don't own the reference      mOutput = output; @@ -1211,14 +1367,19 @@ status_t AudioTrack::createTrack_l()      // address space.  AudioFlinger::TrackBase::mBuffer is for the server address space.      
void* buffers;      if (mSharedBuffer == 0) { -        buffers = (char*)cblk + sizeof(audio_track_cblk_t); +        buffers = cblk + 1;      } else {          buffers = mSharedBuffer->pointer(); +        if (buffers == NULL) { +            ALOGE("Could not get buffer pointer"); +            return NO_INIT; +        }      }      mAudioTrack->attachAuxEffect(mAuxEffectId); +    // FIXME doesn't take into account speed or future sample rate changes (until restoreTrack)      // FIXME don't believe this lie -    mLatency = afLatency + (1000*frameCount) / mSampleRate; +    mLatency = mAfLatency + (1000*frameCount) / mSampleRate;      mFrameCount = frameCount;      // If IAudioTrack is re-created, don't let the requested frameCount @@ -1227,12 +1388,15 @@ status_t AudioTrack::createTrack_l()          mReqFrameCount = frameCount;      } +    // reset server position to 0 as we have new cblk. +    mServer = 0; +      // update proxy      if (mSharedBuffer == 0) {          mStaticProxy.clear(); -        mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF); +        mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);      } else { -        mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF); +        mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);          mProxy = mStaticProxy;      } @@ -1241,12 +1405,24 @@ status_t AudioTrack::createTrack_l()              gain_from_float(mVolume[AUDIO_INTERLEAVE_RIGHT])));      mProxy->setSendLevel(mSendLevel); -    mProxy->setSampleRate(mSampleRate); +    const uint32_t effectiveSampleRate = adjustSampleRate(mSampleRate, mPlaybackRate.mPitch); +    const float effectiveSpeed = adjustSpeed(mPlaybackRate.mSpeed, mPlaybackRate.mPitch); +    const float effectivePitch = adjustPitch(mPlaybackRate.mPitch); +    mProxy->setSampleRate(effectiveSampleRate); + +    AudioPlaybackRate playbackRateTemp = mPlaybackRate; +    
playbackRateTemp.mSpeed = effectiveSpeed; +    playbackRateTemp.mPitch = effectivePitch; +    mProxy->setPlaybackRate(playbackRateTemp);      mProxy->setMinimum(mNotificationFramesAct);      mDeathNotifier = new DeathNotifier(this);      IInterface::asBinder(mAudioTrack)->linkToDeath(mDeathNotifier, this); +    if (mDeviceCallback != 0) { +        AudioSystem::addAudioDeviceCallback(mDeviceCallback, mOutput); +    } +      return NO_ERROR;      } @@ -1258,15 +1434,21 @@ release:      return status;  } -status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) +status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount, size_t *nonContig)  {      if (audioBuffer == NULL) { +        if (nonContig != NULL) { +            *nonContig = 0; +        }          return BAD_VALUE;      }      if (mTransfer != TRANSFER_OBTAIN) {          audioBuffer->frameCount = 0;          audioBuffer->size = 0;          audioBuffer->raw = NULL; +        if (nonContig != NULL) { +            *nonContig = 0; +        }          return INVALID_OPERATION;      } @@ -1285,7 +1467,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)          ALOGE("%s invalid waitCount %d", __func__, waitCount);          requested = NULL;      } -    return obtainBuffer(audioBuffer, requested); +    return obtainBuffer(audioBuffer, requested, NULL /*elapsed*/, nonContig);  }  status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *requested, @@ -1352,7 +1534,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *re      } while ((status == DEAD_OBJECT) && (tryCounter-- > 0));      audioBuffer->frameCount = buffer.mFrameCount; -    audioBuffer->size = buffer.mFrameCount * mFrameSizeAF; +    audioBuffer->size = buffer.mFrameCount * mFrameSize;      audioBuffer->raw = buffer.mRaw;      if (nonContig != NULL) {          *nonContig = buffer.mNonContig; @@ -1360,13 +1542,14 @@ status_t 
AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *re      return status;  } -void AudioTrack::releaseBuffer(Buffer* audioBuffer) +void AudioTrack::releaseBuffer(const Buffer* audioBuffer)  { +    // FIXME add error checking on mode, by adding an internal version      if (mTransfer == TRANSFER_SHARED) {          return;      } -    size_t stepCount = audioBuffer->size / mFrameSizeAF; +    size_t stepCount = audioBuffer->size / mFrameSize;      if (stepCount == 0) {          return;      } @@ -1431,15 +1614,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize, bool blocking)              return ssize_t(err);          } -        size_t toWrite; -        if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { -            // Divide capacity by 2 to take expansion into account -            toWrite = audioBuffer.size >> 1; -            memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) buffer, toWrite); -        } else { -            toWrite = audioBuffer.size; -            memcpy(audioBuffer.i8, buffer, toWrite); -        } +        size_t toWrite = audioBuffer.size; +        memcpy(audioBuffer.i8, buffer, toWrite);          buffer = ((const char *) buffer) + toWrite;          userSize -= toWrite;          written += toWrite; @@ -1558,10 +1734,10 @@ nsecs_t AudioTrack::processAudioBuffer()          // AudioSystem cache. 
We should not exit here but after calling the callback so          // that the upper layers can recreate the track          if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) { -            status_t status = restoreTrack_l("processAudioBuffer"); -            mLock.unlock(); -            // Run again immediately, but with a new IAudioTrack -            return 0; +            status_t status __unused = restoreTrack_l("processAudioBuffer"); +            // FIXME unused status +            // after restoration, continue below to make sure that the loop and buffer events +            // are notified because they have been cleared from mCblk->mFlags above.          }      } @@ -1610,9 +1786,9 @@ nsecs_t AudioTrack::processAudioBuffer()      }      // Cache other fields that will be needed soon -    uint32_t loopPeriod = mLoopPeriod;      uint32_t sampleRate = mSampleRate; -    uint32_t notificationFrames = mNotificationFramesAct; +    float speed = mPlaybackRate.mSpeed; +    const uint32_t notificationFrames = mNotificationFramesAct;      if (mRefreshRemaining) {          mRefreshRemaining = false;          mRemainingFrames = notificationFrames; @@ -1622,13 +1798,42 @@ nsecs_t AudioTrack::processAudioBuffer()      uint32_t sequence = mSequence;      sp<AudioTrackClientProxy> proxy = mProxy; +    // Determine the number of new loop callback(s) that will be needed, while locked. +    int loopCountNotifications = 0; +    uint32_t loopPeriod = 0; // time in frames for next EVENT_LOOP_END or EVENT_BUFFER_END + +    if (mLoopCount > 0) { +        int loopCount; +        size_t bufferPosition; +        mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount); +        loopPeriod = ((loopCount > 0) ? 
mLoopEnd : mFrameCount) - bufferPosition; +        loopCountNotifications = min(mLoopCountNotified - loopCount, kMaxLoopCountNotifications); +        mLoopCountNotified = loopCount; // discard any excess notifications +    } else if (mLoopCount < 0) { +        // FIXME: We're not accurate with notification count and position with infinite looping +        // since loopCount from server side will always return -1 (we could decrement it). +        size_t bufferPosition = mStaticProxy->getBufferPosition(); +        loopCountNotifications = int((flags & (CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL)) != 0); +        loopPeriod = mLoopEnd - bufferPosition; +    } else if (/* mLoopCount == 0 && */ mSharedBuffer != 0) { +        size_t bufferPosition = mStaticProxy->getBufferPosition(); +        loopPeriod = mFrameCount - bufferPosition; +    } +      // These fields don't need to be cached, because they are assigned only by set(): -    //     mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFrameSizeAF, mFlags +    //     mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFlags      // mFlags is also assigned by createTrack_l(), but not the bit we care about.      mLock.unlock(); +    // get anchor time to account for callbacks. +    const nsecs_t timeBeforeCallbacks = systemTime(); +      if (waitStreamEnd) { +        // FIXME:  Instead of blocking in proxy->waitStreamEndDone(), Callback thread +        // should wait on proxy futex and handle CBLK_STREAM_END_DONE within this function +        // (and make sure we don't callback for more data while we're stopping). +        // This helps with position, marker notifications, and track invalidation.          
struct timespec timeout;          timeout.tv_sec = WAIT_STREAM_END_TIMEOUT_SEC;          timeout.tv_nsec = 0; @@ -1662,10 +1867,9 @@ nsecs_t AudioTrack::processAudioBuffer()      if (newUnderrun) {          mCbf(EVENT_UNDERRUN, mUserData, NULL);      } -    // FIXME we will miss loops if loop cycle was signaled several times since last call -    //       to processAudioBuffer() -    if (flags & (CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL)) { +    while (loopCountNotifications > 0) {          mCbf(EVENT_LOOP_END, mUserData, NULL); +        --loopCountNotifications;      }      if (flags & CBLK_BUFFER_END) {          mCbf(EVENT_BUFFER_END, mUserData, NULL); @@ -1701,10 +1905,11 @@ nsecs_t AudioTrack::processAudioBuffer()          minFrames = markerPosition - position;      }      if (loopPeriod > 0 && loopPeriod < minFrames) { +        // loopPeriod is already adjusted for actual position.          minFrames = loopPeriod;      } -    if (updatePeriod > 0 && updatePeriod < minFrames) { -        minFrames = updatePeriod; +    if (updatePeriod > 0) { +        minFrames = min(minFrames, uint32_t(newPosition - position));      }      // If > 0, poll periodically to recover from a stuck server.  A good value is 2. 
@@ -1713,12 +1918,17 @@ nsecs_t AudioTrack::processAudioBuffer()          minFrames = kPoll * notificationFrames;      } +    // This "fudge factor" avoids soaking CPU, and compensates for late progress by server +    static const nsecs_t kWaitPeriodNs = WAIT_PERIOD_MS * 1000000LL; +    const nsecs_t timeAfterCallbacks = systemTime(); +      // Convert frame units to time units      nsecs_t ns = NS_WHENEVER;      if (minFrames != (uint32_t) ~0) { -        // This "fudge factor" avoids soaking CPU, and compensates for late progress by server -        static const nsecs_t kFudgeNs = 10000000LL; // 10 ms -        ns = ((minFrames * 1000000000LL) / sampleRate) + kFudgeNs; +        ns = framesToNanoseconds(minFrames, sampleRate, speed) + kWaitPeriodNs; +        ns -= (timeAfterCallbacks - timeBeforeCallbacks);  // account for callback time +        // TODO: Should we warn if the callback time is too long? +        if (ns < 0) ns = 0;      }      // If not supplying data by EVENT_MORE_DATA, then we're done @@ -1726,6 +1936,13 @@ nsecs_t AudioTrack::processAudioBuffer()          return ns;      } +    // EVENT_MORE_DATA callback handling. +    // Timing for linear pcm audio data formats can be derived directly from the +    // buffer fill level. +    // Timing for compressed data is not directly available from the buffer fill level, +    // rather indirectly from waiting for blocking mode callbacks or waiting for obtain() +    // to return a certain fill level. 
+      struct timespec timeout;      const struct timespec *requested = &ClientProxy::kForever;      if (ns != NS_WHENEVER) { @@ -1756,24 +1973,21 @@ nsecs_t AudioTrack::processAudioBuffer()              return NS_NEVER;          } -        if (mRetryOnPartialBuffer && !isOffloaded()) { +        if (mRetryOnPartialBuffer && audio_is_linear_pcm(mFormat)) {              mRetryOnPartialBuffer = false;              if (avail < mRemainingFrames) { -                int64_t myns = ((mRemainingFrames - avail) * 1100000000LL) / sampleRate; -                if (ns < 0 || myns < ns) { +                if (ns > 0) { // account for obtain time +                    const nsecs_t timeNow = systemTime(); +                    ns = max((nsecs_t)0, ns - (timeNow - timeAfterCallbacks)); +                } +                nsecs_t myns = framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed); +                if (ns < 0 /* NS_WHENEVER */ || myns < ns) {                      ns = myns;                  }                  return ns;              }          } -        // Divide buffer size by 2 to take into account the expansion -        // due to 8 to 16 bit conversion: the callback must fill only half -        // of the destination buffer -        if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { -            audioBuffer.size >>= 1; -        } -          size_t reqSize = audioBuffer.size;          mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);          size_t writtenSize = audioBuffer.size; @@ -1790,16 +2004,45 @@ nsecs_t AudioTrack::processAudioBuffer()              // Keep this thread going to handle timed events and              // still try to get more data in intervals of WAIT_PERIOD_MS              // but don't just loop and block the CPU, so wait -            return WAIT_PERIOD_MS * 1000000LL; -        } -        if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { -            // 8 to 16 bit conversion, note 
that source and destination are the same address -            memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) audioBuffer.i8, writtenSize); -            audioBuffer.size <<= 1; +            // mCbf(EVENT_MORE_DATA, ...) might either +            // (1) Block until it can fill the buffer, returning 0 size on EOS. +            // (2) Block until it can fill the buffer, returning 0 data (silence) on EOS. +            // (3) Return 0 size when no data is available, does not wait for more data. +            // +            // (1) and (2) occurs with AudioPlayer/AwesomePlayer; (3) occurs with NuPlayer. +            // We try to compute the wait time to avoid a tight sleep-wait cycle, +            // especially for case (3). +            // +            // The decision to support (1) and (2) affect the sizing of mRemainingFrames +            // and this loop; whereas for case (3) we could simply check once with the full +            // buffer size and skip the loop entirely. + +            nsecs_t myns; +            if (audio_is_linear_pcm(mFormat)) { +                // time to wait based on buffer occupancy +                const nsecs_t datans = mRemainingFrames <= avail ? 0 : +                        framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed); +                // audio flinger thread buffer size (TODO: adjust for fast tracks) +                const nsecs_t afns = framesToNanoseconds(mAfFrameCount, mAfSampleRate, speed); +                // add a half the AudioFlinger buffer time to avoid soaking CPU if datans is 0. +                myns = datans + (afns / 2); +            } else { +                // FIXME: This could ping quite a bit if the buffer isn't full. +                // Note that when mState is stopping we waitStreamEnd, so it never gets here. 
+                myns = kWaitPeriodNs; +            } +            if (ns > 0) { // account for obtain and callback time +                const nsecs_t timeNow = systemTime(); +                ns = max((nsecs_t)0, ns - (timeNow - timeAfterCallbacks)); +            } +            if (ns < 0 /* NS_WHENEVER */ || myns < ns) { +                ns = myns; +            } +            return ns;          } -        size_t releasedFrames = audioBuffer.size / mFrameSizeAF; +        size_t releasedFrames = writtenSize / mFrameSize;          audioBuffer.frameCount = releasedFrames;          mRemainingFrames -= releasedFrames;          if (misalignment >= releasedFrames) { @@ -1827,7 +2070,7 @@ nsecs_t AudioTrack::processAudioBuffer()          // that total to a sum == notificationFrames.          if (0 < misalignment && misalignment <= mRemainingFrames) {              mRemainingFrames = misalignment; -            return (mRemainingFrames * 1100000000LL) / sampleRate; +            return ((double)mRemainingFrames * 1100000000) / ((double)sampleRate * speed);          }  #endif @@ -1844,51 +2087,49 @@ status_t AudioTrack::restoreTrack_l(const char *from)      ALOGW("dead IAudioTrack, %s, creating a new one from %s()",            isOffloadedOrDirect_l() ? "Offloaded or Direct" : "PCM", from);      ++mSequence; -    status_t result;      // refresh the audio configuration cache in this process to make sure we get new      // output parameters and new IAudioFlinger in createTrack_l()      AudioSystem::clearAudioConfigCache(); -    if (isOffloadedOrDirect_l()) { -        // FIXME re-creation of offloaded tracks is not yet implemented +    if (isOffloadedOrDirect_l() || mDoNotReconnect) { +        // FIXME re-creation of offloaded and direct tracks is not yet implemented; +        // reconsider enabling for linear PCM encodings when position can be preserved.          
return DEAD_OBJECT;      }      // save the old static buffer position -    size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0; +    size_t bufferPosition = 0; +    int loopCount = 0; +    if (mStaticProxy != 0) { +        mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount); +    }      // If a new IAudioTrack is successfully created, createTrack_l() will modify the      // following member variables: mAudioTrack, mCblkMemory and mCblk.      // It will also delete the strong references on previous IAudioTrack and IMemory.      // If a new IAudioTrack cannot be created, the previous (dead) instance will be left intact. -    result = createTrack_l(); - -    // take the frames that will be lost by track recreation into account in saved position -    (void) updateAndGetPosition_l(); -    mPosition = mReleased; +    status_t result = createTrack_l();      if (result == NO_ERROR) { -        // continue playback from last known position, but -        // don't attempt to restore loop after invalidation; it's difficult and not worthwhile -        if (mStaticProxy != NULL) { -            mLoopPeriod = 0; -            mStaticProxy->setLoop(bufferPosition, mFrameCount, 0); -        } -        // FIXME How do we simulate the fact that all frames present in the buffer at the time of -        //       track destruction have been played? This is critical for SoundPool implementation -        //       This must be broken, and needs to be tested/debugged. -#if 0 -        // restore write index and set other indexes to reflect empty buffer status -        if (!strcmp(from, "start")) { -            // Make sure that a client relying on callback events indicating underrun or -            // the actual amount of audio frames played (e.g SoundPool) receives them. -            if (mSharedBuffer == 0) { -                // restart playback even if buffer is not completely filled. 
-                android_atomic_or(CBLK_FORCEREADY, &mCblk->mFlags); +        // take the frames that will be lost by track recreation into account in saved position +        // For streaming tracks, this is the amount we obtained from the user/client +        // (not the number actually consumed at the server - those are already lost). +        if (mStaticProxy == 0) { +            mPosition = mReleased; +        } +        // Continue playback from last known position and restore loop. +        if (mStaticProxy != 0) { +            if (loopCount != 0) { +                mStaticProxy->setBufferPositionAndLoop(bufferPosition, +                        mLoopStart, mLoopEnd, loopCount); +            } else { +                mStaticProxy->setBufferPosition(bufferPosition); +                if (bufferPosition == mFrameCount) { +                    ALOGD("restoring track at end of static buffer"); +                }              }          } -#endif          if (mState == STATE_ACTIVE) {              result = mAudioTrack->start();          } @@ -1923,6 +2164,19 @@ uint32_t AudioTrack::updateAndGetPosition_l()      return mPosition += (uint32_t) delta;  } +bool AudioTrack::isSampleRateSpeedAllowed_l(uint32_t sampleRate, float speed) const +{ +    // applicable for mixing tracks only (not offloaded or direct) +    if (mStaticProxy != 0) { +        return true; // static tracks do not have issues with buffer sizing. 
+    } +    const size_t minFrameCount = +            calculateMinFrameCount(mAfLatency, mAfFrameCount, mAfSampleRate, sampleRate, speed); +    ALOGV("isSampleRateSpeedAllowed_l mFrameCount %zu  minFrameCount %zu", +            mFrameCount, minFrameCount); +    return mFrameCount >= minFrameCount; +} +  status_t AudioTrack::setParameters(const String8& keyValuePairs)  {      AutoMutex lock(mLock); @@ -1932,6 +2186,11 @@ status_t AudioTrack::setParameters(const String8& keyValuePairs)  status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)  {      AutoMutex lock(mLock); + +    bool previousTimestampValid = mPreviousTimestampValid; +    // Set false here to cover all the error return cases. +    mPreviousTimestampValid = false; +      // FIXME not implemented for fast tracks; should use proxy and SSQ      if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {          return INVALID_OPERATION; @@ -1956,7 +2215,12 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)      }      if (mCblk->mFlags & CBLK_INVALID) { -        restoreTrack_l("getTimestamp"); +        const status_t status = restoreTrack_l("getTimestamp"); +        if (status != OK) { +            // per getTimestamp() API doc in header, we return DEAD_OBJECT here, +            // recommending that the track be recreated. +            return DEAD_OBJECT; +        }      }      // The presented frame count must always lag behind the consumed frame count. @@ -1975,7 +2239,12 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)          }          // Check whether a pending flush or stop has completed, as those commands may -        // be asynchronous or return near finish. +        // be asynchronous or return near finish or exhibit glitchy behavior. +        // +        // Originally this showed up as the first timestamp being a continuation of +        // the previous song under gapless playback. 
+        // However, we sometimes see zero timestamps, then a glitch of +        // the previous song's position, and then correct timestamps afterwards.          if (mStartUs != 0 && mSampleRate != 0) {              static const int kTimeJitterUs = 100000; // 100 ms              static const int k1SecUs = 1000000; @@ -1988,20 +2257,34 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)                      return WOULD_BLOCK;  // stale timestamp time, occurs before start.                  }                  const int64_t deltaTimeUs = timestampTimeUs - mStartUs; -                const int64_t deltaPositionByUs = timestamp.mPosition * 1000000LL / mSampleRate; +                const int64_t deltaPositionByUs = (double)timestamp.mPosition * 1000000 +                        / ((double)mSampleRate * mPlaybackRate.mSpeed);                  if (deltaPositionByUs > deltaTimeUs + kTimeJitterUs) {                      // Verify that the counter can't count faster than the sample rate -                    // since the start time.  If greater, then that means we have failed +                    // since the start time.  If greater, then that means we may have failed                      // to completely flush or stop the previous playing track. 
-                    ALOGW("incomplete flush or stop:" +                    ALOGW_IF(!mTimestampStartupGlitchReported, +                            "getTimestamp startup glitch detected"                              " deltaTimeUs(%lld) deltaPositionUs(%lld) tsmPosition(%u)",                              (long long)deltaTimeUs, (long long)deltaPositionByUs,                              timestamp.mPosition); +                    mTimestampStartupGlitchReported = true; +                    if (previousTimestampValid +                            && mPreviousTimestamp.mPosition == 0 /* should be true if valid */) { +                        timestamp = mPreviousTimestamp; +                        mPreviousTimestampValid = true; +                        return NO_ERROR; +                    }                      return WOULD_BLOCK;                  } +                if (deltaPositionByUs != 0) { +                    mStartUs = 0; // don't check again, we got valid nonzero position. +                } +            } else { +                mStartUs = 0; // don't check again, start time expired.              } -            mStartUs = 0; // no need to check again, start timestamp has either expired or unneeded. +            mTimestampStartupGlitchReported = false;          }      } else {          // Update the mapping between local consumed (mPosition) and server consumed (mServer) @@ -2029,6 +2312,46 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)          // IAudioTrack.  And timestamp.mPosition is initially in server's          // point of view, so we need to apply the same fudge factor to it.      } + +    // Prevent retrograde motion in timestamp. +    // This is sometimes caused by erratic reports of the available space in the ALSA drivers. 
+    if (status == NO_ERROR) { +        if (previousTimestampValid) { +#define TIME_TO_NANOS(time) ((uint64_t)time.tv_sec * 1000000000 + time.tv_nsec) +            const uint64_t previousTimeNanos = TIME_TO_NANOS(mPreviousTimestamp.mTime); +            const uint64_t currentTimeNanos = TIME_TO_NANOS(timestamp.mTime); +#undef TIME_TO_NANOS +            if (currentTimeNanos < previousTimeNanos) { +                ALOGW("retrograde timestamp time"); +                // FIXME Consider blocking this from propagating upwards. +            } + +            // Looking at signed delta will work even when the timestamps +            // are wrapping around. +            int32_t deltaPosition = static_cast<int32_t>(timestamp.mPosition +                    - mPreviousTimestamp.mPosition); +            // position can bobble slightly as an artifact; this hides the bobble +            static const int32_t MINIMUM_POSITION_DELTA = 8; +            if (deltaPosition < 0) { +                // Only report once per position instead of spamming the log. +                if (!mRetrogradeMotionReported) { +                    ALOGW("retrograde timestamp position corrected, %d = %u - %u", +                            deltaPosition, +                            timestamp.mPosition, +                            mPreviousTimestamp.mPosition); +                    mRetrogradeMotionReported = true; +                } +            } else { +                mRetrogradeMotionReported = false; +            } +            if (deltaPosition < MINIMUM_POSITION_DELTA) { +                timestamp = mPreviousTimestamp;  // Use last valid timestamp. 
+            } +        } +        mPreviousTimestamp = timestamp; +        mPreviousTimestampValid = true; +    } +      return status;  } @@ -2075,7 +2398,8 @@ status_t AudioTrack::dump(int fd, const Vector<String16>& args __unused) const      snprintf(buffer, 255, "  format(%d), channel count(%d), frame count(%zu)\n", mFormat,              mChannelCount, mFrameCount);      result.append(buffer); -    snprintf(buffer, 255, "  sample rate(%u), status(%d)\n", mSampleRate, mStatus); +    snprintf(buffer, 255, "  sample rate(%u), speed(%f), status(%d)\n", +            mSampleRate, mPlaybackRate.mSpeed, mStatus);      result.append(buffer);      snprintf(buffer, 255, "  state(%d), latency (%d)\n", mState, mLatency);      result.append(buffer); @@ -2089,6 +2413,48 @@ uint32_t AudioTrack::getUnderrunFrames() const      return mProxy->getUnderrunFrames();  } +status_t AudioTrack::addAudioDeviceCallback(const sp<AudioSystem::AudioDeviceCallback>& callback) +{ +    if (callback == 0) { +        ALOGW("%s adding NULL callback!", __FUNCTION__); +        return BAD_VALUE; +    } +    AutoMutex lock(mLock); +    if (mDeviceCallback == callback) { +        ALOGW("%s adding same callback!", __FUNCTION__); +        return INVALID_OPERATION; +    } +    status_t status = NO_ERROR; +    if (mOutput != AUDIO_IO_HANDLE_NONE) { +        if (mDeviceCallback != 0) { +            ALOGW("%s callback already present!", __FUNCTION__); +            AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput); +        } +        status = AudioSystem::addAudioDeviceCallback(callback, mOutput); +    } +    mDeviceCallback = callback; +    return status; +} + +status_t AudioTrack::removeAudioDeviceCallback( +        const sp<AudioSystem::AudioDeviceCallback>& callback) +{ +    if (callback == 0) { +        ALOGW("%s removing NULL callback!", __FUNCTION__); +        return BAD_VALUE; +    } +    AutoMutex lock(mLock); +    if (mDeviceCallback != callback) { +        ALOGW("%s removing 
different callback!", __FUNCTION__); +        return INVALID_OPERATION; +    } +    if (mOutput != AUDIO_IO_HANDLE_NONE) { +        AudioSystem::removeAudioDeviceCallback(mDeviceCallback, mOutput); +    } +    mDeviceCallback = 0; +    return NO_ERROR; +} +  // =========================================================================  void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who __unused) @@ -2148,8 +2514,8 @@ bool AudioTrack::AudioTrackThread::threadLoop()      case NS_NEVER:          return false;      case NS_WHENEVER: -        // FIXME increase poll interval, or make event-driven -        ns = 1000000000LL; +        // Event driven: call wake() when callback notifications conditions change. +        ns = INT64_MAX;          // fall through      default:          LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns); @@ -2182,6 +2548,21 @@ void AudioTrack::AudioTrackThread::resume()      }  } +void AudioTrack::AudioTrackThread::wake() +{ +    AutoMutex _l(mMyLock); +    if (!mPaused) { +        // wake() might be called while servicing a callback - ignore the next +        // pause time and call processAudioBuffer. +        mIgnoreNextPausedInt = true; +        if (mPausedInt && mPausedNs > 0) { +            // audio track is active and internally paused with timeout. +            mPausedInt = false; +            mMyCond.signal(); +        } +    } +} +  void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns)  {      AutoMutex _l(mMyLock); @@ -2189,4 +2570,4 @@ void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns)      mPausedNs = ns;  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index ff24475..6a51a76 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -28,7 +28,21 @@ namespace android {  // used to clamp a value to size_t.  TODO: move to another file.  
template <typename T>  size_t clampToSize(T x) { -    return x > SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x; +    return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x; +} + +// incrementSequence is used to determine the next sequence value +// for the loop and position sequence counters.  It should return +// a value between "other" + 1 and "other" + INT32_MAX, the choice of +// which needs to be the "least recently used" sequence value for "self". +// In general, this means (new_self) returned is max(self, other) + 1. + +static uint32_t incrementSequence(uint32_t self, uint32_t other) { +    int32_t diff = self - other; +    if (diff >= 0 && diff < INT32_MAX) { +        return self + 1; // we're already ahead of other. +    } +    return other + 1; // we're behind, so move just ahead of other.  }  audio_track_cblk_t::audio_track_cblk_t() @@ -360,6 +374,9 @@ void AudioTrackClientProxy::flush()      size_t increment = mFrameCountP2 << 1;      size_t mask = increment - 1;      audio_track_cblk_t* cblk = mCblk; +    // mFlush is 32 bits concatenated as [ flush_counter ] [ newfront_offset ] +    // Should newFlush = cblk->u.mStreaming.mRear?  Only problem is +    // if you want to flush twice to the same rear location after a 32 bit wrap.      
int32_t newFlush = (cblk->u.mStreaming.mRear & mask) |                          ((cblk->u.mStreaming.mFlush & ~mask) + increment);      android_atomic_release_store(newFlush, &cblk->u.mStreaming.mFlush); @@ -409,7 +426,6 @@ status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *request              goto end;          }          // check for obtainBuffer interrupted by client -        // check for obtainBuffer interrupted by client          if (flags & CBLK_INTERRUPT) {              ALOGV("waitStreamEndDone() interrupted by client");              status = -EINTR; @@ -485,8 +501,11 @@ end:  StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers,          size_t frameCount, size_t frameSize)      : AudioTrackClientProxy(cblk, buffers, frameCount, frameSize), -      mMutator(&cblk->u.mStatic.mSingleStateQueue), mBufferPosition(0) +      mMutator(&cblk->u.mStatic.mSingleStateQueue), +      mPosLoopObserver(&cblk->u.mStatic.mPosLoopQueue)  { +    memset(&mState, 0, sizeof(mState)); +    memset(&mPosLoop, 0, sizeof(mPosLoop));  }  void StaticAudioTrackClientProxy::flush() @@ -501,30 +520,72 @@ void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int          // FIXME Should return an error status          return;      } -    StaticAudioTrackState newState; -    newState.mLoopStart = (uint32_t) loopStart; -    newState.mLoopEnd = (uint32_t) loopEnd; -    newState.mLoopCount = loopCount; -    size_t bufferPosition; -    if (loopCount == 0 || (bufferPosition = getBufferPosition()) >= loopEnd) { -        bufferPosition = loopStart; +    mState.mLoopStart = (uint32_t) loopStart; +    mState.mLoopEnd = (uint32_t) loopEnd; +    mState.mLoopCount = loopCount; +    mState.mLoopSequence = incrementSequence(mState.mLoopSequence, mState.mPositionSequence); +    // set patch-up variables until the mState is acknowledged by the ServerProxy. 
+    // observed buffer position and loop count will freeze until then to give the +    // illusion of a synchronous change. +    getBufferPositionAndLoopCount(NULL, NULL); +    // preserve behavior to restart at mState.mLoopStart if position exceeds mState.mLoopEnd. +    if (mState.mLoopCount != 0 && mPosLoop.mBufferPosition >= mState.mLoopEnd) { +        mPosLoop.mBufferPosition = mState.mLoopStart;      } -    mBufferPosition = bufferPosition; // snapshot buffer position until loop is acknowledged. -    (void) mMutator.push(newState); +    mPosLoop.mLoopCount = mState.mLoopCount; +    (void) mMutator.push(mState); +} + +void StaticAudioTrackClientProxy::setBufferPosition(size_t position) +{ +    // This can only happen on a 64-bit client +    if (position > UINT32_MAX) { +        // FIXME Should return an error status +        return; +    } +    mState.mPosition = (uint32_t) position; +    mState.mPositionSequence = incrementSequence(mState.mPositionSequence, mState.mLoopSequence); +    // set patch-up variables until the mState is acknowledged by the ServerProxy. +    // observed buffer position and loop count will freeze until then to give the +    // illusion of a synchronous change. +    if (mState.mLoopCount > 0) {  // only check if loop count is changing +        getBufferPositionAndLoopCount(NULL, NULL); // get last position +    } +    mPosLoop.mBufferPosition = position; +    if (position >= mState.mLoopEnd) { +        // no ongoing loop is possible if position is greater than loopEnd. 
+        mPosLoop.mLoopCount = 0; +    } +    (void) mMutator.push(mState); +} + +void StaticAudioTrackClientProxy::setBufferPositionAndLoop(size_t position, size_t loopStart, +        size_t loopEnd, int loopCount) +{ +    setLoop(loopStart, loopEnd, loopCount); +    setBufferPosition(position);  }  size_t StaticAudioTrackClientProxy::getBufferPosition()  { -    size_t bufferPosition; -    if (mMutator.ack()) { -        bufferPosition = (size_t) mCblk->u.mStatic.mBufferPosition; -        if (bufferPosition > mFrameCount) { -            bufferPosition = mFrameCount; -        } -    } else { -        bufferPosition = mBufferPosition; +    getBufferPositionAndLoopCount(NULL, NULL); +    return mPosLoop.mBufferPosition; +} + +void StaticAudioTrackClientProxy::getBufferPositionAndLoopCount( +        size_t *position, int *loopCount) +{ +    if (mMutator.ack() == StaticAudioTrackSingleStateQueue::SSQ_DONE) { +         if (mPosLoopObserver.poll(mPosLoop)) { +             ; // a valid mPosLoop should be available if ackDone is true. 
+         } +    } +    if (position != NULL) { +        *position = mPosLoop.mBufferPosition; +    } +    if (loopCount != NULL) { +        *loopCount = mPosLoop.mLoopCount;      } -    return bufferPosition;  }  // --------------------------------------------------------------------------- @@ -555,13 +616,24 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)          front = cblk->u.mStreaming.mFront;          if (flush != mFlush) {              // effectively obtain then release whatever is in the buffer -            size_t mask = (mFrameCountP2 << 1) - 1; +            const size_t overflowBit = mFrameCountP2 << 1; +            const size_t mask = overflowBit - 1;              int32_t newFront = (front & ~mask) | (flush & mask);              ssize_t filled = rear - newFront; +            if (filled >= (ssize_t)overflowBit) { +                // front and rear offsets span the overflow bit of the p2 mask +                // so rebasing newFront on the front offset is off by the overflow bit. +                // adjust newFront to match rear offset. 
+                ALOGV("flush wrap: filled %zx >= overflowBit %zx", filled, overflowBit); +                newFront += overflowBit; +                filled -= overflowBit; +            }              // Rather than shutting down on a corrupt flush, just treat it as a full flush              if (!(0 <= filled && (size_t) filled <= mFrameCount)) { -                ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, filled %d=%#x", -                        mFlush, flush, front, rear, mask, newFront, filled, filled); +                ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, " +                        "filled %zd=%#x", +                        mFlush, flush, front, rear, +                        (unsigned)mask, newFront, filled, (unsigned)filled);                  newFront = rear;              }              mFlush = flush; @@ -734,18 +806,23 @@ void AudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)      (void) android_atomic_or(CBLK_UNDERRUN, &cblk->mFlags);  } +AudioPlaybackRate AudioTrackServerProxy::getPlaybackRate() +{   // do not call from multiple threads without holding lock +    mPlaybackRateObserver.poll(mPlaybackRate); +    return mPlaybackRate; +} +  // ---------------------------------------------------------------------------  StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers,          size_t frameCount, size_t frameSize)      : AudioTrackServerProxy(cblk, buffers, frameCount, frameSize), -      mObserver(&cblk->u.mStatic.mSingleStateQueue), mPosition(0), +      mObserver(&cblk->u.mStatic.mSingleStateQueue), +      mPosLoopMutator(&cblk->u.mStatic.mPosLoopQueue),        mFramesReadySafe(frameCount), mFramesReady(frameCount),        mFramesReadyIsCalledByMultipleThreads(false)  { -    mState.mLoopStart = 0; -    mState.mLoopEnd = 0; -    mState.mLoopCount = 0; +    memset(&mState, 0, sizeof(mState));  }  void 
StaticAudioTrackServerProxy::framesReadyIsCalledByMultipleThreads() @@ -762,55 +839,97 @@ size_t StaticAudioTrackServerProxy::framesReady()      return mFramesReadySafe;  } -ssize_t StaticAudioTrackServerProxy::pollPosition() +status_t StaticAudioTrackServerProxy::updateStateWithLoop( +        StaticAudioTrackState *localState, const StaticAudioTrackState &update) const  { -    size_t position = mPosition; -    StaticAudioTrackState state; -    if (mObserver.poll(state)) { +    if (localState->mLoopSequence != update.mLoopSequence) {          bool valid = false; -        size_t loopStart = state.mLoopStart; -        size_t loopEnd = state.mLoopEnd; -        if (state.mLoopCount == 0) { -            if (loopStart > mFrameCount) { -                loopStart = mFrameCount; -            } -            // ignore loopEnd -            mPosition = position = loopStart; -            mFramesReady = mFrameCount - mPosition; -            mState.mLoopCount = 0; +        const size_t loopStart = update.mLoopStart; +        const size_t loopEnd = update.mLoopEnd; +        size_t position = localState->mPosition; +        if (update.mLoopCount == 0) {              valid = true; -        } else if (state.mLoopCount >= -1) { +        } else if (update.mLoopCount >= -1) {              if (loopStart < loopEnd && loopEnd <= mFrameCount &&                      loopEnd - loopStart >= MIN_LOOP) {                  // If the current position is greater than the end of the loop                  // we "wrap" to the loop start. This might cause an audible pop.                  if (position >= loopEnd) { -                    mPosition = position = loopStart; -                } -                if (state.mLoopCount == -1) { -                    mFramesReady = INT64_MAX; -                } else { -                    // mFramesReady is 64 bits to handle the effective number of frames -                    // that the static audio track contains, including loops. 
-                    // TODO: Later consider fixing overflow, but does not seem needed now -                    // as will not overflow if loopStart and loopEnd are Java "ints". -                    mFramesReady = int64_t(state.mLoopCount) * (loopEnd - loopStart) -                            + mFrameCount - mPosition; +                    position = loopStart;                  } -                mState = state;                  valid = true;              }          } -        if (!valid || mPosition > mFrameCount) { +        if (!valid || position > mFrameCount) { +            return NO_INIT; +        } +        localState->mPosition = position; +        localState->mLoopCount = update.mLoopCount; +        localState->mLoopEnd = loopEnd; +        localState->mLoopStart = loopStart; +        localState->mLoopSequence = update.mLoopSequence; +    } +    return OK; +} + +status_t StaticAudioTrackServerProxy::updateStateWithPosition( +        StaticAudioTrackState *localState, const StaticAudioTrackState &update) const +{ +    if (localState->mPositionSequence != update.mPositionSequence) { +        if (update.mPosition > mFrameCount) { +            return NO_INIT; +        } else if (localState->mLoopCount != 0 && update.mPosition >= localState->mLoopEnd) { +            localState->mLoopCount = 0; // disable loop count if position is beyond loop end. 
+        } +        localState->mPosition = update.mPosition; +        localState->mPositionSequence = update.mPositionSequence; +    } +    return OK; +} + +ssize_t StaticAudioTrackServerProxy::pollPosition() +{ +    StaticAudioTrackState state; +    if (mObserver.poll(state)) { +        StaticAudioTrackState trystate = mState; +        bool result; +        const int32_t diffSeq = state.mLoopSequence - state.mPositionSequence; + +        if (diffSeq < 0) { +            result = updateStateWithLoop(&trystate, state) == OK && +                    updateStateWithPosition(&trystate, state) == OK; +        } else { +            result = updateStateWithPosition(&trystate, state) == OK && +                    updateStateWithLoop(&trystate, state) == OK; +        } +        if (!result) { +            mObserver.done(); +            // caution: no update occurs so server state will be inconsistent with client state.              ALOGE("%s client pushed an invalid state, shutting down", __func__);              mIsShutdown = true;              return (ssize_t) NO_INIT;          } +        mState = trystate; +        if (mState.mLoopCount == -1) { +            mFramesReady = INT64_MAX; +        } else if (mState.mLoopCount == 0) { +            mFramesReady = mFrameCount - mState.mPosition; +        } else if (mState.mLoopCount > 0) { +            // TODO: Later consider fixing overflow, but does not seem needed now +            // as will not overflow if loopStart and loopEnd are Java "ints". 
+            mFramesReady = int64_t(mState.mLoopCount) * (mState.mLoopEnd - mState.mLoopStart) +                    + mFrameCount - mState.mPosition; +        }          mFramesReadySafe = clampToSize(mFramesReady);          // This may overflow, but client is not supposed to rely on it -        mCblk->u.mStatic.mBufferPosition = (uint32_t) position; +        StaticAudioTrackPosLoop posLoop; + +        posLoop.mLoopCount = (int32_t) mState.mLoopCount; +        posLoop.mBufferPosition = (uint32_t) mState.mPosition; +        mPosLoopMutator.push(posLoop); +        mObserver.done(); // safe to read mStatic variables.      } -    return (ssize_t) position; +    return (ssize_t) mState.mPosition;  }  status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush __unused) @@ -849,7 +968,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush      }      // As mFramesReady is the total remaining frames in the static audio track,      // it is always larger or equal to avail. -    LOG_ALWAYS_FATAL_IF(mFramesReady < avail); +    LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail);      buffer->mNonContig = mFramesReady == INT64_MAX ? 
SIZE_MAX : clampToSize(mFramesReady - avail);      mUnreleased = avail;      return NO_ERROR; @@ -858,7 +977,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush  void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)  {      size_t stepCount = buffer->mFrameCount; -    LOG_ALWAYS_FATAL_IF(!(stepCount <= mFramesReady)); +    LOG_ALWAYS_FATAL_IF(!((int64_t) stepCount <= mFramesReady));      LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased));      if (stepCount == 0) {          // prevent accidental re-use of buffer @@ -868,11 +987,12 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)      }      mUnreleased -= stepCount;      audio_track_cblk_t* cblk = mCblk; -    size_t position = mPosition; +    size_t position = mState.mPosition;      size_t newPosition = position + stepCount;      int32_t setFlags = 0;      if (!(position <= newPosition && newPosition <= mFrameCount)) { -        ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, mFrameCount); +        ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, +                mFrameCount);          newPosition = mFrameCount;      } else if (mState.mLoopCount != 0 && newPosition == mState.mLoopEnd) {          newPosition = mState.mLoopStart; @@ -885,7 +1005,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)      if (newPosition == mFrameCount) {          setFlags |= CBLK_BUFFER_END;      } -    mPosition = newPosition; +    mState.mPosition = newPosition;      if (mFramesReady != INT64_MAX) {          mFramesReady -= stepCount;      } @@ -893,7 +1013,10 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)      cblk->mServer += stepCount;      // This may overflow, but client is not supposed to rely on it -    cblk->u.mStatic.mBufferPosition = (uint32_t) newPosition; +    StaticAudioTrackPosLoop posLoop; +    posLoop.mBufferPosition = mState.mPosition; +    posLoop.mLoopCount = 
mState.mLoopCount; +    mPosLoopMutator.push(posLoop);      if (setFlags != 0) {          (void) android_atomic_or(setFlags, &cblk->mFlags);          // this would be a good place to wake a futex diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp index 41994dc..3020136 100644 --- a/media/libmedia/CharacterEncodingDetector.cpp +++ b/media/libmedia/CharacterEncodingDetector.cpp @@ -89,7 +89,6 @@ void CharacterEncodingDetector::detectAndConvert() {          // try combined detection of artist/album/title etc.          char buf[1024];          buf[0] = 0; -        int idx;          bool allprintable = true;          for (int i = 0; i < size; i++) {              const char *name = mNames.getEntry(i); @@ -169,7 +168,6 @@ void CharacterEncodingDetector::detectAndConvert() {              const char *name = mNames.getEntry(i);              uint8_t* src = (uint8_t *)mValues.getEntry(i);              int len = strlen((char *)src); -            uint8_t* dest = src;              ALOGV("@@@ checking %s", name);              const char *s = mValues.getEntry(i); diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 3e5c883..a3f014b 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -80,7 +80,8 @@ enum {      RELEASE_AUDIO_PATCH,      LIST_AUDIO_PATCHES,      SET_AUDIO_PORT_CONFIG, -    GET_AUDIO_HW_SYNC +    GET_AUDIO_HW_SYNC, +    SYSTEM_READY  };  #define MAX_ITEMS_PER_LIST 1024 @@ -174,9 +175,11 @@ public:                                  uint32_t sampleRate,                                  audio_format_t format,                                  audio_channel_mask_t channelMask, +                                const String16& opPackageName,                                  size_t *pFrameCount,                                  track_flags_t *flags,                                  pid_t tid, +                                int clientUid,                  
                int *sessionId,                                  size_t *notificationFrames,                                  sp<IMemory>& cblk, @@ -190,11 +193,13 @@ public:          data.writeInt32(sampleRate);          data.writeInt32(format);          data.writeInt32(channelMask); +        data.writeString16(opPackageName);          size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;          data.writeInt64(frameCount);          track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;          data.writeInt32(lFlags);          data.writeInt32((int32_t) tid); +        data.writeInt32((int32_t) clientUid);          int lSessionId = AUDIO_SESSION_ALLOCATE;          if (sessionId != NULL) {              lSessionId = *sessionId; @@ -702,6 +707,7 @@ public:                                      int32_t priority,                                      audio_io_handle_t output,                                      int sessionId, +                                    const String16& opPackageName,                                      status_t *status,                                      int *id,                                      int *enabled) @@ -722,6 +728,7 @@ public:          data.writeInt32(priority);          data.writeInt32((int32_t) output);          data.writeInt32(sessionId); +        data.writeString16(opPackageName);          status_t lStatus = remote()->transact(CREATE_EFFECT, data, &reply);          if (lStatus != NO_ERROR) { @@ -897,6 +904,12 @@ public:          }          return (audio_hw_sync_t)reply.readInt32();      } +    virtual status_t systemReady() +    { +        Parcel data, reply; +        data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor()); +        return remote()->transact(SYSTEM_READY, data, &reply, IBinder::FLAG_ONEWAY); +    }  };  IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger"); @@ -950,18 +963,19 @@ status_t BnAudioFlinger::onTransact(              uint32_t sampleRate = 
data.readInt32();              audio_format_t format = (audio_format_t) data.readInt32();              audio_channel_mask_t channelMask = data.readInt32(); +            const String16& opPackageName = data.readString16();              size_t frameCount = data.readInt64();              track_flags_t flags = (track_flags_t) data.readInt32();              pid_t tid = (pid_t) data.readInt32(); +            int clientUid = data.readInt32();              int sessionId = data.readInt32();              size_t notificationFrames = data.readInt64();              sp<IMemory> cblk;              sp<IMemory> buffers;              status_t status;              sp<IAudioRecord> record = openRecord(input, -                    sampleRate, format, channelMask, &frameCount, &flags, tid, &sessionId, -                    ¬ificationFrames, -                    cblk, buffers, &status); +                    sampleRate, format, channelMask, opPackageName, &frameCount, &flags, tid, +                    clientUid, &sessionId, ¬ificationFrames, cblk, buffers, &status);              LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));              reply->writeInt64(frameCount);              reply->writeInt32(flags); @@ -1247,12 +1261,13 @@ status_t BnAudioFlinger::onTransact(              int32_t priority = data.readInt32();              audio_io_handle_t output = (audio_io_handle_t) data.readInt32();              int sessionId = data.readInt32(); +            const String16 opPackageName = data.readString16();              status_t status;              int id;              int enabled;              sp<IEffect> effect = createEffect(&desc, client, priority, output, sessionId, -                    &status, &id, &enabled); +                    opPackageName, &status, &id, &enabled);              reply->writeInt32(status);              reply->writeInt32(id);              reply->writeInt32(enabled); @@ -1388,6 +1403,11 @@ status_t BnAudioFlinger::onTransact(              
reply->writeInt32(getAudioHwSyncForSession((audio_session_t)data.readInt32()));              return NO_ERROR;          } break; +        case SYSTEM_READY: { +            CHECK_INTERFACE(IAudioFlinger, data, reply); +            systemReady(); +            return NO_ERROR; +        } break;          default:              return BBinder::onTransact(code, data, reply, flags);      } @@ -1395,4 +1415,4 @@ status_t BnAudioFlinger::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp index 1c299f7..3429d36 100644 --- a/media/libmedia/IAudioFlingerClient.cpp +++ b/media/libmedia/IAudioFlingerClient.cpp @@ -39,25 +39,18 @@ public:      {      } -    void ioConfigChanged(int event, audio_io_handle_t ioHandle, const void *param2) +    void ioConfigChanged(audio_io_config_event event, const sp<AudioIoDescriptor>& ioDesc)      {          Parcel data, reply;          data.writeInterfaceToken(IAudioFlingerClient::getInterfaceDescriptor());          data.writeInt32(event); -        data.writeInt32((int32_t) ioHandle); -        if (event == AudioSystem::STREAM_CONFIG_CHANGED) { -            uint32_t stream = *(const uint32_t *)param2; -            ALOGV("ioConfigChanged stream %d", stream); -            data.writeInt32(stream); -        } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { -            const AudioSystem::OutputDescriptor *desc = -                    (const AudioSystem::OutputDescriptor *)param2; -            data.writeInt32(desc->samplingRate); -            data.writeInt32(desc->format); -            data.writeInt32(desc->channelMask); -            data.writeInt64(desc->frameCount); -            data.writeInt32(desc->latency); -        } +        data.writeInt32((int32_t)ioDesc->mIoHandle); +        data.write(&ioDesc->mPatch, sizeof(struct 
audio_patch)); +        data.writeInt32(ioDesc->mSamplingRate); +        data.writeInt32(ioDesc->mFormat); +        data.writeInt32(ioDesc->mChannelMask); +        data.writeInt64(ioDesc->mFrameCount); +        data.writeInt32(ioDesc->mLatency);          remote()->transact(IO_CONFIG_CHANGED, data, &reply, IBinder::FLAG_ONEWAY);      }  }; @@ -72,24 +65,16 @@ status_t BnAudioFlingerClient::onTransact(      switch (code) {      case IO_CONFIG_CHANGED: {              CHECK_INTERFACE(IAudioFlingerClient, data, reply); -            int event = data.readInt32(); -            audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32(); -            const void *param2 = NULL; -            AudioSystem::OutputDescriptor desc; -            uint32_t stream; -            if (event == AudioSystem::STREAM_CONFIG_CHANGED) { -                stream = data.readInt32(); -                param2 = &stream; -                ALOGV("STREAM_CONFIG_CHANGED stream %d", stream); -            } else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) { -                desc.samplingRate = data.readInt32(); -                desc.format = (audio_format_t) data.readInt32(); -                desc.channelMask = (audio_channel_mask_t) data.readInt32(); -                desc.frameCount = data.readInt64(); -                desc.latency = data.readInt32(); -                param2 = &desc; -            } -            ioConfigChanged(event, ioHandle, param2); +            audio_io_config_event event = (audio_io_config_event)data.readInt32(); +            sp<AudioIoDescriptor> ioDesc = new AudioIoDescriptor(); +            ioDesc->mIoHandle = (audio_io_handle_t) data.readInt32(); +            data.read(&ioDesc->mPatch, sizeof(struct audio_patch)); +            ioDesc->mSamplingRate = data.readInt32(); +            ioDesc->mFormat = (audio_format_t) data.readInt32(); +            ioDesc->mChannelMask = (audio_channel_mask_t) data.readInt32(); +            
ioDesc->mFrameCount = data.readInt64(); +            ioDesc->mLatency = data.readInt32(); +            ioConfigChanged(event, ioDesc);              return NO_ERROR;          } break;          default: @@ -99,4 +84,4 @@ status_t BnAudioFlingerClient::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp index cfb28a9..3348441 100644 --- a/media/libmedia/IAudioPolicyService.cpp +++ b/media/libmedia/IAudioPolicyService.cpp @@ -71,6 +71,9 @@ enum {      RELEASE_SOUNDTRIGGER_SESSION,      GET_PHONE_STATE,      REGISTER_POLICY_MIXES, +    START_AUDIO_SOURCE, +    STOP_AUDIO_SOURCE, +    SET_AUDIO_PORT_CALLBACK_ENABLED,  };  #define MAX_ITEMS_PER_LIST 1024 @@ -86,13 +89,15 @@ public:      virtual status_t setDeviceConnectionState(                                      audio_devices_t device,                                      audio_policy_dev_state_t state, -                                    const char *device_address) +                                    const char *device_address, +                                    const char *device_name)      {          Parcel data, reply;          data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());          data.writeInt32(static_cast <uint32_t>(device));          data.writeInt32(static_cast <uint32_t>(state));          data.writeCString(device_address); +        data.writeCString(device_name);          remote()->transact(SET_DEVICE_CONNECTION_STATE, data, &reply);          return static_cast <status_t> (reply.readInt32());      } @@ -167,10 +172,12 @@ public:                                          audio_io_handle_t *output,                                          audio_session_t session,                                          audio_stream_type_t *stream, +                                        uid_t uid,                     
                     uint32_t samplingRate,                                          audio_format_t format,                                          audio_channel_mask_t channelMask,                                          audio_output_flags_t flags, +                                        audio_port_handle_t selectedDeviceId,                                          const audio_offload_info_t *offloadInfo)          {              Parcel data, reply; @@ -202,10 +209,12 @@ public:                  data.writeInt32(1);                  data.writeInt32(*stream);              } +            data.writeInt32(uid);              data.writeInt32(samplingRate);              data.writeInt32(static_cast <uint32_t>(format));              data.writeInt32(channelMask);              data.writeInt32(static_cast <uint32_t>(flags)); +            data.writeInt32(selectedDeviceId);              // hasOffloadInfo              if (offloadInfo == NULL) {                  data.writeInt32(0); @@ -269,10 +278,12 @@ public:      virtual status_t getInputForAttr(const audio_attributes_t *attr,                                       audio_io_handle_t *input,                                       audio_session_t session, +                                     uid_t uid,                                       uint32_t samplingRate,                                       audio_format_t format,                                       audio_channel_mask_t channelMask, -                                     audio_input_flags_t flags) +                                     audio_input_flags_t flags, +                                     audio_port_handle_t selectedDeviceId)      {          Parcel data, reply;          data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); @@ -286,10 +297,12 @@ public:          }          data.write(attr, sizeof(audio_attributes_t));          data.writeInt32(session); +        data.writeInt32(uid);          data.writeInt32(samplingRate);          
data.writeInt32(static_cast <uint32_t>(format));          data.writeInt32(channelMask);          data.writeInt32(flags); +        data.writeInt32(selectedDeviceId);          status_t status = remote()->transact(GET_INPUT_FOR_ATTR, data, &reply);          if (status != NO_ERROR) {              return status; @@ -634,6 +647,14 @@ public:          remote()->transact(REGISTER_CLIENT, data, &reply);      } +    virtual void setAudioPortCallbacksEnabled(bool enabled) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); +        data.writeInt32(enabled ? 1 : 0); +        remote()->transact(SET_AUDIO_PORT_CALLBACK_ENABLED, data, &reply); +    } +      virtual status_t acquireSoundTriggerSession(audio_session_t *session,                                              audio_io_handle_t *ioHandle,                                              audio_devices_t *device) @@ -710,6 +731,42 @@ public:          }          return status;      } + +    virtual status_t startAudioSource(const struct audio_port_config *source, +                                      const audio_attributes_t *attributes, +                                      audio_io_handle_t *handle) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); +        if (source == NULL || attributes == NULL || handle == NULL) { +            return BAD_VALUE; +        } +        data.write(source, sizeof(struct audio_port_config)); +        data.write(attributes, sizeof(audio_attributes_t)); +        status_t status = remote()->transact(START_AUDIO_SOURCE, data, &reply); +        if (status != NO_ERROR) { +            return status; +        } +        status = (status_t)reply.readInt32(); +        if (status != NO_ERROR) { +            return status; +        } +        *handle = (audio_io_handle_t)reply.readInt32(); +        return status; +    } + +    virtual status_t 
stopAudioSource(audio_io_handle_t handle) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); +        data.writeInt32(handle); +        status_t status = remote()->transact(STOP_AUDIO_SOURCE, data, &reply); +        if (status != NO_ERROR) { +            return status; +        } +        status = (status_t)reply.readInt32(); +        return status; +    }  };  IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService"); @@ -728,9 +785,11 @@ status_t BnAudioPolicyService::onTransact(              audio_policy_dev_state_t state =                      static_cast <audio_policy_dev_state_t>(data.readInt32());              const char *device_address = data.readCString(); +            const char *device_name = data.readCString();              reply->writeInt32(static_cast<uint32_t> (setDeviceConnectionState(device,                                                                                state, -                                                                              device_address))); +                                                                              device_address, +                                                                              device_name)));              return NO_ERROR;          } break; @@ -806,11 +865,13 @@ status_t BnAudioPolicyService::onTransact(              if (hasStream) {                  stream = (audio_stream_type_t)data.readInt32();              } +            uid_t uid = (uid_t)data.readInt32();              uint32_t samplingRate = data.readInt32();              audio_format_t format = (audio_format_t) data.readInt32();              audio_channel_mask_t channelMask = data.readInt32();              audio_output_flags_t flags =                      static_cast <audio_output_flags_t>(data.readInt32()); +            audio_port_handle_t selectedDeviceId = data.readInt32();              bool hasOffloadInfo = data.readInt32() != 0;     
         audio_offload_info_t offloadInfo;              if (hasOffloadInfo) { @@ -818,9 +879,9 @@ status_t BnAudioPolicyService::onTransact(              }              audio_io_handle_t output;              status_t status = getOutputForAttr(hasAttributes ? &attr : NULL, -                    &output, session, &stream, +                    &output, session, &stream, uid,                      samplingRate, format, channelMask, -                    flags, hasOffloadInfo ? &offloadInfo : NULL); +                    flags, selectedDeviceId, hasOffloadInfo ? &offloadInfo : NULL);              reply->writeInt32(status);              reply->writeInt32(output);              reply->writeInt32(stream); @@ -865,14 +926,16 @@ status_t BnAudioPolicyService::onTransact(              audio_attributes_t attr;              data.read(&attr, sizeof(audio_attributes_t));              audio_session_t session = (audio_session_t)data.readInt32(); +            uid_t uid = (uid_t)data.readInt32();              uint32_t samplingRate = data.readInt32();              audio_format_t format = (audio_format_t) data.readInt32();              audio_channel_mask_t channelMask = data.readInt32();              audio_input_flags_t flags = (audio_input_flags_t) data.readInt32(); +            audio_port_handle_t selectedDeviceId = (audio_port_handle_t) data.readInt32();              audio_io_handle_t input; -            status_t status = getInputForAttr(&attr, &input, session, +            status_t status = getInputForAttr(&attr, &input, session, uid,                                                samplingRate, format, channelMask, -                                              flags); +                                              flags, selectedDeviceId);              reply->writeInt32(status);              if (status == NO_ERROR) {                  reply->writeInt32(input); @@ -1165,6 +1228,12 @@ status_t BnAudioPolicyService::onTransact(              return NO_ERROR;          } break; +        case 
SET_AUDIO_PORT_CALLBACK_ENABLED: { +            CHECK_INTERFACE(IAudioPolicyService, data, reply); +            setAudioPortCallbacksEnabled(data.readInt32() == 1); +            return NO_ERROR; +        } break; +          case ACQUIRE_SOUNDTRIGGER_SESSION: {              CHECK_INTERFACE(IAudioPolicyService, data, reply);              sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>( @@ -1217,6 +1286,27 @@ status_t BnAudioPolicyService::onTransact(              return NO_ERROR;          } break; +        case START_AUDIO_SOURCE: { +            CHECK_INTERFACE(IAudioPolicyService, data, reply); +            struct audio_port_config source; +            data.read(&source, sizeof(struct audio_port_config)); +            audio_attributes_t attributes; +            data.read(&attributes, sizeof(audio_attributes_t)); +            audio_io_handle_t handle; +            status_t status = startAudioSource(&source, &attributes, &handle); +            reply->writeInt32(status); +            reply->writeInt32(handle); +            return NO_ERROR; +        } break; + +        case STOP_AUDIO_SOURCE: { +            CHECK_INTERFACE(IAudioPolicyService, data, reply); +            audio_io_handle_t handle = (audio_io_handle_t)data.readInt32(); +            status_t status = stopAudioSource(handle); +            reply->writeInt32(status); +            return NO_ERROR; +        } break; +          default:              return BBinder::onTransact(code, data, reply, flags);      } @@ -1224,4 +1314,4 @@ status_t BnAudioPolicyService::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioPolicyServiceClient.cpp b/media/libmedia/IAudioPolicyServiceClient.cpp index e802277..65cc7d6 100644 --- a/media/libmedia/IAudioPolicyServiceClient.cpp +++ b/media/libmedia/IAudioPolicyServiceClient.cpp @@ -29,7 +29,8 @@ namespace android {  enum {    
  PORT_LIST_UPDATE = IBinder::FIRST_CALL_TRANSACTION, -    PATCH_LIST_UPDATE +    PATCH_LIST_UPDATE, +    MIX_STATE_UPDATE  };  class BpAudioPolicyServiceClient : public BpInterface<IAudioPolicyServiceClient> @@ -53,6 +54,15 @@ public:          data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());          remote()->transact(PATCH_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);      } + +    void onDynamicPolicyMixStateUpdate(String8 regId, int32_t state) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor()); +        data.writeString8(regId); +        data.writeInt32(state); +        remote()->transact(MIX_STATE_UPDATE, data, &reply, IBinder::FLAG_ONEWAY); +    }  };  IMPLEMENT_META_INTERFACE(AudioPolicyServiceClient, "android.media.IAudioPolicyServiceClient"); @@ -73,6 +83,13 @@ status_t BnAudioPolicyServiceClient::onTransact(              onAudioPatchListUpdate();              return NO_ERROR;          } break; +    case MIX_STATE_UPDATE: { +            CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply); +            String8 regId = data.readString8(); +            int32_t state = data.readInt32(); +            onDynamicPolicyMixStateUpdate(regId, state); +            return NO_ERROR; +    }      default:          return BBinder::onTransact(code, data, reply, flags);      } @@ -80,4 +97,4 @@ status_t BnAudioPolicyServiceClient::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp index 8a4a383..9d80753 100644 --- a/media/libmedia/IAudioRecord.cpp +++ b/media/libmedia/IAudioRecord.cpp @@ -91,4 +91,4 @@ status_t BnAudioRecord::onTransact(      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index df209fd..651cb61 
100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -292,4 +292,4 @@ status_t BnAudioTrack::onTransact(      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp index c26c5bf..947294f 100644 --- a/media/libmedia/ICrypto.cpp +++ b/media/libmedia/ICrypto.cpp @@ -19,6 +19,7 @@  #include <utils/Log.h>  #include <binder/Parcel.h> +#include <binder/IMemory.h>  #include <media/ICrypto.h>  #include <media/stagefright/MediaErrors.h>  #include <media/stagefright/foundation/ADebug.h> @@ -34,6 +35,7 @@ enum {      REQUIRES_SECURE_COMPONENT,      DECRYPT,      NOTIFY_RESOLUTION, +    SET_MEDIADRM_SESSION,  };  struct BpCrypto : public BpInterface<ICrypto> { @@ -97,7 +99,7 @@ struct BpCrypto : public BpInterface<ICrypto> {              const uint8_t key[16],              const uint8_t iv[16],              CryptoPlugin::Mode mode, -            const void *srcPtr, +            const sp<IMemory> &sharedBuffer, size_t offset,              const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,              void *dstPtr,              AString *errorDetailMsg) { @@ -126,7 +128,8 @@ struct BpCrypto : public BpInterface<ICrypto> {          }          data.writeInt32(totalSize); -        data.write(srcPtr, totalSize); +        data.writeStrongBinder(IInterface::asBinder(sharedBuffer)); +        data.writeInt32(offset);          data.writeInt32(numSubSamples);          data.write(subSamples, sizeof(CryptoPlugin::SubSample) * numSubSamples); @@ -139,7 +142,7 @@ struct BpCrypto : public BpInterface<ICrypto> {          ssize_t result = reply.readInt32(); -        if (result >= ERROR_DRM_VENDOR_MIN && result <= ERROR_DRM_VENDOR_MAX) { +        if (isCryptoError(result)) {              errorDetailMsg->setTo(reply.readCString());          } @@ -159,7 +162,28 @@ struct BpCrypto : public BpInterface<ICrypto> {          remote()->transact(NOTIFY_RESOLUTION, data, &reply);      } +    
virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId) { +        Parcel data, reply; +        data.writeInterfaceToken(ICrypto::getInterfaceDescriptor()); + +        writeVector(data, sessionId); +        remote()->transact(SET_MEDIADRM_SESSION, data, &reply); + +        return reply.readInt32(); +    } +  private: +    void readVector(Parcel &reply, Vector<uint8_t> &vector) const { +        uint32_t size = reply.readInt32(); +        vector.insertAt((size_t)0, size); +        reply.read(vector.editArray(), size); +    } + +    void writeVector(Parcel &data, Vector<uint8_t> const &vector) const { +        data.writeInt32(vector.size()); +        data.write(vector.array(), vector.size()); +    } +      DISALLOW_EVIL_CONSTRUCTORS(BpCrypto);  }; @@ -167,6 +191,17 @@ IMPLEMENT_META_INTERFACE(Crypto, "android.hardware.ICrypto");  //////////////////////////////////////////////////////////////////////////////// +void BnCrypto::readVector(const Parcel &data, Vector<uint8_t> &vector) const { +    uint32_t size = data.readInt32(); +    vector.insertAt((size_t)0, size); +    data.read(vector.editArray(), size); +} + +void BnCrypto::writeVector(Parcel *reply, Vector<uint8_t> const &vector) const { +    reply->writeInt32(vector.size()); +    reply->write(vector.array(), vector.size()); +} +  status_t BnCrypto::onTransact(      uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {      switch (code) { @@ -245,8 +280,9 @@ status_t BnCrypto::onTransact(              data.read(iv, sizeof(iv));              size_t totalSize = data.readInt32(); -            void *srcData = malloc(totalSize); -            data.read(srcData, totalSize); +            sp<IMemory> sharedBuffer = +                interface_cast<IMemory>(data.readStrongBinder()); +            int32_t offset = data.readInt32();              int32_t numSubSamples = data.readInt32(); @@ -257,28 +293,33 @@ status_t BnCrypto::onTransact(                      subSamples,                      
sizeof(CryptoPlugin::SubSample) * numSubSamples); -            void *dstPtr; +            void *secureBufferId, *dstPtr;              if (secure) { -                dstPtr = reinterpret_cast<void *>(static_cast<uintptr_t>(data.readInt64())); +                secureBufferId = reinterpret_cast<void *>(static_cast<uintptr_t>(data.readInt64()));              } else {                  dstPtr = malloc(totalSize);              }              AString errorDetailMsg; -            ssize_t result = decrypt( +            ssize_t result; + +            if (offset + totalSize > sharedBuffer->size()) { +                result = -EINVAL; +            } else { +                result = decrypt(                      secure,                      key,                      iv,                      mode, -                    srcData, +                    sharedBuffer, offset,                      subSamples, numSubSamples, -                    dstPtr, +                    secure ? secureBufferId : dstPtr,                      &errorDetailMsg); +            }              reply->writeInt32(result); -            if (result >= ERROR_DRM_VENDOR_MIN -                && result <= ERROR_DRM_VENDOR_MAX) { +            if (isCryptoError(result)) {                  reply->writeCString(errorDetailMsg.c_str());              } @@ -294,9 +335,6 @@ status_t BnCrypto::onTransact(              delete[] subSamples;              subSamples = NULL; -            free(srcData); -            srcData = NULL; -              return OK;          } @@ -311,6 +349,15 @@ status_t BnCrypto::onTransact(              return OK;          } +        case SET_MEDIADRM_SESSION: +        { +            CHECK_INTERFACE(IDrm, data, reply); +            Vector<uint8_t> sessionId; +            readVector(data, sessionId); +            reply->writeInt32(setMediaDrmSession(sessionId)); +            return OK; +        } +          default:              return BBinder::onTransact(code, data, reply, flags);      } diff --git 
a/media/libmedia/IDataSource.cpp b/media/libmedia/IDataSource.cpp new file mode 100644 index 0000000..76d1d68 --- /dev/null +++ b/media/libmedia/IDataSource.cpp @@ -0,0 +1,108 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IDataSource" +#include <utils/Log.h> +#include <utils/Timers.h> + +#include <media/IDataSource.h> + +#include <binder/IMemory.h> +#include <binder/Parcel.h> +#include <media/stagefright/foundation/ADebug.h> + +namespace android { + +enum { +    GET_IMEMORY = IBinder::FIRST_CALL_TRANSACTION, +    READ_AT, +    GET_SIZE, +    CLOSE, +}; + +struct BpDataSource : public BpInterface<IDataSource> { +    BpDataSource(const sp<IBinder>& impl) : BpInterface<IDataSource>(impl) {} + +    virtual sp<IMemory> getIMemory() { +        Parcel data, reply; +        data.writeInterfaceToken(IDataSource::getInterfaceDescriptor()); +        remote()->transact(GET_IMEMORY, data, &reply); +        sp<IBinder> binder = reply.readStrongBinder(); +        return interface_cast<IMemory>(binder); +    } + +    virtual ssize_t readAt(off64_t offset, size_t size) { +        Parcel data, reply; +        data.writeInterfaceToken(IDataSource::getInterfaceDescriptor()); +        data.writeInt64(offset); +        data.writeInt64(size); +        remote()->transact(READ_AT, data, &reply); +        return reply.readInt64(); +    } + +    virtual status_t 
getSize(off64_t* size) { +        Parcel data, reply; +        data.writeInterfaceToken(IDataSource::getInterfaceDescriptor()); +        remote()->transact(GET_SIZE, data, &reply); +        status_t err = reply.readInt32(); +        *size = reply.readInt64(); +        return err; +    } + +    virtual void close() { +        Parcel data, reply; +        data.writeInterfaceToken(IDataSource::getInterfaceDescriptor()); +        remote()->transact(CLOSE, data, &reply); +    } +}; + +IMPLEMENT_META_INTERFACE(DataSource, "android.media.IDataSource"); + +status_t BnDataSource::onTransact( +    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { +    switch (code) { +        case GET_IMEMORY: { +            CHECK_INTERFACE(IDataSource, data, reply); +            reply->writeStrongBinder(IInterface::asBinder(getIMemory())); +            return NO_ERROR; +        } break; +        case READ_AT: { +            CHECK_INTERFACE(IDataSource, data, reply); +            off64_t offset = (off64_t) data.readInt64(); +            size_t size = (size_t) data.readInt64(); +            reply->writeInt64(readAt(offset, size)); +            return NO_ERROR; +        } break; +        case GET_SIZE: { +            CHECK_INTERFACE(IDataSource, data, reply); +            off64_t size; +            status_t err = getSize(&size); +            reply->writeInt32(err); +            reply->writeInt64(size); +            return NO_ERROR; +        } break; +        case CLOSE: { +            CHECK_INTERFACE(IDataSource, data, reply); +            close(); +            return NO_ERROR; +        } break; +        default: +            return BBinder::onTransact(code, data, reply, flags); +    } +} + +}  // namespace android diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp index b08fa82..b1ad0c5 100644 --- a/media/libmedia/IDrm.cpp +++ b/media/libmedia/IDrm.cpp @@ -67,7 +67,10 @@ struct BpDrm : public BpInterface<IDrm> {      virtual status_t initCheck() const {          
Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); -        remote()->transact(INIT_CHECK, data, &reply); +        status_t status = remote()->transact(INIT_CHECK, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -77,7 +80,11 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          data.write(uuid, 16);          data.writeString8(mimeType); -        remote()->transact(IS_CRYPTO_SUPPORTED, data, &reply); +        status_t status = remote()->transact(IS_CRYPTO_SUPPORTED, data, &reply); +        if (status != OK) { +            ALOGE("isCryptoSchemeSupported: binder call failed: %d", status); +            return false; +        }          return reply.readInt32() != 0;      } @@ -87,7 +94,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          data.write(uuid, 16); -        remote()->transact(CREATE_PLUGIN, data, &reply); +        status_t status = remote()->transact(CREATE_PLUGIN, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -95,7 +105,10 @@ struct BpDrm : public BpInterface<IDrm> {      virtual status_t destroyPlugin() {          Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); -        remote()->transact(DESTROY_PLUGIN, data, &reply); +        status_t status = remote()->transact(DESTROY_PLUGIN, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -104,7 +117,10 @@ struct BpDrm : public BpInterface<IDrm> {          Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); -        remote()->transact(OPEN_SESSION, data, &reply); +        status_t status = remote()->transact(OPEN_SESSION, data, &reply); 
+        if (status != OK) { +            return status; +        }          readVector(reply, sessionId);          return reply.readInt32(); @@ -115,7 +131,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          writeVector(data, sessionId); -        remote()->transact(CLOSE_SESSION, data, &reply); +        status_t status = remote()->transact(CLOSE_SESSION, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -125,7 +144,8 @@ struct BpDrm : public BpInterface<IDrm> {                        Vector<uint8_t> const &initData,                        String8 const &mimeType, DrmPlugin::KeyType keyType,                        KeyedVector<String8, String8> const &optionalParameters, -                      Vector<uint8_t> &request, String8 &defaultUrl) { +                      Vector<uint8_t> &request, String8 &defaultUrl, +                      DrmPlugin::KeyRequestType *keyRequestType) {          Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); @@ -139,10 +159,15 @@ struct BpDrm : public BpInterface<IDrm> {              data.writeString8(optionalParameters.keyAt(i));              data.writeString8(optionalParameters.valueAt(i));          } -        remote()->transact(GET_KEY_REQUEST, data, &reply); + +        status_t status = remote()->transact(GET_KEY_REQUEST, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, request);          defaultUrl = reply.readString8(); +        *keyRequestType = static_cast<DrmPlugin::KeyRequestType>(reply.readInt32());          return reply.readInt32();      } @@ -154,7 +179,12 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          writeVector(data, sessionId);          writeVector(data, response); -        
remote()->transact(PROVIDE_KEY_RESPONSE, data, &reply); + +        status_t status = remote()->transact(PROVIDE_KEY_RESPONSE, data, &reply); +        if (status != OK) { +            return status; +        } +          readVector(reply, keySetId);          return reply.readInt32(); @@ -165,7 +195,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          writeVector(data, keySetId); -        remote()->transact(REMOVE_KEYS, data, &reply); +        status_t status = remote()->transact(REMOVE_KEYS, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -177,7 +210,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, sessionId);          writeVector(data, keySetId); -        remote()->transact(RESTORE_KEYS, data, &reply); +        status_t status = remote()->transact(RESTORE_KEYS, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -188,7 +224,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          writeVector(data, sessionId); -        remote()->transact(QUERY_KEY_STATUS, data, &reply); +        status_t status = remote()->transact(QUERY_KEY_STATUS, data, &reply); +        if (status != OK) { +            return status; +        }          infoMap.clear();          size_t count = reply.readInt32(); @@ -209,7 +248,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeString8(certType);          data.writeString8(certAuthority); -        remote()->transact(GET_PROVISION_REQUEST, data, &reply); +        status_t status = remote()->transact(GET_PROVISION_REQUEST, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, request);          defaultUrl = reply.readString8(); @@ -224,7 +266,10 @@ struct BpDrm : 
public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          writeVector(data, response); -        remote()->transact(PROVIDE_PROVISION_RESPONSE, data, &reply); +        status_t status = remote()->transact(PROVIDE_PROVISION_RESPONSE, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, certificate);          readVector(reply, wrappedKey); @@ -236,7 +281,10 @@ struct BpDrm : public BpInterface<IDrm> {          Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); -        remote()->transact(UNPROVISION_DEVICE, data, &reply); +        status_t status = remote()->transact(UNPROVISION_DEVICE, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -245,7 +293,10 @@ struct BpDrm : public BpInterface<IDrm> {          Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); -        remote()->transact(GET_SECURE_STOPS, data, &reply); +        status_t status = remote()->transact(GET_SECURE_STOPS, data, &reply); +        if (status != OK) { +            return status; +        }          secureStops.clear();          uint32_t count = reply.readInt32(); @@ -262,7 +313,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          writeVector(data, ssid); -        remote()->transact(GET_SECURE_STOP, data, &reply); +        status_t status = remote()->transact(GET_SECURE_STOP, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, secureStop);          return reply.readInt32(); @@ -273,7 +327,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          writeVector(data, ssRelease); -        remote()->transact(RELEASE_SECURE_STOPS, data, &reply); +        status_t status 
= remote()->transact(RELEASE_SECURE_STOPS, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -282,7 +339,10 @@ struct BpDrm : public BpInterface<IDrm> {          Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); -        remote()->transact(RELEASE_ALL_SECURE_STOPS, data, &reply); +        status_t status = remote()->transact(RELEASE_ALL_SECURE_STOPS, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -292,7 +352,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          data.writeString8(name); -        remote()->transact(GET_PROPERTY_STRING, data, &reply); +        status_t status = remote()->transact(GET_PROPERTY_STRING, data, &reply); +        if (status != OK) { +            return status; +        }          value = reply.readString8();          return reply.readInt32(); @@ -303,7 +366,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          data.writeString8(name); -        remote()->transact(GET_PROPERTY_BYTE_ARRAY, data, &reply); +        status_t status = remote()->transact(GET_PROPERTY_BYTE_ARRAY, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, value);          return reply.readInt32(); @@ -315,7 +381,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeString8(name);          data.writeString8(value); -        remote()->transact(SET_PROPERTY_STRING, data, &reply); +        status_t status = remote()->transact(SET_PROPERTY_STRING, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -327,7 +396,10 @@ struct BpDrm : public BpInterface<IDrm> {          data.writeString8(name);          
writeVector(data, value); -        remote()->transact(SET_PROPERTY_BYTE_ARRAY, data, &reply); +        status_t status = remote()->transact(SET_PROPERTY_BYTE_ARRAY, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -340,7 +412,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, sessionId);          data.writeString8(algorithm); -        remote()->transact(SET_CIPHER_ALGORITHM, data, &reply); +        status_t status = remote()->transact(SET_CIPHER_ALGORITHM, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -351,7 +426,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, sessionId);          data.writeString8(algorithm); -        remote()->transact(SET_MAC_ALGORITHM, data, &reply); +        status_t status = remote()->transact(SET_MAC_ALGORITHM, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -368,7 +446,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, input);          writeVector(data, iv); -        remote()->transact(ENCRYPT, data, &reply); +        status_t status = remote()->transact(ENCRYPT, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, output);          return reply.readInt32(); @@ -387,7 +468,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, input);          writeVector(data, iv); -        remote()->transact(DECRYPT, data, &reply); +        status_t status = remote()->transact(DECRYPT, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, output);          return reply.readInt32(); @@ -404,7 +488,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, keyId);          writeVector(data, message); 
-        remote()->transact(SIGN, data, &reply); +        status_t status = remote()->transact(SIGN, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, signature);          return reply.readInt32(); @@ -423,7 +510,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, message);          writeVector(data, signature); -        remote()->transact(VERIFY, data, &reply); +        status_t status = remote()->transact(VERIFY, data, &reply); +        if (status != OK) { +            return status; +        }          match = (bool)reply.readInt32();          return reply.readInt32();      } @@ -441,7 +531,10 @@ struct BpDrm : public BpInterface<IDrm> {          writeVector(data, message);          writeVector(data, wrappedKey); -        remote()->transact(SIGN_RSA, data, &reply); +        status_t status = remote()->transact(SIGN_RSA, data, &reply); +        if (status != OK) { +            return status; +        }          readVector(reply, signature);          return reply.readInt32(); @@ -451,7 +544,10 @@ struct BpDrm : public BpInterface<IDrm> {          Parcel data, reply;          data.writeInterfaceToken(IDrm::getInterfaceDescriptor());          data.writeStrongBinder(IInterface::asBinder(listener)); -        remote()->transact(SET_LISTENER, data, &reply); +        status_t status = remote()->transact(SET_LISTENER, data, &reply); +        if (status != OK) { +            return status; +        }          return reply.readInt32();      } @@ -562,13 +658,15 @@ status_t BnDrm::onTransact(              Vector<uint8_t> request;              String8 defaultUrl; +            DrmPlugin::KeyRequestType keyRequestType; + +            status_t result = getKeyRequest(sessionId, initData, mimeType, +                    keyType, optionalParameters, request, defaultUrl, +                    &keyRequestType); -            status_t result = getKeyRequest(sessionId, initData, -                            
                mimeType, keyType, -                                            optionalParameters, -                                            request, defaultUrl);              writeVector(reply, request);              reply->writeString8(defaultUrl); +            reply->writeInt32(static_cast<int32_t>(keyRequestType));              reply->writeInt32(result);              return OK;          } diff --git a/media/libmedia/IDrmClient.cpp b/media/libmedia/IDrmClient.cpp index f50715e..490c6ed 100644 --- a/media/libmedia/IDrmClient.cpp +++ b/media/libmedia/IDrmClient.cpp @@ -78,4 +78,4 @@ status_t BnDrmClient::onTransact(      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp index c2fff78..eb4b098 100644 --- a/media/libmedia/IEffect.cpp +++ b/media/libmedia/IEffect.cpp @@ -201,4 +201,4 @@ status_t BnEffect::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IEffectClient.cpp b/media/libmedia/IEffectClient.cpp index aef4371..1322e72 100644 --- a/media/libmedia/IEffectClient.cpp +++ b/media/libmedia/IEffectClient.cpp @@ -141,4 +141,4 @@ status_t BnEffectClient::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp index 79944ee..f3a8902 100644 --- a/media/libmedia/IHDCP.cpp +++ b/media/libmedia/IHDCP.cpp @@ -284,11 +284,17 @@ status_t BnHDCP::onTransact(              size_t offset = data.readInt32();              size_t size = data.readInt32();              uint32_t streamCTR = data.readInt32(); -            void *outData = malloc(size); +            void *outData = NULL;              uint64_t inputCTR; -            status_t err = encryptNative(graphicBuffer, offset, size, -                                         streamCTR, 
&inputCTR, outData); +            status_t err = ERROR_OUT_OF_RANGE; + +            outData = malloc(size); + +            if (outData != NULL) { +                err = encryptNative(graphicBuffer, offset, size, +                                             streamCTR, &inputCTR, outData); +            }              reply->writeInt32(err); diff --git a/media/libmedia/IMediaCodecList.cpp b/media/libmedia/IMediaCodecList.cpp index bf7c5ca..e2df104 100644 --- a/media/libmedia/IMediaCodecList.cpp +++ b/media/libmedia/IMediaCodecList.cpp @@ -30,6 +30,7 @@ enum {      CREATE = IBinder::FIRST_CALL_TRANSACTION,      COUNT_CODECS,      GET_CODEC_INFO, +    GET_GLOBAL_SETTINGS,      FIND_CODEC_BY_TYPE,      FIND_CODEC_BY_NAME,  }; @@ -64,6 +65,19 @@ public:          }      } +    virtual const sp<AMessage> getGlobalSettings() const +    { +        Parcel data, reply; +        data.writeInterfaceToken(IMediaCodecList::getInterfaceDescriptor()); +        remote()->transact(GET_GLOBAL_SETTINGS, data, &reply); +        status_t err = reply.readInt32(); +        if (err == OK) { +            return AMessage::FromParcel(reply); +        } else { +            return NULL; +        } +    } +      virtual ssize_t findCodecByType(              const char *type, bool encoder, size_t startIndex = 0) const      { @@ -125,6 +139,20 @@ status_t BnMediaCodecList::onTransact(          }          break; +        case GET_GLOBAL_SETTINGS: +        { +            CHECK_INTERFACE(IMediaCodecList, data, reply); +            const sp<AMessage> info = getGlobalSettings(); +            if (info != NULL) { +                reply->writeInt32(OK); +                info->writeToParcel(reply); +            } else { +                reply->writeInt32(-ERANGE); +            } +            return NO_ERROR; +        } +        break; +          case FIND_CODEC_BY_TYPE:          {              CHECK_INTERFACE(IMediaCodecList, data, reply); @@ -160,4 +188,4 @@ status_t BnMediaCodecList::onTransact(  // 
---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp index 38e9ca0..d4360ea 100644 --- a/media/libmedia/IMediaDeathNotifier.cpp +++ b/media/libmedia/IMediaDeathNotifier.cpp @@ -108,4 +108,4 @@ IMediaDeathNotifier::DeathNotifier::~DeathNotifier()      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp index a5a3714..0dda0be 100644 --- a/media/libmedia/IMediaHTTPConnection.cpp +++ b/media/libmedia/IMediaHTTPConnection.cpp @@ -70,7 +70,7 @@ struct BpMediaHTTPConnection : public BpInterface<IMediaHTTPConnection> {          int32_t exceptionCode = reply.readExceptionCode();          if (exceptionCode) { -            return UNKNOWN_ERROR; +            return false;          }          sp<IBinder> binder = reply.readStrongBinder(); @@ -107,7 +107,14 @@ struct BpMediaHTTPConnection : public BpInterface<IMediaHTTPConnection> {              return UNKNOWN_ERROR;          } -        size_t len = reply.readInt32(); +        int32_t lenOrErrorCode = reply.readInt32(); + +        // Negative values are error codes +        if (lenOrErrorCode < 0) { +            return lenOrErrorCode; +        } + +        size_t len = lenOrErrorCode;          if (len > size) {              ALOGE("requested %zu, got %zu", size, len); @@ -186,5 +193,4 @@ private:  IMPLEMENT_META_INTERFACE(          MediaHTTPConnection, "android.media.IMediaHTTPConnection"); -}  // namespace android - +} // namespace android diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp index 1260582..0c16a2b 100644 --- a/media/libmedia/IMediaHTTPService.cpp +++ b/media/libmedia/IMediaHTTPService.cpp @@ -44,6 +44,7 @@ struct BpMediaHTTPService : public BpInterface<IMediaHTTPService> {          status_t err = reply.readInt32();     
     if (err != OK) { +            ALOGE("Unable to make HTTP connection (err = %d)", err);              return NULL;          } @@ -54,5 +55,4 @@ struct BpMediaHTTPService : public BpInterface<IMediaHTTPService> {  IMPLEMENT_META_INTERFACE(          MediaHTTPService, "android.media.IMediaHTTPService"); -}  // namespace android - +} // namespace android diff --git a/media/libmedia/IMediaLogService.cpp b/media/libmedia/IMediaLogService.cpp index a4af7b7..1536337 100644 --- a/media/libmedia/IMediaLogService.cpp +++ b/media/libmedia/IMediaLogService.cpp @@ -45,7 +45,7 @@ public:          data.writeStrongBinder(IInterface::asBinder(shared));          data.writeInt64((int64_t) size);          data.writeCString(name); -        status_t status = remote()->transact(REGISTER_WRITER, data, &reply); +        status_t status __unused = remote()->transact(REGISTER_WRITER, data, &reply);          // FIXME ignores status      } @@ -53,7 +53,7 @@ public:          Parcel data, reply;          data.writeInterfaceToken(IMediaLogService::getInterfaceDescriptor());          data.writeStrongBinder(IInterface::asBinder(shared)); -        status_t status = remote()->transact(UNREGISTER_WRITER, data, &reply); +        status_t status __unused = remote()->transact(UNREGISTER_WRITER, data, &reply);          // FIXME ignores status      } @@ -91,4 +91,4 @@ status_t BnMediaLogService::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp index aa2665a..9765f0d 100644 --- a/media/libmedia/IMediaMetadataRetriever.cpp +++ b/media/libmedia/IMediaMetadataRetriever.cpp @@ -20,6 +20,7 @@  #include <sys/types.h>  #include <binder/Parcel.h> +#include <media/IDataSource.h>  #include <media/IMediaHTTPService.h>  #include <media/IMediaMetadataRetriever.h>  #include <utils/String8.h> @@ -65,6 +66,7 @@ enum {      
DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,      SET_DATA_SOURCE_URL,      SET_DATA_SOURCE_FD, +    SET_DATA_SOURCE_CALLBACK,      GET_FRAME_AT_TIME,      EXTRACT_ALBUM_ART,      EXTRACT_METADATA, @@ -125,6 +127,15 @@ public:          return reply.readInt32();      } +    status_t setDataSource(const sp<IDataSource>& source) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor()); +        data.writeStrongBinder(IInterface::asBinder(source)); +        remote()->transact(SET_DATA_SOURCE_CALLBACK, data, &reply); +        return reply.readInt32(); +    } +      sp<IMemory> getFrameAtTime(int64_t timeUs, int option)      {          ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option); @@ -235,6 +246,13 @@ status_t BnMediaMetadataRetriever::onTransact(              reply->writeInt32(setDataSource(fd, offset, length));              return NO_ERROR;          } break; +        case SET_DATA_SOURCE_CALLBACK: { +            CHECK_INTERFACE(IMediaMetadataRetriever, data, reply); +            sp<IDataSource> source = +                interface_cast<IDataSource>(data.readStrongBinder()); +            reply->writeInt32(setDataSource(source)); +            return NO_ERROR; +        } break;          case GET_FRAME_AT_TIME: {              CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);              int64_t timeUs = data.readInt64(); @@ -297,4 +315,4 @@ status_t BnMediaMetadataRetriever::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp index 7f3e5cc..bde35f2 100644 --- a/media/libmedia/IMediaPlayer.cpp +++ b/media/libmedia/IMediaPlayer.cpp @@ -21,6 +21,10 @@  #include <binder/Parcel.h> +#include <media/AudioResamplerPublic.h> +#include <media/AVSyncSettings.h> + +#include <media/IDataSource.h>  #include 
<media/IMediaHTTPService.h>  #include <media/IMediaPlayer.h>  #include <media/IStreamSource.h> @@ -35,10 +39,15 @@ enum {      SET_DATA_SOURCE_URL,      SET_DATA_SOURCE_FD,      SET_DATA_SOURCE_STREAM, +    SET_DATA_SOURCE_CALLBACK,      PREPARE_ASYNC,      START,      STOP,      IS_PLAYING, +    SET_PLAYBACK_SETTINGS, +    GET_PLAYBACK_SETTINGS, +    SET_SYNC_SETTINGS, +    GET_SYNC_SETTINGS,      PAUSE,      SEEK_TO,      GET_CURRENT_POSITION, @@ -120,6 +129,14 @@ public:          return reply.readInt32();      } +    status_t setDataSource(const sp<IDataSource> &source) { +        Parcel data, reply; +        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); +        data.writeStrongBinder(IInterface::asBinder(source)); +        remote()->transact(SET_DATA_SOURCE_CALLBACK, data, &reply); +        return reply.readInt32(); +    } +      // pass the buffered IGraphicBufferProducer to the media player service      status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer>& bufferProducer)      { @@ -164,6 +181,63 @@ public:          return reply.readInt32();      } +    status_t setPlaybackSettings(const AudioPlaybackRate& rate) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); +        data.writeFloat(rate.mSpeed); +        data.writeFloat(rate.mPitch); +        data.writeInt32((int32_t)rate.mFallbackMode); +        data.writeInt32((int32_t)rate.mStretchMode); +        remote()->transact(SET_PLAYBACK_SETTINGS, data, &reply); +        return reply.readInt32(); +    } + +    status_t getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); +        remote()->transact(GET_PLAYBACK_SETTINGS, data, &reply); +        status_t err = reply.readInt32(); +        if (err == OK) { +            *rate = AUDIO_PLAYBACK_RATE_DEFAULT; +            rate->mSpeed = reply.readFloat(); +      
      rate->mPitch = reply.readFloat(); +            rate->mFallbackMode = (AudioTimestretchFallbackMode)reply.readInt32(); +            rate->mStretchMode = (AudioTimestretchStretchMode)reply.readInt32(); +        } +        return err; +    } + +    status_t setSyncSettings(const AVSyncSettings& sync, float videoFpsHint) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); +        data.writeInt32((int32_t)sync.mSource); +        data.writeInt32((int32_t)sync.mAudioAdjustMode); +        data.writeFloat(sync.mTolerance); +        data.writeFloat(videoFpsHint); +        remote()->transact(SET_SYNC_SETTINGS, data, &reply); +        return reply.readInt32(); +    } + +    status_t getSyncSettings(AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */) +    { +        Parcel data, reply; +        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); +        remote()->transact(GET_SYNC_SETTINGS, data, &reply); +        status_t err = reply.readInt32(); +        if (err == OK) { +            AVSyncSettings settings; +            settings.mSource = (AVSyncSource)reply.readInt32(); +            settings.mAudioAdjustMode = (AVSyncAudioAdjustMode)reply.readInt32(); +            settings.mTolerance = reply.readFloat(); +            *sync = settings; +            *videoFps = reply.readFloat(); +        } +        return err; +    } +      status_t pause()      {          Parcel data, reply; @@ -396,6 +470,13 @@ status_t BnMediaPlayer::onTransact(              reply->writeInt32(setDataSource(source));              return NO_ERROR;          } +        case SET_DATA_SOURCE_CALLBACK: { +            CHECK_INTERFACE(IMediaPlayer, data, reply); +            sp<IDataSource> source = +                interface_cast<IDataSource>(data.readStrongBinder()); +            reply->writeInt32(setDataSource(source)); +            return NO_ERROR; +        }          case SET_VIDEO_SURFACETEXTURE: {              
CHECK_INTERFACE(IMediaPlayer, data, reply);              sp<IGraphicBufferProducer> bufferProducer = @@ -426,6 +507,53 @@ status_t BnMediaPlayer::onTransact(              reply->writeInt32(ret);              return NO_ERROR;          } break; +        case SET_PLAYBACK_SETTINGS: { +            CHECK_INTERFACE(IMediaPlayer, data, reply); +            AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT; +            rate.mSpeed = data.readFloat(); +            rate.mPitch = data.readFloat(); +            rate.mFallbackMode = (AudioTimestretchFallbackMode)data.readInt32(); +            rate.mStretchMode = (AudioTimestretchStretchMode)data.readInt32(); +            reply->writeInt32(setPlaybackSettings(rate)); +            return NO_ERROR; +        } break; +        case GET_PLAYBACK_SETTINGS: { +            CHECK_INTERFACE(IMediaPlayer, data, reply); +            AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT; +            status_t err = getPlaybackSettings(&rate); +            reply->writeInt32(err); +            if (err == OK) { +                reply->writeFloat(rate.mSpeed); +                reply->writeFloat(rate.mPitch); +                reply->writeInt32((int32_t)rate.mFallbackMode); +                reply->writeInt32((int32_t)rate.mStretchMode); +            } +            return NO_ERROR; +        } break; +        case SET_SYNC_SETTINGS: { +            CHECK_INTERFACE(IMediaPlayer, data, reply); +            AVSyncSettings sync; +            sync.mSource = (AVSyncSource)data.readInt32(); +            sync.mAudioAdjustMode = (AVSyncAudioAdjustMode)data.readInt32(); +            sync.mTolerance = data.readFloat(); +            float videoFpsHint = data.readFloat(); +            reply->writeInt32(setSyncSettings(sync, videoFpsHint)); +            return NO_ERROR; +        } break; +        case GET_SYNC_SETTINGS: { +            CHECK_INTERFACE(IMediaPlayer, data, reply); +            AVSyncSettings sync; +            float videoFps; +            status_t 
err = getSyncSettings(&sync, &videoFps); +            reply->writeInt32(err); +            if (err == OK) { +                reply->writeInt32((int32_t)sync.mSource); +                reply->writeInt32((int32_t)sync.mAudioAdjustMode); +                reply->writeFloat(sync.mTolerance); +                reply->writeFloat(videoFps); +            } +            return NO_ERROR; +        } break;          case PAUSE: {              CHECK_INTERFACE(IMediaPlayer, data, reply);              reply->writeInt32(pause()); @@ -559,4 +687,4 @@ status_t BnMediaPlayer::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaPlayerClient.cpp b/media/libmedia/IMediaPlayerClient.cpp index a670c96..d608386 100644 --- a/media/libmedia/IMediaPlayerClient.cpp +++ b/media/libmedia/IMediaPlayerClient.cpp @@ -75,4 +75,4 @@ status_t BnMediaPlayerClient::onTransact(      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index feea267..05f8670 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -78,10 +78,11 @@ public:          return interface_cast<IMediaPlayer>(reply.readStrongBinder());      } -    virtual sp<IMediaRecorder> createMediaRecorder() +    virtual sp<IMediaRecorder> createMediaRecorder(const String16 &opPackageName)      {          Parcel data, reply;          data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); +        data.writeString16(opPackageName);          remote()->transact(CREATE_MEDIA_RECORDER, data, &reply);          return interface_cast<IMediaRecorder>(reply.readStrongBinder());      } @@ -128,11 +129,12 @@ public:          return remote()->transact(PULL_BATTERY_DATA, data, reply);      } -    virtual sp<IRemoteDisplay> listenForRemoteDisplay(const sp<IRemoteDisplayClient>& client, - 
           const String8& iface) +    virtual sp<IRemoteDisplay> listenForRemoteDisplay(const String16 &opPackageName, +            const sp<IRemoteDisplayClient>& client, const String8& iface)      {          Parcel data, reply;          data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); +        data.writeString16(opPackageName);          data.writeStrongBinder(IInterface::asBinder(client));          data.writeString8(iface);          remote()->transact(LISTEN_FOR_REMOTE_DISPLAY, data, &reply); @@ -166,7 +168,8 @@ status_t BnMediaPlayerService::onTransact(          } break;          case CREATE_MEDIA_RECORDER: {              CHECK_INTERFACE(IMediaPlayerService, data, reply); -            sp<IMediaRecorder> recorder = createMediaRecorder(); +            const String16 opPackageName = data.readString16(); +            sp<IMediaRecorder> recorder = createMediaRecorder(opPackageName);              reply->writeStrongBinder(IInterface::asBinder(recorder));              return NO_ERROR;          } break; @@ -214,10 +217,11 @@ status_t BnMediaPlayerService::onTransact(          } break;          case LISTEN_FOR_REMOTE_DISPLAY: {              CHECK_INTERFACE(IMediaPlayerService, data, reply); +            const String16 opPackageName = data.readString16();              sp<IRemoteDisplayClient> client(                      interface_cast<IRemoteDisplayClient>(data.readStrongBinder()));              String8 iface(data.readString8()); -            sp<IRemoteDisplay> display(listenForRemoteDisplay(client, iface)); +            sp<IRemoteDisplay> display(listenForRemoteDisplay(opPackageName, client, iface));              reply->writeStrongBinder(IInterface::asBinder(display));              return NO_ERROR;          } break; @@ -234,4 +238,4 @@ status_t BnMediaPlayerService::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git 
a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index a733b68..ee3b584 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -35,6 +35,7 @@ enum {      RELEASE = IBinder::FIRST_CALL_TRANSACTION,      INIT,      CLOSE, +    SET_INPUT_SURFACE,      QUERY_SURFACE_MEDIASOURCE,      RESET,      STOP, @@ -46,7 +47,6 @@ enum {      SET_OUTPUT_FORMAT,      SET_VIDEO_ENCODER,      SET_AUDIO_ENCODER, -    SET_OUTPUT_FILE_PATH,      SET_OUTPUT_FILE_FD,      SET_VIDEO_SIZE,      SET_VIDEO_FRAMERATE, @@ -76,6 +76,16 @@ public:          return reply.readInt32();      } +    status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface) +    { +        ALOGV("setInputSurface(%p)", surface.get()); +        Parcel data, reply; +        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); +        data.writeStrongBinder(IInterface::asBinder(surface)); +        remote()->transact(SET_INPUT_SURFACE, data, &reply); +        return reply.readInt32(); +    } +      sp<IGraphicBufferProducer> querySurfaceMediaSource()      {          ALOGV("Query SurfaceMediaSource"); @@ -158,16 +168,6 @@ public:          return reply.readInt32();      } -    status_t setOutputFile(const char* path) -    { -        ALOGV("setOutputFile(%s)", path); -        Parcel data, reply; -        data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); -        data.writeCString(path); -        remote()->transact(SET_OUTPUT_FILE_PATH, data, &reply); -        return reply.readInt32(); -    } -      status_t setOutputFile(int fd, int64_t offset, int64_t length) {          ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);          Parcel data, reply; @@ -300,7 +300,8 @@ IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");  // ----------------------------------------------------------------------  status_t BnMediaRecorder::onTransact( -                                     uint32_t code, 
const Parcel& data, Parcel* reply, uint32_t flags) +                                     uint32_t code, const Parcel& data, Parcel* reply, +                                     uint32_t flags)  {      switch (code) {          case RELEASE: { @@ -390,13 +391,6 @@ status_t BnMediaRecorder::onTransact(              return NO_ERROR;          } break; -        case SET_OUTPUT_FILE_PATH: { -            ALOGV("SET_OUTPUT_FILE_PATH"); -            CHECK_INTERFACE(IMediaRecorder, data, reply); -            const char* path = data.readCString(); -            reply->writeInt32(setOutputFile(path)); -            return NO_ERROR; -        } break;          case SET_OUTPUT_FILE_FD: {              ALOGV("SET_OUTPUT_FILE_FD");              CHECK_INTERFACE(IMediaRecorder, data, reply); @@ -445,7 +439,8 @@ status_t BnMediaRecorder::onTransact(          case SET_PREVIEW_SURFACE: {              ALOGV("SET_PREVIEW_SURFACE");              CHECK_INTERFACE(IMediaRecorder, data, reply); -            sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>(data.readStrongBinder()); +            sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>( +                    data.readStrongBinder());              reply->writeInt32(setPreviewSurface(surface));              return NO_ERROR;          } break; @@ -458,6 +453,14 @@ status_t BnMediaRecorder::onTransact(              reply->writeInt32(setCamera(camera, proxy));              return NO_ERROR;          } break; +        case SET_INPUT_SURFACE: { +            ALOGV("SET_INPUT_SURFACE"); +            CHECK_INTERFACE(IMediaRecorder, data, reply); +            sp<IGraphicBufferConsumer> surface = interface_cast<IGraphicBufferConsumer>( +                    data.readStrongBinder()); +            reply->writeInt32(setInputSurface(surface)); +            return NO_ERROR; +        } break;          case QUERY_SURFACE_MEDIASOURCE: {              ALOGV("QUERY_SURFACE_MEDIASOURCE");              
CHECK_INTERFACE(IMediaRecorder, data, reply); @@ -479,4 +482,4 @@ status_t BnMediaRecorder::onTransact(  // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaRecorderClient.cpp b/media/libmedia/IMediaRecorderClient.cpp index e7907e3..6795d23 100644 --- a/media/libmedia/IMediaRecorderClient.cpp +++ b/media/libmedia/IMediaRecorderClient.cpp @@ -67,4 +67,4 @@ status_t BnMediaRecorderClient::onTransact(      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp index e208df9..16da65e 100644 --- a/media/libmedia/IOMX.cpp +++ b/media/libmedia/IOMX.cpp @@ -41,6 +41,8 @@ enum {      USE_BUFFER,      USE_GRAPHIC_BUFFER,      CREATE_INPUT_SURFACE, +    CREATE_PERSISTENT_INPUT_SURFACE, +    SET_INPUT_SURFACE,      SIGNAL_END_OF_INPUT_STREAM,      STORE_META_DATA_IN_BUFFERS,      PREPARE_FOR_ADAPTIVE_PLAYBACK, @@ -243,12 +245,13 @@ public:      virtual status_t useBuffer(              node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -            buffer_id *buffer) { +            buffer_id *buffer, OMX_U32 allottedSize) {          Parcel data, reply;          data.writeInterfaceToken(IOMX::getInterfaceDescriptor());          data.writeInt32((int32_t)node);          data.writeInt32(port_index);          data.writeStrongBinder(IInterface::asBinder(params)); +        data.writeInt32(allottedSize);          remote()->transact(USE_BUFFER, data, &reply);          status_t err = reply.readInt32(); @@ -303,7 +306,7 @@ public:      virtual status_t createInputSurface(              node_id node, OMX_U32 port_index, -            sp<IGraphicBufferProducer> *bufferProducer) { +            sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {          Parcel data, reply;          status_t err;          data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); @@ -315,6 +318,12 @@ public:          
    return err;          } +        // read type even if createInputSurface failed +        int negotiatedType = reply.readInt32(); +        if (type != NULL) { +            *type = (MetadataBufferType)negotiatedType; +        } +          err = reply.readInt32();          if (err != OK) {              return err; @@ -326,6 +335,57 @@ public:          return err;      } +    virtual status_t createPersistentInputSurface( +            sp<IGraphicBufferProducer> *bufferProducer, +            sp<IGraphicBufferConsumer> *bufferConsumer) { +        Parcel data, reply; +        status_t err; +        data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); +        err = remote()->transact(CREATE_PERSISTENT_INPUT_SURFACE, data, &reply); +        if (err != OK) { +            ALOGW("binder transaction failed: %d", err); +            return err; +        } + +        err = reply.readInt32(); +        if (err != OK) { +            return err; +        } + +        *bufferProducer = IGraphicBufferProducer::asInterface( +                reply.readStrongBinder()); +        *bufferConsumer = IGraphicBufferConsumer::asInterface( +                reply.readStrongBinder()); + +        return err; +    } + +    virtual status_t setInputSurface( +            node_id node, OMX_U32 port_index, +            const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) { +        Parcel data, reply; +        data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); +        status_t err; +        data.writeInt32((int32_t)node); +        data.writeInt32(port_index); +        data.writeStrongBinder(IInterface::asBinder(bufferConsumer)); + +        err = remote()->transact(SET_INPUT_SURFACE, data, &reply); + +        if (err != OK) { +            ALOGW("binder transaction failed: %d", err); +            return err; +        } + +        // read type even if setInputSurface failed +        int negotiatedType = reply.readInt32(); +        if (type != NULL) { +            *type 
= (MetadataBufferType)negotiatedType; +        } + +        return reply.readInt32(); +    } +      virtual status_t signalEndOfInputStream(node_id node) {          Parcel data, reply;          status_t err; @@ -341,7 +401,7 @@ public:      }      virtual status_t storeMetaDataInBuffers( -            node_id node, OMX_U32 port_index, OMX_BOOL enable) { +            node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) {          Parcel data, reply;          data.writeInterfaceToken(IOMX::getInterfaceDescriptor());          data.writeInt32((int32_t)node); @@ -349,8 +409,13 @@ public:          data.writeInt32((uint32_t)enable);          remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply); -        status_t err = reply.readInt32(); -        return err; +        // read type even storeMetaDataInBuffers failed +        int negotiatedType = reply.readInt32(); +        if (type != NULL) { +            *type = (MetadataBufferType)negotiatedType; +        } + +        return reply.readInt32();      }      virtual status_t prepareForAdaptivePlayback( @@ -413,12 +478,13 @@ public:      virtual status_t allocateBufferWithBackup(              node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -            buffer_id *buffer) { +            buffer_id *buffer, OMX_U32 allottedSize) {          Parcel data, reply;          data.writeInterfaceToken(IOMX::getInterfaceDescriptor());          data.writeInt32((int32_t)node);          data.writeInt32(port_index);          data.writeStrongBinder(IInterface::asBinder(params)); +        data.writeInt32(allottedSize);          remote()->transact(ALLOC_BUFFER_WITH_BACKUP, data, &reply);          status_t err = reply.readInt32(); @@ -445,11 +511,15 @@ public:          return reply.readInt32();      } -    virtual status_t fillBuffer(node_id node, buffer_id buffer) { +    virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd) {          Parcel data, reply;          
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());          data.writeInt32((int32_t)node);          data.writeInt32((int32_t)buffer); +        data.writeInt32(fenceFd >= 0); +        if (fenceFd >= 0) { +            data.writeFileDescriptor(fenceFd, true /* takeOwnership */); +        }          remote()->transact(FILL_BUFFER, data, &reply);          return reply.readInt32(); @@ -459,7 +529,7 @@ public:              node_id node,              buffer_id buffer,              OMX_U32 range_offset, OMX_U32 range_length, -            OMX_U32 flags, OMX_TICKS timestamp) { +            OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {          Parcel data, reply;          data.writeInterfaceToken(IOMX::getInterfaceDescriptor());          data.writeInt32((int32_t)node); @@ -468,6 +538,10 @@ public:          data.writeInt32(range_length);          data.writeInt32(flags);          data.writeInt64(timestamp); +        data.writeInt32(fenceFd >= 0); +        if (fenceFd >= 0) { +            data.writeFileDescriptor(fenceFd, true /* takeOwnership */); +        }          remote()->transact(EMPTY_BUFFER, data, &reply);          return reply.readInt32(); @@ -711,9 +785,10 @@ status_t BnOMX::onTransact(              OMX_U32 port_index = data.readInt32();              sp<IMemory> params =                  interface_cast<IMemory>(data.readStrongBinder()); +            OMX_U32 allottedSize = data.readInt32();              buffer_id buffer; -            status_t err = useBuffer(node, port_index, params, &buffer); +            status_t err = useBuffer(node, port_index, params, &buffer, allottedSize);              reply->writeInt32(err);              if (err == OK) { @@ -769,18 +844,56 @@ status_t BnOMX::onTransact(              OMX_U32 port_index = data.readInt32();              sp<IGraphicBufferProducer> bufferProducer; -            status_t err = createInputSurface(node, port_index, -                    &bufferProducer); +            MetadataBufferType type; +            
status_t err = createInputSurface(node, port_index, &bufferProducer, &type); + +            reply->writeInt32(type); +            reply->writeInt32(err); + +            if (err == OK) { +                reply->writeStrongBinder(IInterface::asBinder(bufferProducer)); +            } + +            return NO_ERROR; +        } + +        case CREATE_PERSISTENT_INPUT_SURFACE: +        { +            CHECK_OMX_INTERFACE(IOMX, data, reply); + +            sp<IGraphicBufferProducer> bufferProducer; +            sp<IGraphicBufferConsumer> bufferConsumer; +            status_t err = createPersistentInputSurface( +                    &bufferProducer, &bufferConsumer);              reply->writeInt32(err);              if (err == OK) {                  reply->writeStrongBinder(IInterface::asBinder(bufferProducer)); +                reply->writeStrongBinder(IInterface::asBinder(bufferConsumer));              }              return NO_ERROR;          } +        case SET_INPUT_SURFACE: +        { +            CHECK_OMX_INTERFACE(IOMX, data, reply); + +            node_id node = (node_id)data.readInt32(); +            OMX_U32 port_index = data.readInt32(); + +            sp<IGraphicBufferConsumer> bufferConsumer = +                    interface_cast<IGraphicBufferConsumer>(data.readStrongBinder()); + +            MetadataBufferType type; +            status_t err = setInputSurface(node, port_index, bufferConsumer, &type); + +            reply->writeInt32(type); +            reply->writeInt32(err); +            return NO_ERROR; +        } +          case SIGNAL_END_OF_INPUT_STREAM:          {              CHECK_OMX_INTERFACE(IOMX, data, reply); @@ -801,7 +914,9 @@ status_t BnOMX::onTransact(              OMX_U32 port_index = data.readInt32();              OMX_BOOL enable = (OMX_BOOL)data.readInt32(); -            status_t err = storeMetaDataInBuffers(node, port_index, enable); +            MetadataBufferType type; +            status_t err = storeMetaDataInBuffers(node, port_index, 
enable, &type); +            reply->writeInt32(type);              reply->writeInt32(err);              return NO_ERROR; @@ -872,10 +987,11 @@ status_t BnOMX::onTransact(              OMX_U32 port_index = data.readInt32();              sp<IMemory> params =                  interface_cast<IMemory>(data.readStrongBinder()); +            OMX_U32 allottedSize = data.readInt32();              buffer_id buffer;              status_t err = allocateBufferWithBackup( -                    node, port_index, params, &buffer); +                    node, port_index, params, &buffer, allottedSize);              reply->writeInt32(err); @@ -904,7 +1020,9 @@ status_t BnOMX::onTransact(              node_id node = (node_id)data.readInt32();              buffer_id buffer = (buffer_id)data.readInt32(); -            reply->writeInt32(fillBuffer(node, buffer)); +            bool haveFence = data.readInt32(); +            int fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1; +            reply->writeInt32(fillBuffer(node, buffer, fenceFd));              return NO_ERROR;          } @@ -919,11 +1037,10 @@ status_t BnOMX::onTransact(              OMX_U32 range_length = data.readInt32();              OMX_U32 flags = data.readInt32();              OMX_TICKS timestamp = data.readInt64(); - -            reply->writeInt32( -                    emptyBuffer( -                        node, buffer, range_offset, range_length, -                        flags, timestamp)); +            bool haveFence = data.readInt32(); +            int fenceFd = haveFence ? 
::dup(data.readFileDescriptor()) : -1; +            reply->writeInt32(emptyBuffer( +                    node, buffer, range_offset, range_length, flags, timestamp, fenceFd));              return NO_ERROR;          } @@ -960,14 +1077,29 @@ public:          : BpInterface<IOMXObserver>(impl) {      } -    virtual void onMessage(const omx_message &msg) { +    virtual void onMessages(const std::list<omx_message> &messages) {          Parcel data, reply; -        data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor()); -        data.write(&msg, sizeof(msg)); - -        ALOGV("onMessage writing message %d, size %zu", msg.type, sizeof(msg)); - -        remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY); +        std::list<omx_message>::const_iterator it = messages.cbegin(); +        bool first = true; +        while (it != messages.cend()) { +            const omx_message &msg = *it++; +            if (first) { +                data.writeInterfaceToken(IOMXObserver::getInterfaceDescriptor()); +                data.writeInt32(msg.node); +                first = false; +            } +            data.writeInt32(msg.fenceFd >= 0); +            if (msg.fenceFd >= 0) { +                data.writeFileDescriptor(msg.fenceFd, true /* takeOwnership */); +            } +            data.writeInt32(msg.type); +            data.write(&msg.u, sizeof(msg.u)); +            ALOGV("onMessage writing message %d, size %zu", msg.type, sizeof(msg)); +        } +        if (!first) { +            data.writeInt32(-1); // mark end +            remote()->transact(OBSERVER_ON_MSG, data, &reply, IBinder::FLAG_ONEWAY); +        }      }  }; @@ -979,16 +1111,28 @@ status_t BnOMXObserver::onTransact(          case OBSERVER_ON_MSG:          {              CHECK_OMX_INTERFACE(IOMXObserver, data, reply); +            IOMX::node_id node = data.readInt32(); +            std::list<omx_message> messages; +            status_t err = FAILED_TRANSACTION; // must receive at least 
one message +            do { +                int haveFence = data.readInt32(); +                if (haveFence < 0) { // we use -1 to mark end of messages +                    break; +                } +                omx_message msg; +                msg.node = node; +                msg.fenceFd = haveFence ? ::dup(data.readFileDescriptor()) : -1; +                msg.type = (typeof(msg.type))data.readInt32(); +                err = data.read(&msg.u, sizeof(msg.u)); +                ALOGV("onTransact reading message %d, size %zu", msg.type, sizeof(msg)); +                messages.push_back(msg); +            } while (err == OK); -            omx_message msg; -            data.read(&msg, sizeof(msg)); - -            ALOGV("onTransact reading message %d, size %zu", msg.type, sizeof(msg)); - -            // XXX Could use readInplace maybe? -            onMessage(msg); +            if (err == OK) { +                onMessages(messages); +            } -            return NO_ERROR; +            return err;          }          default: diff --git a/media/libmedia/IRemoteDisplay.cpp b/media/libmedia/IRemoteDisplay.cpp index 1e15434..869d11a 100644 --- a/media/libmedia/IRemoteDisplay.cpp +++ b/media/libmedia/IRemoteDisplay.cpp @@ -91,4 +91,4 @@ status_t BnRemoteDisplay::onTransact(      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IRemoteDisplayClient.cpp b/media/libmedia/IRemoteDisplayClient.cpp index 9d63bc9..bedeb6c 100644 --- a/media/libmedia/IRemoteDisplayClient.cpp +++ b/media/libmedia/IRemoteDisplayClient.cpp @@ -101,4 +101,4 @@ status_t BnRemoteDisplayClient::onTransact(      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IResourceManagerClient.cpp b/media/libmedia/IResourceManagerClient.cpp new file mode 100644 index 0000000..b3f56e8 --- /dev/null +++ b/media/libmedia/IResourceManagerClient.cpp @@ -0,0 +1,90 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed 
under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +**     http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#include <utils/RefBase.h> +#include <binder/IInterface.h> +#include <binder/Parcel.h> + +#include <media/IResourceManagerClient.h> + +namespace android { + +enum { +    RECLAIM_RESOURCE = IBinder::FIRST_CALL_TRANSACTION, +    GET_NAME, +}; + +class BpResourceManagerClient: public BpInterface<IResourceManagerClient> +{ +public: +    BpResourceManagerClient(const sp<IBinder> &impl) +        : BpInterface<IResourceManagerClient>(impl) +    { +    } + +    virtual bool reclaimResource() { +        Parcel data, reply; +        data.writeInterfaceToken(IResourceManagerClient::getInterfaceDescriptor()); + +        bool ret = false; +        status_t status = remote()->transact(RECLAIM_RESOURCE, data, &reply); +        if (status == NO_ERROR) { +            ret = (bool)reply.readInt32(); +        } +        return ret; +    } + +    virtual String8 getName() { +        Parcel data, reply; +        data.writeInterfaceToken(IResourceManagerClient::getInterfaceDescriptor()); + +        String8 ret; +        status_t status = remote()->transact(GET_NAME, data, &reply); +        if (status == NO_ERROR) { +            ret = reply.readString8(); +        } +        return ret; +    } + +}; + +IMPLEMENT_META_INTERFACE(ResourceManagerClient, "android.media.IResourceManagerClient"); + +// ---------------------------------------------------------------------- + +status_t BnResourceManagerClient::onTransact( +    uint32_t code, const 
Parcel &data, Parcel *reply, uint32_t flags) +{ +    switch (code) { +        case RECLAIM_RESOURCE: { +            CHECK_INTERFACE(IResourceManagerClient, data, reply); +            bool ret = reclaimResource(); +            reply->writeInt32(ret); +            return NO_ERROR; +        } break; +        case GET_NAME: { +            CHECK_INTERFACE(IResourceManagerClient, data, reply); +            String8 ret = getName(); +            reply->writeString8(ret); +            return NO_ERROR; +        } break; +        default: +            return BBinder::onTransact(code, data, reply, flags); +    } +} + +}; // namespace android diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp new file mode 100644 index 0000000..4598686 --- /dev/null +++ b/media/libmedia/IResourceManagerService.cpp @@ -0,0 +1,166 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +**     http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IResourceManagerService" +#include <utils/Log.h> + +#include "media/IResourceManagerService.h" + +#include <binder/Parcel.h> + +#include <stdint.h> +#include <sys/types.h> + +namespace android { + +enum { +    CONFIG = IBinder::FIRST_CALL_TRANSACTION, +    ADD_RESOURCE, +    REMOVE_RESOURCE, +    RECLAIM_RESOURCE, +}; + +template <typename T> +static void writeToParcel(Parcel *data, const Vector<T> &items) { +    size_t size = items.size(); +    // truncates size, but should be okay for this usecase +    data->writeUint32(static_cast<uint32_t>(size)); +    for (size_t i = 0; i < size; i++) { +        items[i].writeToParcel(data); +    } +} + +template <typename T> +static void readFromParcel(const Parcel &data, Vector<T> *items) { +    size_t size = (size_t)data.readUint32(); +    for (size_t i = 0; i < size && data.dataAvail() > 0; i++) { +        T item; +        item.readFromParcel(data); +        items->add(item); +    } +} + +class BpResourceManagerService : public BpInterface<IResourceManagerService> +{ +public: +    BpResourceManagerService(const sp<IBinder> &impl) +        : BpInterface<IResourceManagerService>(impl) +    { +    } + +    virtual void config(const Vector<MediaResourcePolicy> &policies) { +        Parcel data, reply; +        data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); +        writeToParcel(&data, policies); +        remote()->transact(CONFIG, data, &reply); +    } + +    virtual void addResource( +            int pid, +            int64_t clientId, +            const sp<IResourceManagerClient> client, +            const Vector<MediaResource> &resources) { +        Parcel data, reply; +        data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); +        data.writeInt32(pid); +        data.writeInt64(clientId); +        data.writeStrongBinder(IInterface::asBinder(client)); +        writeToParcel(&data, resources); + +        
remote()->transact(ADD_RESOURCE, data, &reply); +    } + +    virtual void removeResource(int pid, int64_t clientId) { +        Parcel data, reply; +        data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); +        data.writeInt32(pid); +        data.writeInt64(clientId); + +        remote()->transact(REMOVE_RESOURCE, data, &reply); +    } + +    virtual bool reclaimResource(int callingPid, const Vector<MediaResource> &resources) { +        Parcel data, reply; +        data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); +        data.writeInt32(callingPid); +        writeToParcel(&data, resources); + +        bool ret = false; +        status_t status = remote()->transact(RECLAIM_RESOURCE, data, &reply); +        if (status == NO_ERROR) { +            ret = (bool)reply.readInt32(); +        } +        return ret; +    } +}; + +IMPLEMENT_META_INTERFACE(ResourceManagerService, "android.media.IResourceManagerService"); + +// ---------------------------------------------------------------------- + + +status_t BnResourceManagerService::onTransact( +    uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) +{ +    switch (code) { +        case CONFIG: { +            CHECK_INTERFACE(IResourceManagerService, data, reply); +            Vector<MediaResourcePolicy> policies; +            readFromParcel(data, &policies); +            config(policies); +            return NO_ERROR; +        } break; + +        case ADD_RESOURCE: { +            CHECK_INTERFACE(IResourceManagerService, data, reply); +            int pid = data.readInt32(); +            int64_t clientId = data.readInt64(); +            sp<IResourceManagerClient> client( +                    interface_cast<IResourceManagerClient>(data.readStrongBinder())); +            Vector<MediaResource> resources; +            readFromParcel(data, &resources); +            addResource(pid, clientId, client, resources); +            return NO_ERROR; +        } 
break; + +        case REMOVE_RESOURCE: { +            CHECK_INTERFACE(IResourceManagerService, data, reply); +            int pid = data.readInt32(); +            int64_t clientId = data.readInt64(); +            removeResource(pid, clientId); +            return NO_ERROR; +        } break; + +        case RECLAIM_RESOURCE: { +            CHECK_INTERFACE(IResourceManagerService, data, reply); +            int callingPid = data.readInt32(); +            Vector<MediaResource> resources; +            readFromParcel(data, &resources); +            bool ret = reclaimResource(callingPid, resources); +            reply->writeInt32(ret); +            return NO_ERROR; +        } break; + +        default: +            return BBinder::onTransact(code, data, reply, flags); +    } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp index d480aef..840e453 100644 --- a/media/libmedia/IStreamSource.cpp +++ b/media/libmedia/IStreamSource.cpp @@ -35,6 +35,9 @@ const char *const IStreamListener::kKeyDiscontinuityMask = "discontinuity-mask";  // static  const char *const IStreamListener::kKeyMediaTimeUs = "media-time-us"; +// static +const char *const IStreamListener::kKeyRecentMediaTimeUs = "recent-media-time-us"; +  enum {      // IStreamSource      SET_LISTENER = IBinder::FIRST_CALL_TRANSACTION, diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp index 721d8d7..34deb59 100644 --- a/media/libmedia/JetPlayer.cpp +++ b/media/libmedia/JetPlayer.cpp @@ -85,12 +85,18 @@ int JetPlayer::init()      // create the output AudioTrack      mAudioTrack = new AudioTrack(); -    mAudioTrack->set(AUDIO_STREAM_MUSIC,  //TODO parameterize this +    status_t status = mAudioTrack->set(AUDIO_STREAM_MUSIC,  //TODO parameterize this              pLibConfig->sampleRate,              AUDIO_FORMAT_PCM_16_BIT,              
audio_channel_out_mask_from_count(pLibConfig->numChannels),              (size_t) mTrackBufferSize,              AUDIO_OUTPUT_FLAG_NONE); +    if (status != OK) { +        ALOGE("JetPlayer::init(): Error initializing JET library; AudioTrack error %d", status); +        mAudioTrack.clear(); +        mState = EAS_STATE_ERROR; +        return EAS_FAILURE; +    }      // create render and playback thread      { @@ -408,7 +414,8 @@ int JetPlayer::queueSegment(int segmentNum, int libNum, int repeatCount, int tra      ALOGV("JetPlayer::queueSegment segmentNum=%d, libNum=%d, repeatCount=%d, transpose=%d",          segmentNum, libNum, repeatCount, transpose);      Mutex::Autolock lock(mMutex); -    return JET_QueueSegment(mEasData, segmentNum, libNum, repeatCount, transpose, muteFlags, userID); +    return JET_QueueSegment(mEasData, segmentNum, libNum, repeatCount, transpose, muteFlags, +            userID);  }  //------------------------------------------------------------------------------------------------- @@ -449,7 +456,8 @@ void JetPlayer::dump()  void JetPlayer::dumpJetStatus(S_JET_STATUS* pJetStatus)  {      if (pJetStatus!=NULL) -        ALOGV(">> current JET player status: userID=%d segmentRepeatCount=%d numQueuedSegments=%d paused=%d", +        ALOGV(">> current JET player status: userID=%d segmentRepeatCount=%d numQueuedSegments=%d " +                "paused=%d",                  pJetStatus->currentUserID, pJetStatus->segmentRepeatCount,                  pJetStatus->numQueuedSegments, pJetStatus->paused);      else diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp index 7b4c4e2..8d3fa7b 100644 --- a/media/libmedia/MediaCodecInfo.cpp +++ b/media/libmedia/MediaCodecInfo.cpp @@ -206,6 +206,17 @@ status_t MediaCodecInfo::addMime(const char *mime) {      return OK;  } +status_t MediaCodecInfo::updateMime(const char *mime) { +    ssize_t ix = getCapabilityIndex(mime); +    if (ix < 0) { +        ALOGE("updateMime mime not found %s", 
mime); +        return -EINVAL; +    } + +    mCurrentCaps = mCaps.valueAt(ix); +    return OK; +} +  void MediaCodecInfo::removeMime(const char *mime) {      ssize_t ix = getCapabilityIndex(mime);      if (ix >= 0) { diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp index e2e6042..c5790fb 100644 --- a/media/libmedia/MediaProfiles.cpp +++ b/media/libmedia/MediaProfiles.cpp @@ -152,18 +152,9 @@ MediaProfiles::logAudioDecoderCap(const MediaProfiles::AudioDecoderCap& cap UNUS      ALOGV("codec = %d", cap.mCodec);  } -/*static*/ void -MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap UNUSED) -{ -    ALOGV("videoeditor cap:"); -    ALOGV("mMaxInputFrameWidth = %d", cap.mMaxInputFrameWidth); -    ALOGV("mMaxInputFrameHeight = %d", cap.mMaxInputFrameHeight); -    ALOGV("mMaxOutputFrameWidth = %d", cap.mMaxOutputFrameWidth); -    ALOGV("mMaxOutputFrameHeight = %d", cap.mMaxOutputFrameHeight); -} -  /*static*/ int -MediaProfiles::findTagForName(const MediaProfiles::NameToTagMap *map, size_t nMappings, const char *name) +MediaProfiles::findTagForName(const MediaProfiles::NameToTagMap *map, size_t nMappings, +        const char *name)  {      int tag = -1;      for (size_t i = 0; i < nMappings; ++i) { @@ -295,9 +286,8 @@ MediaProfiles::createAudioEncoderCap(const char **atts)      CHECK(codec != -1);      MediaProfiles::AudioEncoderCap *cap = -        new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]), atoi(atts[7]), -            atoi(atts[9]), atoi(atts[11]), atoi(atts[13]), -            atoi(atts[15])); +        new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]), +            atoi(atts[7]), atoi(atts[9]), atoi(atts[11]), atoi(atts[13]), atoi(atts[15]));      logAudioEncoderCap(*cap);      return cap;  } @@ -330,7 +320,8 @@ MediaProfiles::createCamcorderProfile(int cameraId, const char **atts, Vector<in            !strcmp("fileFormat", atts[2]) &&          
  !strcmp("duration",   atts[4])); -    const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/sizeof(sCamcorderQualityNameMap[0]); +    const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/ +            sizeof(sCamcorderQualityNameMap[0]);      const int quality = findTagForName(sCamcorderQualityNameMap, nProfileMappings, atts[1]);      CHECK(quality != -1); @@ -397,42 +388,6 @@ void MediaProfiles::addStartTimeOffset(int cameraId, const char** atts)      ALOGV("%s: cameraId=%d, offset=%d ms", __func__, cameraId, offsetTimeMs);      mStartTimeOffsets.replaceValueFor(cameraId, offsetTimeMs);  } -/*static*/ MediaProfiles::ExportVideoProfile* -MediaProfiles::createExportVideoProfile(const char **atts) -{ -    CHECK(!strcmp("name", atts[0]) && -          !strcmp("profile", atts[2]) && -          !strcmp("level", atts[4])); - -    const size_t nMappings = -        sizeof(sVideoEncoderNameMap)/sizeof(sVideoEncoderNameMap[0]); -    const int codec = findTagForName(sVideoEncoderNameMap, nMappings, atts[1]); -    CHECK(codec != -1); - -    MediaProfiles::ExportVideoProfile *profile = -        new MediaProfiles::ExportVideoProfile( -            codec, atoi(atts[3]), atoi(atts[5])); - -    return profile; -} -/*static*/ MediaProfiles::VideoEditorCap* -MediaProfiles::createVideoEditorCap(const char **atts, MediaProfiles *profiles) -{ -    CHECK(!strcmp("maxInputFrameWidth", atts[0]) && -          !strcmp("maxInputFrameHeight", atts[2])  && -          !strcmp("maxOutputFrameWidth", atts[4]) && -          !strcmp("maxOutputFrameHeight", atts[6]) && -          !strcmp("maxPrefetchYUVFrames", atts[8])); - -    MediaProfiles::VideoEditorCap *pVideoEditorCap = -        new MediaProfiles::VideoEditorCap(atoi(atts[1]), atoi(atts[3]), -                atoi(atts[5]), atoi(atts[7]), atoi(atts[9])); - -    logVideoEditorCap(*pVideoEditorCap); -    profiles->mVideoEditorCap = pVideoEditorCap; - -    return pVideoEditorCap; -}  /*static*/ void  
MediaProfiles::startElementHandler(void *userData, const char *name, const char **atts) @@ -464,10 +419,6 @@ MediaProfiles::startElementHandler(void *userData, const char *name, const char              createCamcorderProfile(profiles->mCurrentCameraId, atts, profiles->mCameraIds));      } else if (strcmp("ImageEncoding", name) == 0) {          profiles->addImageEncodingQualityLevel(profiles->mCurrentCameraId, atts); -    } else if (strcmp("VideoEditorCap", name) == 0) { -        createVideoEditorCap(atts, profiles); -    } else if (strcmp("ExportVideoProfile", name) == 0) { -        profiles->mVideoEditorExportProfiles.add(createExportVideoProfile(atts));      }  } @@ -531,7 +482,6 @@ void MediaProfiles::checkAndAddRequiredProfilesIfNecessary() {          CHECK(refIndex != -1);          RequiredProfileRefInfo *info;          camcorder_quality refQuality; -        VideoCodec *codec = NULL;          // Check high and low from either camcorder profile, timelapse profile          // or high speed profile, but not all of them. 
Default, check camcorder profile @@ -722,16 +672,20 @@ MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality qual  MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles(          MediaProfiles::CamcorderProfile **lowTimeLapseProfile,          MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) { -    *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW); -    *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF); +    *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile( +            CAMCORDER_QUALITY_TIME_LAPSE_LOW); +    *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile( +            CAMCORDER_QUALITY_TIME_LAPSE_QCIF);  }  /*static*/ void  MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles(          MediaProfiles::CamcorderProfile **highTimeLapseProfile,          MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) { -    *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH); -    *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P); +    *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile( +            CAMCORDER_QUALITY_TIME_LAPSE_HIGH); +    *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile( +            CAMCORDER_QUALITY_TIME_LAPSE_480P);  }  /*static*/ MediaProfiles::CamcorderProfile* @@ -809,7 +763,8 @@ MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles)      // high camcorder time lapse profiles.      
MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile; -    createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile); +    createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, +            &highSpecificTimeLapseProfile);      profiles->mCamcorderProfiles.add(highTimeLapseProfile);      profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile); @@ -868,32 +823,6 @@ MediaProfiles::createDefaultImageEncodingQualityLevels(MediaProfiles *profiles)      profiles->mImageEncodingQualityLevels.add(levels);  } -/*static*/ void -MediaProfiles::createDefaultVideoEditorCap(MediaProfiles *profiles) -{ -    profiles->mVideoEditorCap = -        new MediaProfiles::VideoEditorCap( -                VIDEOEDITOR_DEFAULT_MAX_INPUT_FRAME_WIDTH, -                VIDEOEDITOR_DEFUALT_MAX_INPUT_FRAME_HEIGHT, -                VIDEOEDITOR_DEFAULT_MAX_OUTPUT_FRAME_WIDTH, -                VIDEOEDITOR_DEFUALT_MAX_OUTPUT_FRAME_HEIGHT, -                VIDEOEDITOR_DEFAULT_MAX_PREFETCH_YUV_FRAMES); -} -/*static*/ void -MediaProfiles::createDefaultExportVideoProfiles(MediaProfiles *profiles) -{ -    // Create default video export profiles -    profiles->mVideoEditorExportProfiles.add( -        new ExportVideoProfile(VIDEO_ENCODER_H263, -            OMX_VIDEO_H263ProfileBaseline, OMX_VIDEO_H263Level10)); -    profiles->mVideoEditorExportProfiles.add( -        new ExportVideoProfile(VIDEO_ENCODER_MPEG_4_SP, -            OMX_VIDEO_MPEG4ProfileSimple, OMX_VIDEO_MPEG4Level1)); -    profiles->mVideoEditorExportProfiles.add( -        new ExportVideoProfile(VIDEO_ENCODER_H264, -            OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13)); -} -  /*static*/ MediaProfiles*  MediaProfiles::createDefaultInstance()  { @@ -905,8 +834,6 @@ MediaProfiles::createDefaultInstance()      createDefaultAudioDecoders(profiles);      createDefaultEncoderOutputFileFormats(profiles);      
createDefaultImageEncodingQualityLevels(profiles); -    createDefaultVideoEditorCap(profiles); -    createDefaultExportVideoProfiles(profiles);      return profiles;  } @@ -1004,54 +931,6 @@ int MediaProfiles::getVideoEncoderParamByName(const char *name, video_encoder co      ALOGE("The given video encoder param name %s is not found", name);      return -1;  } -int MediaProfiles::getVideoEditorExportParamByName( -    const char *name, int codec) const -{ -    ALOGV("getVideoEditorExportParamByName: name %s codec %d", name, codec); -    ExportVideoProfile *exportProfile = NULL; -    int index = -1; -    for (size_t i =0; i < mVideoEditorExportProfiles.size(); i++) { -        exportProfile = mVideoEditorExportProfiles[i]; -        if (exportProfile->mCodec == codec) { -            index = i; -            break; -        } -    } -    if (index == -1) { -        ALOGE("The given video decoder %d is not found", codec); -        return -1; -    } -    if (!strcmp("videoeditor.export.profile", name)) -        return exportProfile->mProfile; -    if (!strcmp("videoeditor.export.level", name)) -        return exportProfile->mLevel; - -    ALOGE("The given video editor export param name %s is not found", name); -    return -1; -} -int MediaProfiles::getVideoEditorCapParamByName(const char *name) const -{ -    ALOGV("getVideoEditorCapParamByName: %s", name); - -    if (mVideoEditorCap == NULL) { -        ALOGE("The mVideoEditorCap is not created, then create default cap."); -        createDefaultVideoEditorCap(sInstance); -    } - -    if (!strcmp("videoeditor.input.width.max", name)) -        return mVideoEditorCap->mMaxInputFrameWidth; -    if (!strcmp("videoeditor.input.height.max", name)) -        return mVideoEditorCap->mMaxInputFrameHeight; -    if (!strcmp("videoeditor.output.width.max", name)) -        return mVideoEditorCap->mMaxOutputFrameWidth; -    if (!strcmp("videoeditor.output.height.max", name)) -        return mVideoEditorCap->mMaxOutputFrameHeight; -    if 
(!strcmp("maxPrefetchYUVFrames", name)) -        return mVideoEditorCap->mMaxPrefetchYUVFrames; - -    ALOGE("The given video editor param name %s is not found", name); -    return -1; -}  Vector<audio_encoder> MediaProfiles::getAudioEncoders() const  { diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp new file mode 100644 index 0000000..40ec0cb --- /dev/null +++ b/media/libmedia/MediaResource.cpp @@ -0,0 +1,67 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaResource" +#include <utils/Log.h> +#include <media/MediaResource.h> + +namespace android { + +const char kResourceSecureCodec[] = "secure-codec"; +const char kResourceNonSecureCodec[] = "non-secure-codec"; +const char kResourceAudioCodec[] = "audio-codec"; +const char kResourceVideoCodec[] = "video-codec"; +const char kResourceGraphicMemory[] = "graphic-memory"; + +MediaResource::MediaResource() : mValue(0) {} + +MediaResource::MediaResource(String8 type, uint64_t value) +        : mType(type), +          mValue(value) {} + +MediaResource::MediaResource(String8 type, String8 subType, uint64_t value) +        : mType(type), +          mSubType(subType), +          mValue(value) {} + +void MediaResource::readFromParcel(const Parcel &parcel) { +    mType = parcel.readString8(); +    mSubType = parcel.readString8(); +    mValue = parcel.readUint64(); +} + +void MediaResource::writeToParcel(Parcel *parcel) const { +    parcel->writeString8(mType); +    parcel->writeString8(mSubType); +    parcel->writeUint64(mValue); +} + +String8 MediaResource::toString() const { +    String8 str; +    str.appendFormat("%s/%s:%llu", mType.string(), mSubType.string(), (unsigned long long)mValue); +    return str; +} + +bool MediaResource::operator==(const MediaResource &other) const { +    return (other.mType == mType) && (other.mSubType == mSubType) && (other.mValue == mValue); +} + +bool MediaResource::operator!=(const MediaResource &other) const { +    return !(*this == other); +} + +}; // namespace android diff --git a/media/libmedia/MediaResourcePolicy.cpp b/media/libmedia/MediaResourcePolicy.cpp new file mode 100644 index 0000000..5210825 --- /dev/null +++ b/media/libmedia/MediaResourcePolicy.cpp @@ -0,0 +1,49 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaResourcePolicy" +#include <utils/Log.h> +#include <media/MediaResourcePolicy.h> + +namespace android { + +const char kPolicySupportsMultipleSecureCodecs[] = "supports-multiple-secure-codecs"; +const char kPolicySupportsSecureWithNonSecureCodec[] = "supports-secure-with-non-secure-codec"; + +MediaResourcePolicy::MediaResourcePolicy() {} + +MediaResourcePolicy::MediaResourcePolicy(String8 type, String8 value) +        : mType(type), +          mValue(value) {} + +void MediaResourcePolicy::readFromParcel(const Parcel &parcel) { +    mType = parcel.readString8(); +    mValue = parcel.readString8(); +} + +void MediaResourcePolicy::writeToParcel(Parcel *parcel) const { +    parcel->writeString8(mType); +    parcel->writeString8(mValue); +} + +String8 MediaResourcePolicy::toString() const { +    String8 str; +    str.appendFormat("%s:%s", mType.string(), mValue.string()); +    return str; +} + +}; // namespace android diff --git a/media/libmedia/MemoryLeakTrackUtil.cpp b/media/libmedia/MemoryLeakTrackUtil.cpp index d31f721..554dbae 100644 --- a/media/libmedia/MemoryLeakTrackUtil.cpp +++ b/media/libmedia/MemoryLeakTrackUtil.cpp @@ -173,7 +173,7 @@ void dumpMemoryAddresses(int fd)  #else  // Does nothing -void dumpMemoryAddresses(int fd) {} +void dumpMemoryAddresses(int fd __unused) {}  #endif  }  // namespace android diff --git a/media/libmedia/SingleStateQueue.cpp b/media/libmedia/SingleStateQueue.cpp deleted file mode 100644 index c241184..0000000 --- a/media/libmedia/SingleStateQueue.cpp +++ 
/dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - *      http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include <new> -#include <cutils/atomic.h> -#include <media/SingleStateQueue.h> - -namespace android { - -template<typename T> SingleStateQueue<T>::Mutator::Mutator(Shared *shared) -    : mSequence(0), mShared((Shared *) shared) -{ -    // exactly one of Mutator and Observer must initialize, currently it is Observer -    //shared->init(); -} - -template<typename T> int32_t SingleStateQueue<T>::Mutator::push(const T& value) -{ -    Shared *shared = mShared; -    int32_t sequence = mSequence; -    sequence++; -    android_atomic_acquire_store(sequence, &shared->mSequence); -    shared->mValue = value; -    sequence++; -    android_atomic_release_store(sequence, &shared->mSequence); -    mSequence = sequence; -    // consider signalling a futex here, if we know that observer is waiting -    return sequence; -} - -template<typename T> bool SingleStateQueue<T>::Mutator::ack() -{ -    return mShared->mAck - mSequence == 0; -} - -template<typename T> bool SingleStateQueue<T>::Mutator::ack(int32_t sequence) -{ -    // this relies on 2's complement rollover to detect an ancient sequence number -    return mShared->mAck - sequence >= 0; -} - -template<typename T> SingleStateQueue<T>::Observer::Observer(Shared *shared) -    : mSequence(0), mSeed(1), mShared((Shared *) shared) -{ -    // exactly one of Mutator and Observer 
must initialize, currently it is Observer -    shared->init(); -} - -template<typename T> bool SingleStateQueue<T>::Observer::poll(T& value) -{ -    Shared *shared = mShared; -    int32_t before = shared->mSequence; -    if (before == mSequence) { -        return false; -    } -    for (int tries = 0; ; ) { -        const int MAX_TRIES = 5; -        if (before & 1) { -            if (++tries >= MAX_TRIES) { -                return false; -            } -            before = shared->mSequence; -        } else { -            android_memory_barrier(); -            T temp = shared->mValue; -            int32_t after = android_atomic_release_load(&shared->mSequence); -            if (after == before) { -                value = temp; -                shared->mAck = before; -                mSequence = before; -                return true; -            } -            if (++tries >= MAX_TRIES) { -                return false; -            } -            before = after; -        } -    } -} - -#if 0 -template<typename T> SingleStateQueue<T>::SingleStateQueue(void /*Shared*/ *shared) -{ -    ((Shared *) shared)->init(); -} -#endif - -}   // namespace android - -// hack for gcc -#ifdef SINGLE_STATE_QUEUE_INSTANTIATIONS -#include SINGLE_STATE_QUEUE_INSTANTIATIONS -#endif diff --git a/media/libmedia/StringArray.cpp b/media/libmedia/StringArray.cpp index 5f5b57a..b2e5907 100644 --- a/media/libmedia/StringArray.cpp +++ b/media/libmedia/StringArray.cpp @@ -16,7 +16,7 @@  //  // Sortable array of strings.  STL-ish, but STL-free. 
-//   +//  #include <stdlib.h>  #include <string.h> @@ -110,4 +110,4 @@ void StringArray::setEntry(int idx, const char* str) {  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp index 2cc4685..6da5348 100644 --- a/media/libmedia/ToneGenerator.cpp +++ b/media/libmedia/ToneGenerator.cpp @@ -984,7 +984,6 @@ void ToneGenerator::stopTone() {              if ((mStartTime.tv_sec != 0) && (clock_gettime(CLOCK_MONOTONIC, &stopTime) == 0)) {                  time_t sec = stopTime.tv_sec - mStartTime.tv_sec;                  long nsec = stopTime.tv_nsec - mStartTime.tv_nsec; -                long durationMs;                  if (nsec < 0) {                      --sec;                      nsec += 1000000000; diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp index f91e3e4..f5c1b1f 100644 --- a/media/libmedia/Visualizer.cpp +++ b/media/libmedia/Visualizer.cpp @@ -34,11 +34,12 @@ namespace android {  // --------------------------------------------------------------------------- -Visualizer::Visualizer (int32_t priority, +Visualizer::Visualizer (const String16& opPackageName, +         int32_t priority,           effect_callback_t cbf,           void* user,           int sessionId) -    :   AudioEffect(SL_IID_VISUALIZATION, NULL, priority, cbf, user, sessionId), +    :   AudioEffect(SL_IID_VISUALIZATION, opPackageName, NULL, priority, cbf, user, sessionId),          mCaptureRate(CAPTURE_RATE_DEF),          mCaptureSize(CAPTURE_SIZE_DEF),          mSampleRate(44100000), @@ -53,12 +54,8 @@ Visualizer::Visualizer (int32_t priority,  Visualizer::~Visualizer()  {      ALOGV("Visualizer::~Visualizer()"); -    if (mCaptureThread != NULL) { -        mCaptureThread->requestExitAndWait(); -        mCaptureThread.clear(); -    } -    mCaptureCallBack = NULL; -    mCaptureFlags = 0; +    setEnabled(false); +    setCaptureCallBack(NULL, NULL, 0, 0, true);  }  status_t 
Visualizer::setEnabled(bool enabled) @@ -98,14 +95,14 @@ status_t Visualizer::setEnabled(bool enabled)  }  status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags, -        uint32_t rate) +        uint32_t rate, bool force)  {      if (rate > CAPTURE_RATE_MAX) {          return BAD_VALUE;      }      Mutex::Autolock _l(mCaptureLock); -    if (mEnabled) { +    if (force || mEnabled) {          return INVALID_OPERATION;      } @@ -429,4 +426,4 @@ bool Visualizer::CaptureThread::threadLoop()      return false;  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/docs/Makefile b/media/libmedia/docs/Makefile new file mode 100644 index 0000000..bddbc9b --- /dev/null +++ b/media/libmedia/docs/Makefile @@ -0,0 +1,2 @@ +paused.png : paused.dot +	dot -Tpng < $< > $@ diff --git a/media/libmedia/docs/paused.dot b/media/libmedia/docs/paused.dot new file mode 100644 index 0000000..11e1777 --- /dev/null +++ b/media/libmedia/docs/paused.dot @@ -0,0 +1,85 @@ +digraph paused { +initial [label="INITIAL\n\ +mIgnoreNextPausedInt = false\n\ +mPaused = false\n\ +mPausedInt = false"]; + +resume_body [label="mIgnoreNextPausedInt = true\nif (mPaused || mPausedInt)"]; +resume_paused [label="mPaused = false\nmPausedInt = false\nsignal()"]; +resume_paused -> resume_merged; +resume_merged [label="return"]; + +Application -> ATstop; +ATstop [label="AudioTrack::stop()"]; +ATstop -> pause; +Application -> ATpause; +ATpause [label="AudioTrack::pause()"]; +ATpause -> pause; +ATstart -> resume; +ATstart [label="AudioTrack::start()"]; +destructor [label="~AudioTrack()"]; +destructor -> requestExit; +requestExit [label="AudioTrackThread::requestExit()"]; +requestExit -> resume; +Application -> ATsetMarkerPosition +ATsetMarkerPosition [label="AudioTrack::setMarkerPosition()\n[sets marker variables]"]; +ATsetMarkerPosition -> ATTwake +Application -> ATsetPositionUpdatePeriod +ATsetPositionUpdatePeriod 
[label="AudioTrack::setPositionUpdatePeriod()\n[sets update period variables]"]; +ATsetPositionUpdatePeriod -> ATTwake +Application -> ATstart; + +resume [label="AudioTrackThread::resume()"]; +resume -> resume_body; + +resume_body -> resume_paused [label="true"]; +resume_body -> resume_merged [label="false"]; + +ATTwake [label="AudioTrackThread::wake()\nif (!mPaused && mPausedInt && mPausedNs > 0)"]; +ATTwake-> ATTWake_wakeable [label="true"]; +ATTWake_wakeable [label="mIgnoreNextPausedInt = true\nmPausedInt = false\nsignal()"]; +ATTwake-> ATTWake_cannotwake [label="false"] +ATTWake_cannotwake [label="ignore"]; + +pause [label="mPaused = true"]; +pause -> return; + +threadLoop [label="AudioTrackThread::threadLoop()\nENTRY"]; +threadLoop -> threadLoop_1; +threadLoop_1 [label="if (mPaused)"]; +threadLoop_1 -> threadLoop_1_true [label="true"]; +threadLoop_1 -> threadLoop_2 [label="false"]; +threadLoop_1_true [label="wait()\nreturn true"]; +threadLoop_2 [label="if (mIgnoreNextPausedInt)"]; +threadLoop_2 -> threadLoop_2_true [label="true"]; +threadLoop_2 -> threadLoop_3 [label="false"]; +threadLoop_2_true [label="mIgnoreNextPausedInt = false\nmPausedInt = false"]; +threadLoop_2_true -> threadLoop_3; +threadLoop_3 [label="if (mPausedInt)"]; +threadLoop_3 -> threadLoop_3_true [label="true"]; +threadLoop_3 -> threadLoop_4 [label="false"]; +threadLoop_3_true [label="wait()\nmPausedInt = false\nreturn true"]; +threadLoop_4 [label="if (exitPending)"]; +threadLoop_4 -> threadLoop_4_true [label="true"]; +threadLoop_4 -> threadLoop_5 [label="false"]; +threadLoop_4_true [label="return false"]; +threadLoop_5 [label="ns = processAudioBuffer()"]; +threadLoop_5 -> threadLoop_6; +threadLoop_6 [label="case ns"]; +threadLoop_6 -> threadLoop_6_0 [label="0"]; +threadLoop_6 -> threadLoop_6_NS_INACTIVE [label="NS_INACTIVE"]; +threadLoop_6 -> threadLoop_6_NS_NEVER [label="NS_NEVER"]; +threadLoop_6 -> threadLoop_6_NS_WHENEVER [label="NS_WHENEVER"]; +threadLoop_6 -> threadLoop_6_default 
[label="default"]; +threadLoop_6_default [label="if (ns < 0)"]; +threadLoop_6_default -> threadLoop_6_default_true [label="true"]; +threadLoop_6_default -> threadLoop_6_default_false [label="false"]; +threadLoop_6_default_true [label="FATAL"]; +threadLoop_6_default_false [label="pauseInternal(ns) [wake()-able]\nmPausedInternal = true\nmPausedNs = ns\nreturn true"]; +threadLoop_6_0 [label="return true"]; +threadLoop_6_NS_INACTIVE [label="pauseInternal()\nmPausedInternal = true\nmPausedNs = 0\nreturn true"]; +threadLoop_6_NS_NEVER [label="return false"]; +threadLoop_6_NS_WHENEVER [label="ns = 1s"]; +threadLoop_6_NS_WHENEVER -> threadLoop_6_default_false; + +} diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp index 8e8a1ed..9a76f58 100644 --- a/media/libmedia/mediametadataretriever.cpp +++ b/media/libmedia/mediametadataretriever.cpp @@ -129,6 +129,18 @@ status_t MediaMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t l      return mRetriever->setDataSource(fd, offset, length);  } +status_t MediaMetadataRetriever::setDataSource( +    const sp<IDataSource>& dataSource) +{ +    ALOGV("setDataSource(IDataSource)"); +    Mutex::Autolock _l(mLock); +    if (mRetriever == 0) { +        ALOGE("retriever is not initialized"); +        return INVALID_OPERATION; +    } +    return mRetriever->setDataSource(dataSource); +} +  sp<IMemory> MediaMetadataRetriever::getFrameAtTime(int64_t timeUs, int option)  {      ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d)", timeUs, option); @@ -176,4 +188,4 @@ MediaMetadataRetriever::DeathNotifier::~DeathNotifier()      }  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 05c89ed..502ab2d 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -32,7 +32,10 @@  #include <gui/Surface.h>  #include <media/mediaplayer.h> +#include <media/AudioResamplerPublic.h>  
#include <media/AudioSystem.h> +#include <media/AVSyncSettings.h> +#include <media/IDataSource.h>  #include <binder/MemoryBase.h> @@ -194,6 +197,22 @@ status_t MediaPlayer::setDataSource(const sp<IStreamSource> &source)      return err;  } +status_t MediaPlayer::setDataSource(const sp<IDataSource> &source) +{ +    ALOGV("setDataSource(IDataSource)"); +    status_t err = UNKNOWN_ERROR; +    const sp<IMediaPlayerService>& service(getMediaPlayerService()); +    if (service != 0) { +        sp<IMediaPlayer> player(service->create(this, mAudioSessionId)); +        if ((NO_ERROR != doSetRetransmitEndpoint(player)) || +            (NO_ERROR != player->setDataSource(source))) { +            player.clear(); +        } +        err = attachNewPlayer(player); +    } +    return err; +} +  status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)  {      Mutex::Autolock _l(mLock); @@ -240,10 +259,11 @@ status_t MediaPlayer::setVideoSurfaceTexture(  // must call with lock held  status_t MediaPlayer::prepareAsync_l()  { -    if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) { -        mPlayer->setAudioStreamType(mStreamType); +    if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) {          if (mAudioAttributesParcel != NULL) {              mPlayer->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel); +        } else { +            mPlayer->setAudioStreamType(mStreamType);          }          mCurrentState = MEDIA_PLAYER_PREPARING;          return mPlayer->prepareAsync(); @@ -371,6 +391,9 @@ bool MediaPlayer::isPlaying()          if ((mCurrentState & MEDIA_PLAYER_STARTED) && ! 
temp) {              ALOGE("internal/external state mismatch corrected");              mCurrentState = MEDIA_PLAYER_PAUSED; +        } else if ((mCurrentState & MEDIA_PLAYER_PAUSED) && temp) { +            ALOGE("internal/external state mismatch corrected"); +            mCurrentState = MEDIA_PLAYER_STARTED;          }          return temp;      } @@ -378,6 +401,52 @@ bool MediaPlayer::isPlaying()      return false;  } +status_t MediaPlayer::setPlaybackSettings(const AudioPlaybackRate& rate) +{ +    ALOGV("setPlaybackSettings: %f %f %d %d", +            rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode); +    // Negative speed and pitch does not make sense. Further validation will +    // be done by the respective mediaplayers. +    if (rate.mSpeed < 0.f || rate.mPitch < 0.f) { +        return BAD_VALUE; +    } +    Mutex::Autolock _l(mLock); +    if (mPlayer == 0) return INVALID_OPERATION; +    status_t err = mPlayer->setPlaybackSettings(rate); +    if (err == OK) { +        if (rate.mSpeed == 0.f && mCurrentState == MEDIA_PLAYER_STARTED) { +            mCurrentState = MEDIA_PLAYER_PAUSED; +        } else if (rate.mSpeed != 0.f && mCurrentState == MEDIA_PLAYER_PAUSED) { +            mCurrentState = MEDIA_PLAYER_STARTED; +        } +    } +    return err; +} + +status_t MediaPlayer::getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) +{ +    Mutex::Autolock _l(mLock); +    if (mPlayer == 0) return INVALID_OPERATION; +    return mPlayer->getPlaybackSettings(rate); +} + +status_t MediaPlayer::setSyncSettings(const AVSyncSettings& sync, float videoFpsHint) +{ +    ALOGV("setSyncSettings: %u %u %f %f", +            sync.mSource, sync.mAudioAdjustMode, sync.mTolerance, videoFpsHint); +    Mutex::Autolock _l(mLock); +    if (mPlayer == 0) return INVALID_OPERATION; +    return mPlayer->setSyncSettings(sync, videoFpsHint); +} + +status_t MediaPlayer::getSyncSettings( +        AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */) +{ +    
Mutex::Autolock _l(mLock); +    if (mPlayer == 0) return INVALID_OPERATION; +    return mPlayer->getSyncSettings(sync, videoFps); +} +  status_t MediaPlayer::getVideoWidth(int *w)  {      ALOGV("getVideoWidth"); @@ -414,7 +483,8 @@ status_t MediaPlayer::getCurrentPosition(int *msec)  status_t MediaPlayer::getDuration_l(int *msec)  {      ALOGV("getDuration_l"); -    bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE)); +    bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED | +            MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE));      if (mPlayer != 0 && isValidState) {          int durationMs;          status_t ret = mPlayer->getDuration(&durationMs); @@ -443,7 +513,8 @@ status_t MediaPlayer::getDuration(int *msec)  status_t MediaPlayer::seekTo_l(int msec)  {      ALOGV("seekTo %d", msec); -    if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED |  MEDIA_PLAYER_PLAYBACK_COMPLETE) ) ) { +    if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PREPARED | +            MEDIA_PLAYER_PAUSED |  MEDIA_PLAYER_PLAYBACK_COMPLETE) ) ) {          if ( msec < 0 ) {              ALOGW("Attempt to seek to invalid position: %d", msec);              msec = 0; @@ -477,7 +548,8 @@ status_t MediaPlayer::seekTo_l(int msec)              return NO_ERROR;          }      } -    ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(), mCurrentState); +    ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(), +            mCurrentState);      return INVALID_OPERATION;  } @@ -502,6 +574,7 @@ status_t MediaPlayer::reset_l()              ALOGE("reset() failed with return code (%d)", ret);              mCurrentState = MEDIA_PLAYER_STATE_ERROR;          } else 
{ +            mPlayer->disconnect();              mCurrentState = MEDIA_PLAYER_IDLE;          }          // setDataSource has to be called again to create a @@ -663,24 +736,28 @@ status_t MediaPlayer::checkStateForKeySet_l(int key)  status_t MediaPlayer::setParameter(int key, const Parcel& request)  {      ALOGV("MediaPlayer::setParameter(%d)", key); +    status_t status = INVALID_OPERATION;      Mutex::Autolock _l(mLock);      if (checkStateForKeySet_l(key) != OK) { -        return INVALID_OPERATION; -    } -    if (mPlayer != NULL) { -        return  mPlayer->setParameter(key, request); +        return status;      }      switch (key) {      case KEY_PARAMETER_AUDIO_ATTRIBUTES: -        // no player, save the marshalled audio attributes +        // save the marshalled audio attributes          if (mAudioAttributesParcel != NULL) { delete mAudioAttributesParcel; };          mAudioAttributesParcel = new Parcel();          mAudioAttributesParcel->appendFrom(&request, 0, request.dataSize()); -        return OK; +        status = OK; +        break;      default: -        ALOGV("setParameter: no active player"); -        return INVALID_OPERATION; +        ALOGV_IF(mPlayer == NULL, "setParameter: no active player"); +        break; +    } + +    if (mPlayer != NULL) { +        status = mPlayer->setParameter(key, request);      } +    return status;  }  status_t MediaPlayer::getParameter(int key, Parcel *reply) @@ -818,6 +895,9 @@ void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj)      case MEDIA_SUBTITLE_DATA:          ALOGV("Received subtitle data message");          break; +    case MEDIA_META_DATA: +        ALOGV("Received timed metadata message"); +        break;      default:          ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);          break; @@ -855,4 +935,4 @@ status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) {      return mPlayer->setNextPlayer(next == NULL ? 
NULL : next->mPlayer);  } -}; // namespace android +} // namespace android diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index 1952b86..8bbd8f1 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -27,6 +27,7 @@  #include <media/IMediaPlayerService.h>  #include <media/IMediaRecorder.h>  #include <media/mediaplayer.h>  // for MEDIA_ERROR_SERVER_DIED +#include <media/stagefright/PersistentSurface.h>  #include <gui/IGraphicBufferProducer.h>  namespace android { @@ -264,32 +265,6 @@ status_t MediaRecorder::setAudioEncoder(int ae)      return ret;  } -status_t MediaRecorder::setOutputFile(const char* path) -{ -    ALOGV("setOutputFile(%s)", path); -    if (mMediaRecorder == NULL) { -        ALOGE("media recorder is not initialized yet"); -        return INVALID_OPERATION; -    } -    if (mIsOutputFileSet) { -        ALOGE("output file has already been set"); -        return INVALID_OPERATION; -    } -    if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) { -        ALOGE("setOutputFile called in an invalid state(%d)", mCurrentState); -        return INVALID_OPERATION; -    } - -    status_t ret = mMediaRecorder->setOutputFile(path); -    if (OK != ret) { -        ALOGV("setOutputFile failed: %d", ret); -        mCurrentState = MEDIA_RECORDER_ERROR; -        return ret; -    } -    mIsOutputFileSet = true; -    return ret; -} -  status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)  {      ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length); @@ -370,6 +345,24 @@ sp<IGraphicBufferProducer> MediaRecorder:: +status_t MediaRecorder::setInputSurface(const sp<PersistentSurface>& surface) +{ +    ALOGV("setInputSurface"); +    if (mMediaRecorder == NULL) { +        ALOGE("media recorder is not initialized yet"); +        return INVALID_OPERATION; +    } +    bool isInvalidState = (mCurrentState & +                           (MEDIA_RECORDER_PREPARED | +    
                        MEDIA_RECORDER_RECORDING)); +    if (isInvalidState) { +        ALOGE("setInputSurface is called in an invalid state: %d", mCurrentState); +        return INVALID_OPERATION; +    } + +    return mMediaRecorder->setInputSurface(surface->getBufferConsumer()); +} +  status_t MediaRecorder::setVideoFrameRate(int frames_per_second)  {      ALOGV("setVideoFrameRate(%d)", frames_per_second); @@ -620,13 +613,13 @@ status_t MediaRecorder::release()      return INVALID_OPERATION;  } -MediaRecorder::MediaRecorder() : mSurfaceMediaSource(NULL) +MediaRecorder::MediaRecorder(const String16& opPackageName) : mSurfaceMediaSource(NULL)  {      ALOGV("constructor");      const sp<IMediaPlayerService>& service(getMediaPlayerService());      if (service != NULL) { -        mMediaRecorder = service->createMediaRecorder(); +        mMediaRecorder = service->createMediaRecorder(opPackageName);      }      if (mMediaRecorder != NULL) {          mCurrentState = MEDIA_RECORDER_IDLE; @@ -706,4 +699,4 @@ void MediaRecorder::died()      notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_ERROR_SERVER_DIED, 0);  } -}; // namespace android +} // namespace android diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 9d8fe62..4d1b587 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -10,6 +10,7 @@ LOCAL_SRC_FILES:=               \      ActivityManager.cpp         \      Crypto.cpp                  \      Drm.cpp                     \ +    DrmSessionManager.cpp       \      HDCP.cpp                    \      MediaPlayerFactory.cpp      \      MediaPlayerService.cpp      \ @@ -20,7 +21,6 @@ LOCAL_SRC_FILES:=               \      StagefrightPlayer.cpp       \      StagefrightRecorder.cpp     \      TestPlayerStub.cpp          \ -    VideoFrameScheduler.cpp     \  LOCAL_SHARED_LIBRARIES :=       \      libbinder                   \ @@ -32,6 +32,7 @@ LOCAL_SHARED_LIBRARIES :=       \      libdl  
                     \      libgui                      \      libmedia                    \ +    libmediautils               \      libsonivox                  \      libstagefright              \      libstagefright_foundation   \ @@ -53,6 +54,9 @@ LOCAL_C_INCLUDES :=                                                 \      $(TOP)/frameworks/native/include/media/openmax                  \      $(TOP)/external/tremolo/Tremolo                                 \ +LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall +LOCAL_CLANG := true +  LOCAL_MODULE:= libmediaplayerservice  LOCAL_32_BIT_ONLY := true diff --git a/media/libmediaplayerservice/Crypto.cpp b/media/libmediaplayerservice/Crypto.cpp index 8ee7c0b..147d35f 100644 --- a/media/libmediaplayerservice/Crypto.cpp +++ b/media/libmediaplayerservice/Crypto.cpp @@ -22,6 +22,7 @@  #include "Crypto.h" +#include <binder/IMemory.h>  #include <media/hardware/CryptoAPI.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AString.h> @@ -88,7 +89,7 @@ void Crypto::findFactoryForScheme(const uint8_t uuid[16]) {      // first check cache      Vector<uint8_t> uuidVector; -    uuidVector.appendArray(uuid, sizeof(uuid)); +    uuidVector.appendArray(uuid, sizeof(uuid[0]) * 16);      ssize_t index = mUUIDToLibraryPathMap.indexOfKey(uuidVector);      if (index >= 0) {          if (loadLibraryForScheme(mUUIDToLibraryPathMap[index], uuid)) { @@ -238,7 +239,7 @@ ssize_t Crypto::decrypt(          const uint8_t key[16],          const uint8_t iv[16],          CryptoPlugin::Mode mode, -        const void *srcPtr, +        const sp<IMemory> &sharedBuffer, size_t offset,          const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,          void *dstPtr,          AString *errorDetailMsg) { @@ -252,6 +253,8 @@ ssize_t Crypto::decrypt(          return -EINVAL;      } +    const void *srcPtr = static_cast<uint8_t *>(sharedBuffer->pointer()) + offset; +      return mPlugin->decrypt(   
           secure, key, iv, mode, srcPtr, subSamples, numSubSamples, dstPtr,              errorDetailMsg); @@ -265,4 +268,14 @@ void Crypto::notifyResolution(uint32_t width, uint32_t height) {      }  } +status_t Crypto::setMediaDrmSession(const Vector<uint8_t> &sessionId) { +    Mutex::Autolock autoLock(mLock); + +    status_t result = NO_INIT; +    if (mInitCheck == OK && mPlugin != NULL) { +        result = mPlugin->setMediaDrmSession(sessionId); +    } +    return result; +} +  }  // namespace android diff --git a/media/libmediaplayerservice/Crypto.h b/media/libmediaplayerservice/Crypto.h index 0037c2e..99ea95d 100644 --- a/media/libmediaplayerservice/Crypto.h +++ b/media/libmediaplayerservice/Crypto.h @@ -47,12 +47,14 @@ struct Crypto : public BnCrypto {      virtual void notifyResolution(uint32_t width, uint32_t height); +    virtual status_t setMediaDrmSession(const Vector<uint8_t> &sessionId); +      virtual ssize_t decrypt(              bool secure,              const uint8_t key[16],              const uint8_t iv[16],              CryptoPlugin::Mode mode, -            const void *srcPtr, +            const sp<IMemory> &sharedBuffer, size_t offset,              const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,              void *dstPtr,              AString *errorDetailMsg); diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp index 9d5ba15..a7f6f8b 100644 --- a/media/libmediaplayerservice/Drm.cpp +++ b/media/libmediaplayerservice/Drm.cpp @@ -23,6 +23,8 @@  #include "Drm.h" +#include "DrmSessionClientInterface.h" +#include "DrmSessionManager.h"  #include <media/drm/DrmAPI.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AString.h> @@ -33,6 +35,10 @@  namespace android { +static inline int getCallingPid() { +    return IPCThreadState::self()->getCallingPid(); +} +  static bool checkPermission(const char* permissionString) {  #ifndef HAVE_ANDROID_OS      return true; 
@@ -58,14 +64,41 @@ static bool operator<(const Vector<uint8_t> &lhs, const Vector<uint8_t> &rhs) {      return memcmp((void *)lhs.array(), (void *)rhs.array(), rhs.size()) < 0;  } +struct DrmSessionClient : public DrmSessionClientInterface { +    DrmSessionClient(Drm* drm) : mDrm(drm) {} + +    virtual bool reclaimSession(const Vector<uint8_t>& sessionId) { +        sp<Drm> drm = mDrm.promote(); +        if (drm == NULL) { +            return true; +        } +        status_t err = drm->closeSession(sessionId); +        if (err != OK) { +            return false; +        } +        drm->sendEvent(DrmPlugin::kDrmPluginEventSessionReclaimed, 0, &sessionId, NULL); +        return true; +    } + +protected: +    virtual ~DrmSessionClient() {} + +private: +    wp<Drm> mDrm; + +    DISALLOW_EVIL_CONSTRUCTORS(DrmSessionClient); +}; +  Drm::Drm()      : mInitCheck(NO_INIT), +      mDrmSessionClient(new DrmSessionClient(this)),        mListener(NULL),        mFactory(NULL),        mPlugin(NULL) {  }  Drm::~Drm() { +    DrmSessionManager::Instance()->removeDrm(mDrmSessionClient);      delete mPlugin;      mPlugin = NULL;      closeFactory(); @@ -104,22 +137,54 @@ void Drm::sendEvent(DrmPlugin::EventType eventType, int extra,      if (listener != NULL) {          Parcel obj; -        if (sessionId && sessionId->size()) { -            obj.writeInt32(sessionId->size()); -            obj.write(sessionId->array(), sessionId->size()); -        } else { -            obj.writeInt32(0); -        } +        writeByteArray(obj, sessionId); +        writeByteArray(obj, data); -        if (data && data->size()) { -            obj.writeInt32(data->size()); -            obj.write(data->array(), data->size()); -        } else { -            obj.writeInt32(0); +        Mutex::Autolock lock(mNotifyLock); +        listener->notify(eventType, extra, &obj); +    } +} + +void Drm::sendExpirationUpdate(Vector<uint8_t> const *sessionId, +                               int64_t expiryTimeInMS) +{ 
+    mEventLock.lock(); +    sp<IDrmClient> listener = mListener; +    mEventLock.unlock(); + +    if (listener != NULL) { +        Parcel obj; +        writeByteArray(obj, sessionId); +        obj.writeInt64(expiryTimeInMS); + +        Mutex::Autolock lock(mNotifyLock); +        listener->notify(DrmPlugin::kDrmPluginEventExpirationUpdate, 0, &obj); +    } +} + +void Drm::sendKeysChange(Vector<uint8_t> const *sessionId, +                         Vector<DrmPlugin::KeyStatus> const *keyStatusList, +                         bool hasNewUsableKey) +{ +    mEventLock.lock(); +    sp<IDrmClient> listener = mListener; +    mEventLock.unlock(); + +    if (listener != NULL) { +        Parcel obj; +        writeByteArray(obj, sessionId); + +        size_t nkeys = keyStatusList->size(); +        obj.writeInt32(keyStatusList->size()); +        for (size_t i = 0; i < nkeys; ++i) { +            const DrmPlugin::KeyStatus *keyStatus = &keyStatusList->itemAt(i); +            writeByteArray(obj, &keyStatus->mKeyId); +            obj.writeInt32(keyStatus->mType);          } +        obj.writeInt32(hasNewUsableKey);          Mutex::Autolock lock(mNotifyLock); -        listener->notify(eventType, extra, &obj); +        listener->notify(DrmPlugin::kDrmPluginEventKeysChange, 0, &obj);      }  } @@ -145,7 +210,7 @@ void Drm::findFactoryForScheme(const uint8_t uuid[16]) {      // first check cache      Vector<uint8_t> uuidVector; -    uuidVector.appendArray(uuid, sizeof(uuid)); +    uuidVector.appendArray(uuid, sizeof(uuid[0]) * 16);      ssize_t index = mUUIDToLibraryPathMap.indexOfKey(uuidVector);      if (index >= 0) {          if (loadLibraryForScheme(mUUIDToLibraryPathMap[index], uuid)) { @@ -290,7 +355,29 @@ status_t Drm::openSession(Vector<uint8_t> &sessionId) {          return -EINVAL;      } -    return mPlugin->openSession(sessionId); +    status_t err = mPlugin->openSession(sessionId); +    if (err == ERROR_DRM_RESOURCE_BUSY) { +        bool retry = false; +        
mLock.unlock(); +        // reclaimSession may call back to closeSession, since mLock is shared between Drm +        // instances, we should unlock here to avoid deadlock. +        retry = DrmSessionManager::Instance()->reclaimSession(getCallingPid()); +        mLock.lock(); +        if (mInitCheck != OK) { +            return mInitCheck; +        } + +        if (mPlugin == NULL) { +            return -EINVAL; +        } +        if (retry) { +            err = mPlugin->openSession(sessionId); +        } +    } +    if (err == OK) { +        DrmSessionManager::Instance()->addSession(getCallingPid(), mDrmSessionClient, sessionId); +    } +    return err;  }  status_t Drm::closeSession(Vector<uint8_t> const &sessionId) { @@ -304,14 +391,19 @@ status_t Drm::closeSession(Vector<uint8_t> const &sessionId) {          return -EINVAL;      } -    return mPlugin->closeSession(sessionId); +    status_t err = mPlugin->closeSession(sessionId); +    if (err == OK) { +        DrmSessionManager::Instance()->removeSession(sessionId); +    } +    return err;  }  status_t Drm::getKeyRequest(Vector<uint8_t> const &sessionId,                              Vector<uint8_t> const &initData,                              String8 const &mimeType, DrmPlugin::KeyType keyType,                              KeyedVector<String8, String8> const &optionalParameters, -                            Vector<uint8_t> &request, String8 &defaultUrl) { +                            Vector<uint8_t> &request, String8 &defaultUrl, +                            DrmPlugin::KeyRequestType *keyRequestType) {      Mutex::Autolock autoLock(mLock);      if (mInitCheck != OK) { @@ -322,8 +414,11 @@ status_t Drm::getKeyRequest(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->getKeyRequest(sessionId, initData, mimeType, keyType, -                                  optionalParameters, request, defaultUrl); +                     
             optionalParameters, request, defaultUrl, +                                  keyRequestType);  }  status_t Drm::provideKeyResponse(Vector<uint8_t> const &sessionId, @@ -339,6 +434,8 @@ status_t Drm::provideKeyResponse(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->provideKeyResponse(sessionId, response, keySetId);  } @@ -368,6 +465,8 @@ status_t Drm::restoreKeys(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->restoreKeys(sessionId, keySetId);  } @@ -383,6 +482,8 @@ status_t Drm::queryKeyStatus(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->queryKeyStatus(sessionId, infoMap);  } @@ -562,6 +663,8 @@ status_t Drm::setCipherAlgorithm(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->setCipherAlgorithm(sessionId, algorithm);  } @@ -577,6 +680,8 @@ status_t Drm::setMacAlgorithm(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->setMacAlgorithm(sessionId, algorithm);  } @@ -595,6 +700,8 @@ status_t Drm::encrypt(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->encrypt(sessionId, keyId, input, iv, output);  } @@ -613,6 +720,8 @@ status_t Drm::decrypt(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->decrypt(sessionId, keyId, input, iv, output);  } @@ -630,6 +739,8 @@ status_t Drm::sign(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    
DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->sign(sessionId, keyId, message, signature);  } @@ -648,6 +759,8 @@ status_t Drm::verify(Vector<uint8_t> const &sessionId,          return -EINVAL;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->verify(sessionId, keyId, message, signature, match);  } @@ -670,10 +783,12 @@ status_t Drm::signRSA(Vector<uint8_t> const &sessionId,          return -EPERM;      } +    DrmSessionManager::Instance()->useSession(sessionId); +      return mPlugin->signRSA(sessionId, algorithm, message, wrappedKey, signature);  } -void Drm::binderDied(const wp<IBinder> &the_late_who) +void Drm::binderDied(const wp<IBinder> &the_late_who __unused)  {      mEventLock.lock();      mListener.clear(); @@ -685,4 +800,14 @@ void Drm::binderDied(const wp<IBinder> &the_late_who)      closeFactory();  } +void Drm::writeByteArray(Parcel &obj, Vector<uint8_t> const *array) +{ +    if (array && array->size()) { +        obj.writeInt32(array->size()); +        obj.write(array->array(), array->size()); +    } else { +        obj.writeInt32(0); +    } +} +  }  // namespace android diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h index 2997da1..056723c 100644 --- a/media/libmediaplayerservice/Drm.h +++ b/media/libmediaplayerservice/Drm.h @@ -26,8 +26,9 @@  namespace android { -struct DrmFactory; -struct DrmPlugin; +class DrmFactory; +class DrmPlugin; +struct DrmSessionClientInterface;  struct Drm : public BnDrm,               public IBinder::DeathRecipient, @@ -52,7 +53,8 @@ struct Drm : public BnDrm,                        Vector<uint8_t> const &initData,                        String8 const &mimeType, DrmPlugin::KeyType keyType,                        KeyedVector<String8, String8> const &optionalParameters, -                      Vector<uint8_t> &request, String8 &defaultUrl); +                      Vector<uint8_t> &request, String8 &defaultUrl, +            
          DrmPlugin::KeyRequestType *keyRequestType);      virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,                                          Vector<uint8_t> const &response, @@ -131,6 +133,13 @@ struct Drm : public BnDrm,                             Vector<uint8_t> const *sessionId,                             Vector<uint8_t> const *data); +    virtual void sendExpirationUpdate(Vector<uint8_t> const *sessionId, +                                      int64_t expiryTimeInMS); + +    virtual void sendKeysChange(Vector<uint8_t> const *sessionId, +                                Vector<DrmPlugin::KeyStatus> const *keyStatusList, +                                bool hasNewUsableKey); +      virtual void binderDied(const wp<IBinder> &the_late_who);  private: @@ -138,6 +147,8 @@ private:      status_t mInitCheck; +    sp<DrmSessionClientInterface> mDrmSessionClient; +      sp<IDrmClient> mListener;      mutable Mutex mEventLock;      mutable Mutex mNotifyLock; @@ -153,7 +164,7 @@ private:      void findFactoryForScheme(const uint8_t uuid[16]);      bool loadLibraryForScheme(const String8 &path, const uint8_t uuid[16]);      void closeFactory(); - +    void writeByteArray(Parcel &obj, Vector<uint8_t> const *array);      DISALLOW_EVIL_CONSTRUCTORS(Drm);  }; diff --git a/media/libmedia/SingleStateQueueInstantiations.cpp b/media/libmediaplayerservice/DrmSessionClientInterface.h index 0265c8c..17faf08 100644 --- a/media/libmedia/SingleStateQueueInstantiations.cpp +++ b/media/libmediaplayerservice/DrmSessionClientInterface.h @@ -1,5 +1,5 @@  /* - * Copyright (C) 2012 The Android Open Source Project + * Copyright (C) 2015 The Android Open Source Project   *   * Licensed under the Apache License, Version 2.0 (the "License");   * you may not use this file except in compliance with the License. @@ -14,15 +14,21 @@   * limitations under the License.   
*/ -#include <media/SingleStateQueue.h> -#include <private/media/StaticAudioTrackState.h> -#include <media/AudioTimestamp.h> +#ifndef DRM_PROXY_INTERFACE_H_ +#define DRM_PROXY_INTERFACE_H_ -// FIXME hack for gcc +#include <utils/RefBase.h> +#include <utils/Vector.h>  namespace android { -template class SingleStateQueue<StaticAudioTrackState>; // typedef StaticAudioTrackSingleStateQueue -template class SingleStateQueue<AudioTimestamp>;        // typedef AudioTimestampSingleStateQueue +struct DrmSessionClientInterface : public RefBase { +    virtual bool reclaimSession(const Vector<uint8_t>& sessionId) = 0; -} +protected: +    virtual ~DrmSessionClientInterface() {} +}; + +}  // namespace android + +#endif  // DRM_PROXY_INTERFACE_H_ diff --git a/media/libmediaplayerservice/DrmSessionManager.cpp b/media/libmediaplayerservice/DrmSessionManager.cpp new file mode 100644 index 0000000..641f881 --- /dev/null +++ b/media/libmediaplayerservice/DrmSessionManager.cpp @@ -0,0 +1,240 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "DrmSessionManager" +#include <utils/Log.h> + +#include "DrmSessionManager.h" + +#include "DrmSessionClientInterface.h" +#include <binder/IPCThreadState.h> +#include <binder/IProcessInfoService.h> +#include <binder/IServiceManager.h> +#include <media/stagefright/ProcessInfo.h> +#include <unistd.h> +#include <utils/String8.h> + +namespace android { + +static String8 GetSessionIdString(const Vector<uint8_t> &sessionId) { +    String8 sessionIdStr; +    for (size_t i = 0; i < sessionId.size(); ++i) { +        sessionIdStr.appendFormat("%u ", sessionId[i]); +    } +    return sessionIdStr; +} + +bool isEqualSessionId(const Vector<uint8_t> &sessionId1, const Vector<uint8_t> &sessionId2) { +    if (sessionId1.size() != sessionId2.size()) { +        return false; +    } +    for (size_t i = 0; i < sessionId1.size(); ++i) { +        if (sessionId1[i] != sessionId2[i]) { +            return false; +        } +    } +    return true; +} + +sp<DrmSessionManager> DrmSessionManager::Instance() { +    static sp<DrmSessionManager> drmSessionManager = new DrmSessionManager(); +    return drmSessionManager; +} + +DrmSessionManager::DrmSessionManager() +    : mProcessInfo(new ProcessInfo()), +      mTime(0) {} + +DrmSessionManager::DrmSessionManager(sp<ProcessInfoInterface> processInfo) +    : mProcessInfo(processInfo), +      mTime(0) {} + +DrmSessionManager::~DrmSessionManager() {} + +void DrmSessionManager::addSession( +        int pid, sp<DrmSessionClientInterface> drm, const Vector<uint8_t> &sessionId) { +    ALOGV("addSession(pid %d, drm %p, sessionId %s)", pid, drm.get(), +            GetSessionIdString(sessionId).string()); + +    Mutex::Autolock lock(mLock); +    SessionInfo info; +    info.drm = drm; +    info.sessionId = sessionId; +    info.timeStamp = getTime_l(); +    ssize_t index = mSessionMap.indexOfKey(pid); +    if (index < 0) { +        // new pid +        SessionInfos infosForPid; +        
infosForPid.push_back(info); +        mSessionMap.add(pid, infosForPid); +    } else { +        mSessionMap.editValueAt(index).push_back(info); +    } +} + +void DrmSessionManager::useSession(const Vector<uint8_t> &sessionId) { +    ALOGV("useSession(%s)", GetSessionIdString(sessionId).string()); + +    Mutex::Autolock lock(mLock); +    for (size_t i = 0; i < mSessionMap.size(); ++i) { +        SessionInfos& infos = mSessionMap.editValueAt(i); +        for (size_t j = 0; j < infos.size(); ++j) { +            SessionInfo& info = infos.editItemAt(j); +            if (isEqualSessionId(sessionId, info.sessionId)) { +                info.timeStamp = getTime_l(); +                return; +            } +        } +    } +} + +void DrmSessionManager::removeSession(const Vector<uint8_t> &sessionId) { +    ALOGV("removeSession(%s)", GetSessionIdString(sessionId).string()); + +    Mutex::Autolock lock(mLock); +    for (size_t i = 0; i < mSessionMap.size(); ++i) { +        SessionInfos& infos = mSessionMap.editValueAt(i); +        for (size_t j = 0; j < infos.size(); ++j) { +            if (isEqualSessionId(sessionId, infos[j].sessionId)) { +                infos.removeAt(j); +                return; +            } +        } +    } +} + +void DrmSessionManager::removeDrm(sp<DrmSessionClientInterface> drm) { +    ALOGV("removeDrm(%p)", drm.get()); + +    Mutex::Autolock lock(mLock); +    bool found = false; +    for (size_t i = 0; i < mSessionMap.size(); ++i) { +        SessionInfos& infos = mSessionMap.editValueAt(i); +        for (size_t j = 0; j < infos.size();) { +            if (infos[j].drm == drm) { +                ALOGV("removed session (%s)", GetSessionIdString(infos[j].sessionId).string()); +                j = infos.removeAt(j); +                found = true; +            } else { +                ++j; +            } +        } +        if (found) { +            break; +        } +    } +} + +bool DrmSessionManager::reclaimSession(int callingPid) { +    
ALOGV("reclaimSession(%d)", callingPid); + +    sp<DrmSessionClientInterface> drm; +    Vector<uint8_t> sessionId; +    int lowestPriorityPid; +    int lowestPriority; +    { +        Mutex::Autolock lock(mLock); +        int callingPriority; +        if (!mProcessInfo->getPriority(callingPid, &callingPriority)) { +            return false; +        } +        if (!getLowestPriority_l(&lowestPriorityPid, &lowestPriority)) { +            return false; +        } +        if (lowestPriority <= callingPriority) { +            return false; +        } + +        if (!getLeastUsedSession_l(lowestPriorityPid, &drm, &sessionId)) { +            return false; +        } +    } + +    if (drm == NULL) { +        return false; +    } + +    ALOGV("reclaim session(%s) opened by pid %d", +            GetSessionIdString(sessionId).string(), lowestPriorityPid); + +    return drm->reclaimSession(sessionId); +} + +int64_t DrmSessionManager::getTime_l() { +    return mTime++; +} + +bool DrmSessionManager::getLowestPriority_l(int* lowestPriorityPid, int* lowestPriority) { +    int pid = -1; +    int priority = -1; +    for (size_t i = 0; i < mSessionMap.size(); ++i) { +        if (mSessionMap.valueAt(i).size() == 0) { +            // no opened session by this process. +            continue; +        } +        int tempPid = mSessionMap.keyAt(i); +        int tempPriority; +        if (!mProcessInfo->getPriority(tempPid, &tempPriority)) { +            // shouldn't happen. 
+            return false; +        } +        if (pid == -1) { +            pid = tempPid; +            priority = tempPriority; +        } else { +            if (tempPriority > priority) { +                pid = tempPid; +                priority = tempPriority; +            } +        } +    } +    if (pid != -1) { +        *lowestPriorityPid = pid; +        *lowestPriority = priority; +    } +    return (pid != -1); +} + +bool DrmSessionManager::getLeastUsedSession_l( +        int pid, sp<DrmSessionClientInterface>* drm, Vector<uint8_t>* sessionId) { +    ssize_t index = mSessionMap.indexOfKey(pid); +    if (index < 0) { +        return false; +    } + +    int leastUsedIndex = -1; +    int64_t minTs = LLONG_MAX; +    const SessionInfos& infos = mSessionMap.valueAt(index); +    for (size_t j = 0; j < infos.size(); ++j) { +        if (leastUsedIndex == -1) { +            leastUsedIndex = j; +            minTs = infos[j].timeStamp; +        } else { +            if (infos[j].timeStamp < minTs) { +                leastUsedIndex = j; +                minTs = infos[j].timeStamp; +            } +        } +    } +    if (leastUsedIndex != -1) { +        *drm = infos[leastUsedIndex].drm; +        *sessionId = infos[leastUsedIndex].sessionId; +    } +    return (leastUsedIndex != -1); +} + +}  // namespace android diff --git a/media/libmediaplayerservice/DrmSessionManager.h b/media/libmediaplayerservice/DrmSessionManager.h new file mode 100644 index 0000000..ba5c268 --- /dev/null +++ b/media/libmediaplayerservice/DrmSessionManager.h @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DRM_SESSION_MANAGER_H_ + +#define DRM_SESSION_MANAGER_H_ + +#include <media/stagefright/foundation/ABase.h> +#include <utils/RefBase.h> +#include <utils/KeyedVector.h> +#include <utils/threads.h> +#include <utils/Vector.h> + +namespace android { + +class DrmSessionManagerTest; +struct DrmSessionClientInterface; +struct ProcessInfoInterface; + +bool isEqualSessionId(const Vector<uint8_t> &sessionId1, const Vector<uint8_t> &sessionId2); + +struct SessionInfo { +    sp<DrmSessionClientInterface> drm; +    Vector<uint8_t> sessionId; +    int64_t timeStamp; +}; + +typedef Vector<SessionInfo > SessionInfos; +typedef KeyedVector<int, SessionInfos > PidSessionInfosMap; + +struct DrmSessionManager : public RefBase { +    static sp<DrmSessionManager> Instance(); + +    DrmSessionManager(); +    DrmSessionManager(sp<ProcessInfoInterface> processInfo); + +    void addSession(int pid, sp<DrmSessionClientInterface> drm, const Vector<uint8_t>& sessionId); +    void useSession(const Vector<uint8_t>& sessionId); +    void removeSession(const Vector<uint8_t>& sessionId); +    void removeDrm(sp<DrmSessionClientInterface> drm); +    bool reclaimSession(int callingPid); + +protected: +    virtual ~DrmSessionManager(); + +private: +    friend class DrmSessionManagerTest; + +    int64_t getTime_l(); +    bool getLowestPriority_l(int* lowestPriorityPid, int* lowestPriority); +    bool getLeastUsedSession_l( +            int pid, sp<DrmSessionClientInterface>* drm, Vector<uint8_t>* sessionId); + +    sp<ProcessInfoInterface> mProcessInfo; +    mutable 
Mutex mLock; +    PidSessionInfosMap mSessionMap; +    int64_t mTime; + +    DISALLOW_EVIL_CONSTRUCTORS(DrmSessionManager); +}; + +}  // namespace android + +#endif  // DRM_SESSION_MANAGER_H_ diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp index 48884b9..d5d12f7 100644 --- a/media/libmediaplayerservice/MediaPlayerFactory.cpp +++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp @@ -70,12 +70,6 @@ static player_type getDefaultPlayerType() {          return STAGEFRIGHT_PLAYER;      } -    // TODO: remove this EXPERIMENTAL developer settings property -    if (property_get("persist.sys.media.use-awesome", value, NULL) -            && !strcasecmp("true", value)) { -        return STAGEFRIGHT_PLAYER; -    } -      return NU_PLAYER;  } @@ -131,12 +125,18 @@ player_type MediaPlayerFactory::getPlayerType(const sp<IMediaPlayer>& client,      GET_PLAYER_TYPE_IMPL(client, source);  } +player_type MediaPlayerFactory::getPlayerType(const sp<IMediaPlayer>& client, +                                              const sp<DataSource> &source) { +    GET_PLAYER_TYPE_IMPL(client, source); +} +  #undef GET_PLAYER_TYPE_IMPL  sp<MediaPlayerBase> MediaPlayerFactory::createPlayer(          player_type playerType,          void* cookie, -        notify_callback_f notifyFunc) { +        notify_callback_f notifyFunc, +        pid_t pid) {      sp<MediaPlayerBase> p;      IFactory* factory;      status_t init_result; @@ -150,7 +150,7 @@ sp<MediaPlayerBase> MediaPlayerFactory::createPlayer(      factory = sFactoryMap.valueFor(playerType);      CHECK(NULL != factory); -    p = factory->createPlayer(); +    p = factory->createPlayer(pid);      if (p == NULL) {          ALOGE("Failed to create player object of type %d, create failed", @@ -218,7 +218,7 @@ class StagefrightPlayerFactory :          return 0.0;      } -    virtual sp<MediaPlayerBase> createPlayer() { +    virtual sp<MediaPlayerBase> createPlayer(pid_t /* pid */) { 
         ALOGV(" create StagefrightPlayer");          return new StagefrightPlayer();      } @@ -273,9 +273,16 @@ class NuPlayerFactory : public MediaPlayerFactory::IFactory {          return 1.0;      } -    virtual sp<MediaPlayerBase> createPlayer() { +    virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/, +                               const sp<DataSource>& /*source*/, +                               float /*curScore*/) { +        // Only NuPlayer supports setting a DataSource source directly. +        return 1.0; +    } + +    virtual sp<MediaPlayerBase> createPlayer(pid_t pid) {          ALOGV(" create NuPlayer"); -        return new NuPlayerDriver; +        return new NuPlayerDriver(pid);      }  }; @@ -291,7 +298,7 @@ class TestPlayerFactory : public MediaPlayerFactory::IFactory {          return 0.0;      } -    virtual sp<MediaPlayerBase> createPlayer() { +    virtual sp<MediaPlayerBase> createPlayer(pid_t /* pid */) {          ALOGV("Create Test Player stub");          return new TestPlayerStub();      } diff --git a/media/libmediaplayerservice/MediaPlayerFactory.h b/media/libmediaplayerservice/MediaPlayerFactory.h index 55ff918..e22a56f 100644 --- a/media/libmediaplayerservice/MediaPlayerFactory.h +++ b/media/libmediaplayerservice/MediaPlayerFactory.h @@ -43,7 +43,11 @@ class MediaPlayerFactory {                                     const sp<IStreamSource> &/*source*/,                                     float /*curScore*/) { return 0.0; } -        virtual sp<MediaPlayerBase> createPlayer() = 0; +        virtual float scoreFactory(const sp<IMediaPlayer>& /*client*/, +                                   const sp<DataSource> &/*source*/, +                                   float /*curScore*/) { return 0.0; } + +        virtual sp<MediaPlayerBase> createPlayer(pid_t pid) = 0;      };      static status_t registerFactory(IFactory* factory, @@ -57,10 +61,13 @@ class MediaPlayerFactory {                                       int64_t length);      
static player_type getPlayerType(const sp<IMediaPlayer>& client,                                       const sp<IStreamSource> &source); +    static player_type getPlayerType(const sp<IMediaPlayer>& client, +                                     const sp<DataSource> &source);      static sp<MediaPlayerBase> createPlayer(player_type playerType,                                              void* cookie, -                                            notify_callback_f notifyFunc); +                                            notify_callback_f notifyFunc, +                                            pid_t pid);      static void registerBuiltinFactories(); diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 694f1a4..56521a2 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -34,7 +34,6 @@  #include <utils/misc.h> -#include <binder/IBatteryStats.h>  #include <binder/IPCThreadState.h>  #include <binder/IServiceManager.h>  #include <binder/MemoryHeapBase.h> @@ -46,6 +45,7 @@  #include <utils/Timers.h>  #include <utils/Vector.h> +#include <media/AudioPolicyHelper.h>  #include <media/IMediaHTTPService.h>  #include <media/IRemoteDisplay.h>  #include <media/IRemoteDisplayClient.h> @@ -60,6 +60,7 @@  #include <media/stagefright/AudioPlayer.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/ALooperRoster.h> +#include <mediautils/BatteryNotifier.h>  #include <system/audio.h> @@ -287,16 +288,9 @@ MediaPlayerService::MediaPlayerService()      // reset battery stats      // if the mediaserver has crashed, battery stats could be left      // in bad state, reset the state upon service start. 
-    const sp<IServiceManager> sm(defaultServiceManager()); -    if (sm != NULL) { -        const String16 name("batterystats"); -        sp<IBatteryStats> batteryStats = -                interface_cast<IBatteryStats>(sm->getService(name)); -        if (batteryStats != NULL) { -            batteryStats->noteResetVideo(); -            batteryStats->noteResetAudio(); -        } -    } +    BatteryNotifier& notifier(BatteryNotifier::getInstance()); +    notifier.noteResetVideo(); +    notifier.noteResetAudio();      MediaPlayerFactory::registerBuiltinFactories();  } @@ -306,10 +300,10 @@ MediaPlayerService::~MediaPlayerService()      ALOGV("MediaPlayerService destroyed");  } -sp<IMediaRecorder> MediaPlayerService::createMediaRecorder() +sp<IMediaRecorder> MediaPlayerService::createMediaRecorder(const String16 &opPackageName)  {      pid_t pid = IPCThreadState::self()->getCallingPid(); -    sp<MediaRecorderClient> recorder = new MediaRecorderClient(this, pid); +    sp<MediaRecorderClient> recorder = new MediaRecorderClient(this, pid, opPackageName);      wp<MediaRecorderClient> w = recorder;      Mutex::Autolock lock(mLock);      mMediaRecorderClients.add(w); @@ -380,12 +374,13 @@ sp<IHDCP> MediaPlayerService::makeHDCP(bool createEncryptionModule) {  }  sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay( +        const String16 &opPackageName,          const sp<IRemoteDisplayClient>& client, const String8& iface) {      if (!checkPermission("android.permission.CONTROL_WIFI_DISPLAY")) {          return NULL;      } -    return new RemoteDisplay(client, iface.string()); +    return new RemoteDisplay(opPackageName, client, iface.string());  }  status_t MediaPlayerService::AudioOutput::dump(int fd, const Vector<String16>& args) const @@ -412,7 +407,7 @@ status_t MediaPlayerService::AudioOutput::dump(int fd, const Vector<String16>& a      return NO_ERROR;  } -status_t MediaPlayerService::Client::dump(int fd, const Vector<String16>& args) const +status_t 
MediaPlayerService::Client::dump(int fd, const Vector<String16>& args)  {      const size_t SIZE = 256;      char buffer[SIZE]; @@ -441,6 +436,9 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)      const size_t SIZE = 256;      char buffer[SIZE];      String8 result; +    SortedVector< sp<Client> > clients; //to serialise the mutex unlock & client destruction. +    SortedVector< sp<MediaRecorderClient> > mediaRecorderClients; +      if (checkCallingPermission(String16("android.permission.DUMP")) == false) {          snprintf(buffer, SIZE, "Permission Denial: "                  "can't dump MediaPlayerService from pid=%d, uid=%d\n", @@ -452,6 +450,7 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)          for (int i = 0, n = mClients.size(); i < n; ++i) {              sp<Client> c = mClients[i].promote();              if (c != 0) c->dump(fd, args); +            clients.add(c);          }          if (mMediaRecorderClients.size() == 0) {                  result.append(" No media recorder client\n\n"); @@ -464,6 +463,7 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args)                      write(fd, result.string(), result.size());                      result = "\n";                      c->dump(fd, args); +                    mediaRecorderClients.add(c);                  }              }          } @@ -633,7 +633,7 @@ sp<MediaPlayerBase> MediaPlayerService::Client::createPlayer(player_type playerT          p.clear();      }      if (p == NULL) { -        p = MediaPlayerFactory::createPlayer(playerType, this, notify); +        p = MediaPlayerFactory::createPlayer(playerType, this, notify, mPid);      }      if (p != NULL) { @@ -655,6 +655,7 @@ sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(      }      if (!p->hardwareOutput()) { +        Mutex::Autolock l(mLock);          mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),               
   mPid, mAudioAttributes);          static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput); @@ -740,7 +741,7 @@ status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64          return UNKNOWN_ERROR;      } -    ALOGV("st_dev  = %llu", sb.st_dev); +    ALOGV("st_dev  = %llu", static_cast<uint64_t>(sb.st_dev));      ALOGV("st_mode = %u", sb.st_mode);      ALOGV("st_uid  = %lu", static_cast<unsigned long>(sb.st_uid));      ALOGV("st_gid  = %lu", static_cast<unsigned long>(sb.st_gid)); @@ -784,6 +785,19 @@ status_t MediaPlayerService::Client::setDataSource(      return mStatus;  } +status_t MediaPlayerService::Client::setDataSource( +        const sp<IDataSource> &source) { +    sp<DataSource> dataSource = DataSource::CreateFromIDataSource(source); +    player_type playerType = MediaPlayerFactory::getPlayerType(this, dataSource); +    sp<MediaPlayerBase> p = setDataSource_pre(playerType); +    if (p == NULL) { +        return NO_INIT; +    } +    // now set data source +    setDataSource_post(p, p->setDataSource(dataSource)); +    return mStatus; +} +  void MediaPlayerService::Client::disconnectNativeWindow() {      if (mConnectedWindow != NULL) {          status_t err = native_window_api_disconnect(mConnectedWindow.get(), @@ -961,6 +975,54 @@ status_t MediaPlayerService::Client::isPlaying(bool* state)      return NO_ERROR;  } +status_t MediaPlayerService::Client::setPlaybackSettings(const AudioPlaybackRate& rate) +{ +    ALOGV("[%d] setPlaybackSettings(%f, %f, %d, %d)", +            mConnId, rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode); +    sp<MediaPlayerBase> p = getPlayer(); +    if (p == 0) return UNKNOWN_ERROR; +    return p->setPlaybackSettings(rate); +} + +status_t MediaPlayerService::Client::getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) +{ +    sp<MediaPlayerBase> p = getPlayer(); +    if (p == 0) return UNKNOWN_ERROR; +    status_t ret = p->getPlaybackSettings(rate); +    if (ret == 
NO_ERROR) { +        ALOGV("[%d] getPlaybackSettings(%f, %f, %d, %d)", +                mConnId, rate->mSpeed, rate->mPitch, rate->mFallbackMode, rate->mStretchMode); +    } else { +        ALOGV("[%d] getPlaybackSettings returned %d", mConnId, ret); +    } +    return ret; +} + +status_t MediaPlayerService::Client::setSyncSettings( +        const AVSyncSettings& sync, float videoFpsHint) +{ +    ALOGV("[%d] setSyncSettings(%u, %u, %f, %f)", +            mConnId, sync.mSource, sync.mAudioAdjustMode, sync.mTolerance, videoFpsHint); +    sp<MediaPlayerBase> p = getPlayer(); +    if (p == 0) return UNKNOWN_ERROR; +    return p->setSyncSettings(sync, videoFpsHint); +} + +status_t MediaPlayerService::Client::getSyncSettings( +        AVSyncSettings* sync /* nonnull */, float* videoFps /* nonnull */) +{ +    sp<MediaPlayerBase> p = getPlayer(); +    if (p == 0) return UNKNOWN_ERROR; +    status_t ret = p->getSyncSettings(sync, videoFps); +    if (ret == NO_ERROR) { +        ALOGV("[%d] getSyncSettings(%u, %u, %f, %f)", +                mConnId, sync->mSource, sync->mAudioAdjustMode, sync->mTolerance, *videoFps); +    } else { +        ALOGV("[%d] getSyncSettings returned %d", mConnId, ret); +    } +    return ret; +} +  status_t MediaPlayerService::Client::getCurrentPosition(int *msec)  {      ALOGV("getCurrentPosition"); @@ -1040,6 +1102,9 @@ status_t MediaPlayerService::Client::setAudioAttributes_l(const Parcel &parcel)  {      if (mAudioAttributes != NULL) { free(mAudioAttributes); }      mAudioAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t)); +    if (mAudioAttributes == NULL) { +        return NO_MEMORY; +    }      unmarshallAudioAttributes(parcel, mAudioAttributes);      ALOGV("setAudioAttributes_l() usage=%d content=%d flags=0x%x tags=%s", @@ -1275,29 +1340,42 @@ MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid, int pid,        mCallbackCookie(NULL),        mCallbackData(NULL),        mBytesWritten(0), +      
mStreamType(AUDIO_STREAM_MUSIC), +      mLeftVolume(1.0), +      mRightVolume(1.0), +      mPlaybackRate(AUDIO_PLAYBACK_RATE_DEFAULT), +      mSampleRateHz(0), +      mMsecsPerFrame(0), +      mFrameSize(0),        mSessionId(sessionId),        mUid(uid),        mPid(pid), -      mFlags(AUDIO_OUTPUT_FLAG_NONE) { +      mSendLevel(0.0), +      mAuxEffectId(0), +      mFlags(AUDIO_OUTPUT_FLAG_NONE) +{      ALOGV("AudioOutput(%d)", sessionId); -    mStreamType = AUDIO_STREAM_MUSIC; -    mLeftVolume = 1.0; -    mRightVolume = 1.0; -    mPlaybackRatePermille = 1000; -    mSampleRateHz = 0; -    mMsecsPerFrame = 0; -    mAuxEffectId = 0; -    mSendLevel = 0.0; +    if (attr != NULL) { +        mAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t)); +        if (mAttributes != NULL) { +            memcpy(mAttributes, attr, sizeof(audio_attributes_t)); +            mStreamType = audio_attributes_to_stream_type(attr); +        } +    } else { +        mAttributes = NULL; +    } +      setMinBufferCount(); -    mAttributes = attr;  }  MediaPlayerService::AudioOutput::~AudioOutput()  {      close(); +    free(mAttributes);      delete mCallbackData;  } +//static  void MediaPlayerService::AudioOutput::setMinBufferCount()  {      char value[PROPERTY_VALUE_MAX]; @@ -1307,92 +1385,123 @@ void MediaPlayerService::AudioOutput::setMinBufferCount()      }  } +// static  bool MediaPlayerService::AudioOutput::isOnEmulator()  { -    setMinBufferCount(); +    setMinBufferCount(); // benign race wrt other threads      return mIsOnEmulator;  } +// static  int MediaPlayerService::AudioOutput::getMinBufferCount()  { -    setMinBufferCount(); +    setMinBufferCount(); // benign race wrt other threads      return mMinBufferCount;  }  ssize_t MediaPlayerService::AudioOutput::bufferSize() const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT; -    return mTrack->frameCount() * frameSize(); +    return mTrack->frameCount() * mFrameSize;  }  ssize_t 
MediaPlayerService::AudioOutput::frameCount() const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT;      return mTrack->frameCount();  }  ssize_t MediaPlayerService::AudioOutput::channelCount() const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT;      return mTrack->channelCount();  }  ssize_t MediaPlayerService::AudioOutput::frameSize() const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT; -    return mTrack->frameSize(); +    return mFrameSize;  }  uint32_t MediaPlayerService::AudioOutput::latency () const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return 0;      return mTrack->latency();  }  float MediaPlayerService::AudioOutput::msecsPerFrame() const  { +    Mutex::Autolock lock(mLock);      return mMsecsPerFrame;  }  status_t MediaPlayerService::AudioOutput::getPosition(uint32_t *position) const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT;      return mTrack->getPosition(position);  }  status_t MediaPlayerService::AudioOutput::getTimestamp(AudioTimestamp &ts) const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT;      return mTrack->getTimestamp(ts);  }  status_t MediaPlayerService::AudioOutput::getFramesWritten(uint32_t *frameswritten) const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT; -    *frameswritten = mBytesWritten / frameSize(); +    *frameswritten = mBytesWritten / mFrameSize;      return OK;  }  status_t MediaPlayerService::AudioOutput::setParameters(const String8& keyValuePairs)  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return NO_INIT;      return mTrack->setParameters(keyValuePairs);  }  String8  MediaPlayerService::AudioOutput::getParameters(const String8& keys)  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return String8::empty();      return mTrack->getParameters(keys);  }  void MediaPlayerService::AudioOutput::setAudioAttributes(const 
audio_attributes_t * attributes) { -    mAttributes = attributes; +    Mutex::Autolock lock(mLock); +    if (attributes == NULL) { +        free(mAttributes); +        mAttributes = NULL; +    } else { +        if (mAttributes == NULL) { +            mAttributes = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t)); +        } +        memcpy(mAttributes, attributes, sizeof(audio_attributes_t)); +        mStreamType = audio_attributes_to_stream_type(attributes); +    }  } -void MediaPlayerService::AudioOutput::deleteRecycledTrack() +void MediaPlayerService::AudioOutput::setAudioStreamType(audio_stream_type_t streamType)  { -    ALOGV("deleteRecycledTrack"); +    Mutex::Autolock lock(mLock); +    // do not allow direct stream type modification if attributes have been set +    if (mAttributes == NULL) { +        mStreamType = streamType; +    } +} +void MediaPlayerService::AudioOutput::deleteRecycledTrack_l() +{ +    ALOGV("deleteRecycledTrack_l");      if (mRecycledTrack != 0) {          if (mCallbackData != NULL) { @@ -1410,33 +1519,28 @@ void MediaPlayerService::AudioOutput::deleteRecycledTrack()          // AudioFlinger to drain the track.          
mRecycledTrack.clear(); +        close_l();          delete mCallbackData;          mCallbackData = NULL; -        close();      }  } +void MediaPlayerService::AudioOutput::close_l() +{ +    mTrack.clear(); +} +  status_t MediaPlayerService::AudioOutput::open(          uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,          audio_format_t format, int bufferCount,          AudioCallback cb, void *cookie,          audio_output_flags_t flags, -        const audio_offload_info_t *offloadInfo) +        const audio_offload_info_t *offloadInfo, +        bool doNotReconnect, +        uint32_t suggestedFrameCount)  { -    mCallback = cb; -    mCallbackCookie = cookie; - -    // Check argument "bufferCount" against the mininum buffer count -    if (bufferCount < mMinBufferCount) { -        ALOGD("bufferCount (%d) is too small and increased to %d", bufferCount, mMinBufferCount); -        bufferCount = mMinBufferCount; - -    }      ALOGV("open(%u, %d, 0x%x, 0x%x, %d, %d 0x%x)", sampleRate, channelCount, channelMask,                  format, bufferCount, mSessionId, flags); -    uint32_t afSampleRate; -    size_t afFrameCount; -    size_t frameCount;      // offloading is only supported in callback mode for now.      // offloadInfo must be present if offload flag is set @@ -1445,20 +1549,36 @@ status_t MediaPlayerService::AudioOutput::open(          return BAD_VALUE;      } +    // compute frame count for the AudioTrack internal buffer +    size_t frameCount;      if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {          frameCount = 0; // AudioTrack will get frame count from AudioFlinger      } else { +        // try to estimate the buffer processing fetch size from AudioFlinger. +        // framesPerBuffer is approximate and generally correct, except when it's not :-).          
uint32_t afSampleRate;          size_t afFrameCount; -          if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) != NO_ERROR) {              return NO_INIT;          }          if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) != NO_ERROR) {              return NO_INIT;          } +        const size_t framesPerBuffer = +                (unsigned long long)sampleRate * afFrameCount / afSampleRate; -        frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate; +        if (bufferCount == 0) { +            // use suggestedFrameCount +            bufferCount = (suggestedFrameCount + framesPerBuffer - 1) / framesPerBuffer; +        } +        // Check argument bufferCount against the minimum buffer count +        if (bufferCount != 0 && bufferCount < mMinBufferCount) { +            ALOGV("bufferCount (%d) increased to %d", bufferCount, mMinBufferCount); +            bufferCount = mMinBufferCount; +        } +        // if frameCount is 0, then AudioTrack will get frame count from AudioFlinger +        // which will be the minimum size permitted. 
+        frameCount = bufferCount * framesPerBuffer;      }      if (channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER) { @@ -1469,6 +1589,10 @@ status_t MediaPlayerService::AudioOutput::open(          }      } +    Mutex::Autolock lock(mLock); +    mCallback = cb; +    mCallbackCookie = cookie; +      // Check whether we can recycle the track      bool reuse = false;      bool bothOffloaded = false; @@ -1508,7 +1632,7 @@ status_t MediaPlayerService::AudioOutput::open(      // we must close the previous output before opening a new one      if (bothOffloaded && !reuse) {          ALOGV("both offloaded and not recycling"); -        deleteRecycledTrack(); +        deleteRecycledTrack_l();      }      sp<AudioTrack> t; @@ -1539,7 +1663,8 @@ status_t MediaPlayerService::AudioOutput::open(                      offloadInfo,                      mUid,                      mPid, -                    mAttributes); +                    mAttributes, +                    doNotReconnect);          } else {              t = new AudioTrack(                      mStreamType, @@ -1556,12 +1681,14 @@ status_t MediaPlayerService::AudioOutput::open(                      NULL, // offload info                      mUid,                      mPid, -                    mAttributes); +                    mAttributes, +                    doNotReconnect);          }          if ((t == 0) || (t->initCheck() != NO_ERROR)) {              ALOGE("Unable to create audio track");              delete newcbd; +            // t goes out of scope, so reference count drops to zero              return NO_INIT;          } else {              // successful AudioTrack initialization implies a legacy stream type was generated @@ -1583,7 +1710,7 @@ status_t MediaPlayerService::AudioOutput::open(          if (reuse) {              ALOGV("chaining to next output and recycling track"); -            close(); +            close_l();              mTrack = mRecycledTrack;              mRecycledTrack.clear();             
 if (mCallbackData != NULL) { @@ -1597,7 +1724,7 @@ status_t MediaPlayerService::AudioOutput::open(      // we're not going to reuse the track, unblock and flush it      // this was done earlier if both tracks are offloaded      if (!bothOffloaded) { -        deleteRecycledTrack(); +        deleteRecycledTrack_l();      }      CHECK((t != NULL) && ((mCallback == NULL) || (newcbd != NULL))); @@ -1607,17 +1734,20 @@ status_t MediaPlayerService::AudioOutput::open(      t->setVolume(mLeftVolume, mRightVolume);      mSampleRateHz = sampleRate; -    mFlags = flags; -    mMsecsPerFrame = mPlaybackRatePermille / (float) sampleRate; +    mFlags = t->getFlags(); // we suggest the flags above, but new AudioTrack() may not grant it. +    mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate); +    mFrameSize = t->frameSize();      uint32_t pos;      if (t->getPosition(&pos) == OK) { -        mBytesWritten = uint64_t(pos) * t->frameSize(); +        mBytesWritten = uint64_t(pos) * mFrameSize;      }      mTrack = t;      status_t res = NO_ERROR; -    if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) { -        res = t->setSampleRate(mPlaybackRatePermille * mSampleRateHz / 1000); +    // Note some output devices may give us a direct track even though we don't specify it. +    // Example: Line application b/17459982. 
+    if ((mFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) { +        res = t->setPlaybackRate(mPlaybackRate);          if (res == NO_ERROR) {              t->setAuxEffectSendLevel(mSendLevel);              res = t->attachAuxEffect(mAuxEffectId); @@ -1630,6 +1760,7 @@ status_t MediaPlayerService::AudioOutput::open(  status_t MediaPlayerService::AudioOutput::start()  {      ALOGV("start"); +    Mutex::Autolock lock(mLock);      if (mCallbackData != NULL) {          mCallbackData->endTrackSwitch();      } @@ -1642,35 +1773,93 @@ status_t MediaPlayerService::AudioOutput::start()  }  void MediaPlayerService::AudioOutput::setNextOutput(const sp<AudioOutput>& nextOutput) { +    Mutex::Autolock lock(mLock);      mNextOutput = nextOutput;  } -  void MediaPlayerService::AudioOutput::switchToNextOutput() {      ALOGV("switchToNextOutput"); -    if (mNextOutput != NULL) { -        if (mCallbackData != NULL) { -            mCallbackData->beginTrackSwitch(); + +    // Try to acquire the callback lock before moving track (without incurring deadlock). 
+    const unsigned kMaxSwitchTries = 100; +    Mutex::Autolock lock(mLock); +    for (unsigned tries = 0;;) { +        if (mTrack == 0) { +            return;          } -        delete mNextOutput->mCallbackData; -        mNextOutput->mCallbackData = mCallbackData; -        mCallbackData = NULL; -        mNextOutput->mRecycledTrack = mTrack; -        mTrack.clear(); -        mNextOutput->mSampleRateHz = mSampleRateHz; -        mNextOutput->mMsecsPerFrame = mMsecsPerFrame; -        mNextOutput->mBytesWritten = mBytesWritten; -        mNextOutput->mFlags = mFlags; +        if (mNextOutput != NULL && mNextOutput != this) { +            if (mCallbackData != NULL) { +                // two alternative approaches +#if 1 +                CallbackData *callbackData = mCallbackData; +                mLock.unlock(); +                // proper acquisition sequence +                callbackData->lock(); +                mLock.lock(); +                // Caution: it is unlikely that someone deleted our callback or changed our target +                if (callbackData != mCallbackData || mNextOutput == NULL || mNextOutput == this) { +                    // fatal if we are starved out. +                    LOG_ALWAYS_FATAL_IF(++tries > kMaxSwitchTries, +                            "switchToNextOutput() cannot obtain correct lock sequence"); +                    callbackData->unlock(); +                    continue; +                } +                callbackData->mSwitching = true; // begin track switch +#else +                // tryBeginTrackSwitch() returns false if the callback has the lock. +                if (!mCallbackData->tryBeginTrackSwitch()) { +                    // fatal if we are starved out. 
+                    LOG_ALWAYS_FATAL_IF(++tries > kMaxSwitchTries, +                            "switchToNextOutput() cannot obtain callback lock"); +                    mLock.unlock(); +                    usleep(5 * 1000 /* usec */); // allow callback to use AudioOutput +                    mLock.lock(); +                    continue; +                } +#endif +            } + +            Mutex::Autolock nextLock(mNextOutput->mLock); + +            // If the next output track is not NULL, then it has been +            // opened already for playback. +            // This is possible even without the next player being started, +            // for example, the next player could be prepared and seeked. +            // +            // Presuming it isn't advisable to force the track over. +             if (mNextOutput->mTrack == NULL) { +                ALOGD("Recycling track for gapless playback"); +                delete mNextOutput->mCallbackData; +                mNextOutput->mCallbackData = mCallbackData; +                mNextOutput->mRecycledTrack = mTrack; +                mNextOutput->mSampleRateHz = mSampleRateHz; +                mNextOutput->mMsecsPerFrame = mMsecsPerFrame; +                mNextOutput->mBytesWritten = mBytesWritten; +                mNextOutput->mFlags = mFlags; +                mNextOutput->mFrameSize = mFrameSize; +                close_l(); +                mCallbackData = NULL;  // destruction handled by mNextOutput +            } else { +                ALOGW("Ignoring gapless playback because next player has already started"); +                // remove track in case resource needed for future players. +                if (mCallbackData != NULL) { +                    mCallbackData->endTrackSwitch();  // release lock for callbacks before close. 
+                } +                close_l(); +            } +        } +        break;      }  } -ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size) +ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size, bool blocking)  { +    Mutex::Autolock lock(mLock);      LOG_ALWAYS_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");      //ALOGV("write(%p, %u)", buffer, size);      if (mTrack != 0) { -        ssize_t ret = mTrack->write(buffer, size); +        ssize_t ret = mTrack->write(buffer, size, blocking);          if (ret >= 0) {              mBytesWritten += ret;          } @@ -1682,30 +1871,37 @@ ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)  void MediaPlayerService::AudioOutput::stop()  {      ALOGV("stop"); +    Mutex::Autolock lock(mLock); +    mBytesWritten = 0;      if (mTrack != 0) mTrack->stop();  }  void MediaPlayerService::AudioOutput::flush()  {      ALOGV("flush"); +    Mutex::Autolock lock(mLock); +    mBytesWritten = 0;      if (mTrack != 0) mTrack->flush();  }  void MediaPlayerService::AudioOutput::pause()  {      ALOGV("pause"); +    Mutex::Autolock lock(mLock);      if (mTrack != 0) mTrack->pause();  }  void MediaPlayerService::AudioOutput::close()  {      ALOGV("close"); -    mTrack.clear(); +    Mutex::Autolock lock(mLock); +    close_l();  }  void MediaPlayerService::AudioOutput::setVolume(float left, float right)  {      ALOGV("setVolume(%f, %f)", left, right); +    Mutex::Autolock lock(mLock);      mLeftVolume = left;      mRightVolume = right;      if (mTrack != 0) { @@ -1713,25 +1909,44 @@ void MediaPlayerService::AudioOutput::setVolume(float left, float right)      }  } -status_t MediaPlayerService::AudioOutput::setPlaybackRatePermille(int32_t ratePermille) +status_t MediaPlayerService::AudioOutput::setPlaybackRate(const AudioPlaybackRate &rate)  { -    ALOGV("setPlaybackRatePermille(%d)", ratePermille); -    status_t res = 
NO_ERROR; -    if (mTrack != 0) { -        res = mTrack->setSampleRate(ratePermille * mSampleRateHz / 1000); -    } else { -        res = NO_INIT; +    ALOGV("setPlaybackRate(%f %f %d %d)", +                rate.mSpeed, rate.mPitch, rate.mFallbackMode, rate.mStretchMode); +    Mutex::Autolock lock(mLock); +    if (mTrack == 0) { +        // remember rate so that we can set it when the track is opened +        mPlaybackRate = rate; +        return OK;      } -    mPlaybackRatePermille = ratePermille; +    status_t res = mTrack->setPlaybackRate(rate); +    if (res != NO_ERROR) { +        return res; +    } +    // rate.mSpeed is always greater than 0 if setPlaybackRate succeeded +    CHECK_GT(rate.mSpeed, 0.f); +    mPlaybackRate = rate;      if (mSampleRateHz != 0) { -        mMsecsPerFrame = mPlaybackRatePermille / (float) mSampleRateHz; +        mMsecsPerFrame = 1E3f / (rate.mSpeed * mSampleRateHz);      }      return res;  } +status_t MediaPlayerService::AudioOutput::getPlaybackRate(AudioPlaybackRate *rate) +{ +    ALOGV("setPlaybackRate"); +    Mutex::Autolock lock(mLock); +    if (mTrack == 0) { +        return NO_INIT; +    } +    *rate = mTrack->getPlaybackRate(); +    return NO_ERROR; +} +  status_t MediaPlayerService::AudioOutput::setAuxEffectSendLevel(float level)  {      ALOGV("setAuxEffectSendLevel(%f)", level); +    Mutex::Autolock lock(mLock);      mSendLevel = level;      if (mTrack != 0) {          return mTrack->setAuxEffectSendLevel(level); @@ -1742,6 +1957,7 @@ status_t MediaPlayerService::AudioOutput::setAuxEffectSendLevel(float level)  status_t MediaPlayerService::AudioOutput::attachAuxEffect(int effectId)  {      ALOGV("attachAuxEffect(%d)", effectId); +    Mutex::Autolock lock(mLock);      mAuxEffectId = effectId;      if (mTrack != 0) {          return mTrack->attachAuxEffect(effectId); @@ -1754,6 +1970,7 @@ void MediaPlayerService::AudioOutput::CallbackWrapper(          int event, void *cookie, void *info) {      //ALOGV("callbackwrapper");  
    CallbackData *data = (CallbackData*)cookie; +    // lock to ensure we aren't caught in the middle of a track switch.      data->lock();      AudioOutput *me = data->getOutput();      AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info; @@ -1773,20 +1990,23 @@ void MediaPlayerService::AudioOutput::CallbackWrapper(                  me, buffer->raw, buffer->size, me->mCallbackCookie,                  CB_EVENT_FILL_BUFFER); -        if ((me->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0 && -            actualSize == 0 && buffer->size > 0 && me->mNextOutput == NULL) { -            // We've reached EOS but the audio track is not stopped yet, -            // keep playing silence. +        // Log when no data is returned from the callback. +        // (1) We may have no data (especially with network streaming sources). +        // (2) We may have reached the EOS and the audio track is not stopped yet. +        // Note that AwesomePlayer/AudioPlayer will only return zero size when it reaches the EOS. +        // NuPlayerRenderer will return zero when it doesn't have data (it doesn't block to fill). +        // +        // This is a benign busy-wait, with the next data request generated 10 ms or more later; +        // nevertheless for power reasons, we don't want to see too many of these. -            memset(buffer->raw, 0, buffer->size); -            actualSize = buffer->size; -        } +        ALOGV_IF(actualSize == 0 && buffer->size > 0, "callbackwrapper: empty buffer returned"); +        me->mBytesWritten += actualSize;  // benign race with reader.          
buffer->size = actualSize;          } break; -      case AudioTrack::EVENT_STREAM_END: +        // currently only occurs for offloaded callbacks          ALOGV("callbackwrapper: deliver EVENT_STREAM_END");          (*me->mCallback)(me, NULL /* buffer */, 0 /* size */,                  me->mCallbackCookie, CB_EVENT_STREAM_END); @@ -1798,6 +2018,19 @@ void MediaPlayerService::AudioOutput::CallbackWrapper(                  me->mCallbackCookie, CB_EVENT_TEAR_DOWN);          break; +    case AudioTrack::EVENT_UNDERRUN: +        // This occurs when there is no data available, typically +        // when there is a failure to supply data to the AudioTrack.  It can also +        // occur in non-offloaded mode when the audio device comes out of standby. +        // +        // If an AudioTrack underruns it outputs silence. Since this happens suddenly +        // it may sound like an audible pop or glitch. +        // +        // The underrun event is sent once per track underrun; the condition is reset +        // when more data is sent to the AudioTrack. 
+        ALOGI("callbackwrapper: EVENT_UNDERRUN (discarded)"); +        break; +      default:          ALOGE("received unknown event type: %d inside CallbackWrapper !", event);      } @@ -1807,11 +2040,13 @@ void MediaPlayerService::AudioOutput::CallbackWrapper(  int MediaPlayerService::AudioOutput::getSessionId() const  { +    Mutex::Autolock lock(mLock);      return mSessionId;  }  uint32_t MediaPlayerService::AudioOutput::getSampleRate() const  { +    Mutex::Autolock lock(mLock);      if (mTrack == 0) return 0;      return mTrack->getSampleRate();  } diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index fad3447..60d4617 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -34,7 +34,10 @@  namespace android { +struct AudioPlaybackRate;  class AudioTrack; +struct AVSyncSettings; +class IDataSource;  class IMediaRecorder;  class IMediaMetadataRetriever;  class IOMX; @@ -94,21 +97,24 @@ class MediaPlayerService : public BnMediaPlayerService                  audio_format_t format, int bufferCount,                  AudioCallback cb, void *cookie,                  audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, -                const audio_offload_info_t *offloadInfo = NULL); +                const audio_offload_info_t *offloadInfo = NULL, +                bool doNotReconnect = false, +                uint32_t suggestedFrameCount = 0);          virtual status_t        start(); -        virtual ssize_t         write(const void* buffer, size_t size); +        virtual ssize_t         write(const void* buffer, size_t size, bool blocking = true);          virtual void            stop();          virtual void            flush();          virtual void            pause();          virtual void            close(); -                void            setAudioStreamType(audio_stream_type_t streamType) { -                                            
                            mStreamType = streamType; } +                void            setAudioStreamType(audio_stream_type_t streamType);          virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }                  void            setAudioAttributes(const audio_attributes_t * attributes);                  void            setVolume(float left, float right); -        virtual status_t        setPlaybackRatePermille(int32_t ratePermille); +        virtual status_t        setPlaybackRate(const AudioPlaybackRate& rate); +        virtual status_t        getPlaybackRate(AudioPlaybackRate* rate /* nonnull */); +                  status_t        setAuxEffectSendLevel(float level);                  status_t        attachAuxEffect(int effectId);          virtual status_t        dump(int fd, const Vector<String16>& args) const; @@ -125,7 +131,8 @@ class MediaPlayerService : public BnMediaPlayerService          static void             setMinBufferCount();          static void             CallbackWrapper(                  int event, void *me, void *info); -               void             deleteRecycledTrack(); +               void             deleteRecycledTrack_l(); +               void             close_l();          sp<AudioTrack>          mTrack;          sp<AudioTrack>          mRecycledTrack; @@ -135,38 +142,53 @@ class MediaPlayerService : public BnMediaPlayerService          CallbackData *          mCallbackData;          uint64_t                mBytesWritten;          audio_stream_type_t     mStreamType; -        const audio_attributes_t *mAttributes; +        audio_attributes_t *    mAttributes;          float                   mLeftVolume;          float                   mRightVolume; -        int32_t                 mPlaybackRatePermille; +        AudioPlaybackRate       mPlaybackRate;          uint32_t                mSampleRateHz; // sample rate of the content, as set in open()          float                   mMsecsPerFrame; +        
size_t                  mFrameSize;          int                     mSessionId;          int                     mUid;          int                     mPid;          float                   mSendLevel;          int                     mAuxEffectId; +        audio_output_flags_t    mFlags; +        mutable Mutex           mLock; + +        // static variables below not protected by mutex          static bool             mIsOnEmulator;          static int              mMinBufferCount;  // 12 for emulator; otherwise 4 -        audio_output_flags_t    mFlags;          // CallbackData is what is passed to the AudioTrack as the "user" data.          // We need to be able to target this to a different Output on the fly,          // so we can't use the Output itself for this.          class CallbackData { +            friend AudioOutput;          public:              CallbackData(AudioOutput *cookie) {                  mData = cookie;                  mSwitching = false;              } -            AudioOutput *   getOutput() { return mData;} +            AudioOutput *   getOutput() const { return mData; }              void            setOutput(AudioOutput* newcookie) { mData = newcookie; }              // lock/unlock are used by the callback before accessing the payload of this object -            void            lock() { mLock.lock(); } -            void            unlock() { mLock.unlock(); } -            // beginTrackSwitch/endTrackSwitch are used when this object is being handed over +            void            lock() const { mLock.lock(); } +            void            unlock() const { mLock.unlock(); } + +            // tryBeginTrackSwitch/endTrackSwitch are used when the CallbackData is handed over              // to the next sink. -            void            beginTrackSwitch() { mLock.lock(); mSwitching = true; } + +            // tryBeginTrackSwitch() returns true only if it obtains the lock. 
+            bool            tryBeginTrackSwitch() { +                LOG_ALWAYS_FATAL_IF(mSwitching, "tryBeginTrackSwitch() already called"); +                if (mLock.tryLock() != OK) { +                    return false; +                } +                mSwitching = true; +                return true; +            }              void            endTrackSwitch() {                  if (mSwitching) {                      mLock.unlock(); @@ -175,7 +197,7 @@ class MediaPlayerService : public BnMediaPlayerService              }          private:              AudioOutput *   mData; -            mutable Mutex   mLock; +            mutable Mutex   mLock; // a recursive mutex might make this unnecessary.              bool            mSwitching;              DISALLOW_EVIL_CONSTRUCTORS(CallbackData);          }; @@ -187,7 +209,7 @@ public:      static  void                instantiate();      // IMediaPlayerService interface -    virtual sp<IMediaRecorder>  createMediaRecorder(); +    virtual sp<IMediaRecorder>  createMediaRecorder(const String16 &opPackageName);      void    removeMediaRecorderClient(wp<MediaRecorderClient> client);      virtual sp<IMediaMetadataRetriever> createMetadataRetriever(); @@ -199,8 +221,8 @@ public:      virtual sp<IDrm>            makeDrm();      virtual sp<IHDCP>           makeHDCP(bool createEncryptionModule); -    virtual sp<IRemoteDisplay> listenForRemoteDisplay(const sp<IRemoteDisplayClient>& client, -            const String8& iface); +    virtual sp<IRemoteDisplay> listenForRemoteDisplay(const String16 &opPackageName, +            const sp<IRemoteDisplayClient>& client, const String8& iface);      virtual status_t            dump(int fd, const Vector<String16>& args);              void                removeClient(wp<Client> client); @@ -261,6 +283,11 @@ private:          virtual status_t        stop();          virtual status_t        pause();          virtual status_t        isPlaying(bool* state); +        virtual status_t        
setPlaybackSettings(const AudioPlaybackRate& rate); +        virtual status_t        getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */); +        virtual status_t        setSyncSettings(const AVSyncSettings& rate, float videoFpsHint); +        virtual status_t        getSyncSettings(AVSyncSettings* rate /* nonnull */, +                                                float* videoFps /* nonnull */);          virtual status_t        seekTo(int msec);          virtual status_t        getCurrentPosition(int* msec);          virtual status_t        getDuration(int* msec); @@ -291,6 +318,8 @@ private:          virtual status_t        setDataSource(int fd, int64_t offset, int64_t length);          virtual status_t        setDataSource(const sp<IStreamSource> &source); +        virtual status_t        setDataSource(const sp<IDataSource> &source); +          sp<MediaPlayerBase>     setDataSource_pre(player_type playerType);          void                    setDataSource_post(const sp<MediaPlayerBase>& p, @@ -300,7 +329,7 @@ private:                                         int ext1, int ext2, const Parcel *obj);                  pid_t           pid() const { return mPid; } -        virtual status_t        dump(int fd, const Vector<String16>& args) const; +        virtual status_t        dump(int fd, const Vector<String16>& args);                  int             getAudioSessionId() { return mAudioSessionId; } diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index 194abbb..f761dec 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -55,6 +55,16 @@ static bool checkPermission(const char* permissionString) {      return ok;  } +status_t MediaRecorderClient::setInputSurface(const sp<IGraphicBufferConsumer>& surface) +{ +    ALOGV("setInputSurface"); +    Mutex::Autolock lock(mLock); +    if (mRecorder == NULL) { +        
ALOGE("recorder is not initialized"); +        return NO_INIT; +    } +    return mRecorder->setInputSurface(surface); +}  sp<IGraphicBufferProducer> MediaRecorderClient::querySurfaceMediaSource()  { @@ -154,17 +164,6 @@ status_t MediaRecorderClient::setAudioEncoder(int ae)      return mRecorder->setAudioEncoder((audio_encoder)ae);  } -status_t MediaRecorderClient::setOutputFile(const char* path) -{ -    ALOGV("setOutputFile(%s)", path); -    Mutex::Autolock lock(mLock); -    if (mRecorder == NULL) { -        ALOGE("recorder is not initialized"); -        return NO_INIT; -    } -    return mRecorder->setOutputFile(path); -} -  status_t MediaRecorderClient::setOutputFile(int fd, int64_t offset, int64_t length)  {      ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length); @@ -301,11 +300,12 @@ status_t MediaRecorderClient::release()      return NO_ERROR;  } -MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service, pid_t pid) +MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service, pid_t pid, +        const String16& opPackageName)  {      ALOGV("Client constructor");      mPid = pid; -    mRecorder = new StagefrightRecorder; +    mRecorder = new StagefrightRecorder(opPackageName);      mMediaPlayerService = service;  } @@ -336,7 +336,7 @@ status_t MediaRecorderClient::setClientName(const String16& clientName) {      return mRecorder->setClientName(clientName);  } -status_t MediaRecorderClient::dump(int fd, const Vector<String16>& args) const { +status_t MediaRecorderClient::dump(int fd, const Vector<String16>& args) {      if (mRecorder != NULL) {          return mRecorder->dump(fd, args);      } diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index a65ec9f..05130d4 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -38,7 +38,6 @@ public:      virtual     status_t   
setOutputFormat(int of);      virtual     status_t   setVideoEncoder(int ve);      virtual     status_t   setAudioEncoder(int ae); -    virtual     status_t   setOutputFile(const char* path);      virtual     status_t   setOutputFile(int fd, int64_t offset,                                                    int64_t length);      virtual     status_t   setVideoSize(int width, int height); @@ -55,7 +54,8 @@ public:      virtual     status_t   init();      virtual     status_t   close();      virtual     status_t   release(); -    virtual     status_t   dump(int fd, const Vector<String16>& args) const; +    virtual     status_t   dump(int fd, const Vector<String16>& args); +    virtual     status_t   setInputSurface(const sp<IGraphicBufferConsumer>& surface);      virtual     sp<IGraphicBufferProducer> querySurfaceMediaSource();  private: @@ -63,7 +63,8 @@ private:                             MediaRecorderClient(                                     const sp<MediaPlayerService>& service, -                                                               pid_t pid); +                                                               pid_t pid, +                                                               const String16& opPackageName);      virtual                ~MediaRecorderClient();      pid_t                  mPid; diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp index 715cc0c..a5a1fa5 100644 --- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp +++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp @@ -34,6 +34,7 @@  #include <media/IMediaHTTPService.h>  #include <media/MediaMetadataRetrieverInterface.h>  #include <media/MediaPlayerInterface.h> +#include <media/stagefright/DataSource.h>  #include <private/media/VideoFrame.h>  #include "MetadataRetrieverClient.h"  #include "StagefrightMetadataRetriever.h" @@ -56,7 +57,7 @@ MetadataRetrieverClient::~MetadataRetrieverClient()      
disconnect();  } -status_t MetadataRetrieverClient::dump(int fd, const Vector<String16>& /*args*/) const +status_t MetadataRetrieverClient::dump(int fd, const Vector<String16>& /*args*/)  {      const size_t SIZE = 256;      char buffer[SIZE]; @@ -140,7 +141,7 @@ status_t MetadataRetrieverClient::setDataSource(int fd, int64_t offset, int64_t          ALOGE("fstat(%d) failed: %d, %s", fd, ret, strerror(errno));          return BAD_VALUE;      } -    ALOGV("st_dev  = %llu", sb.st_dev); +    ALOGV("st_dev  = %llu", static_cast<uint64_t>(sb.st_dev));      ALOGV("st_mode = %u", sb.st_mode);      ALOGV("st_uid  = %lu", static_cast<unsigned long>(sb.st_uid));      ALOGV("st_gid  = %lu", static_cast<unsigned long>(sb.st_gid)); @@ -173,10 +174,30 @@ status_t MetadataRetrieverClient::setDataSource(int fd, int64_t offset, int64_t      return status;  } +status_t MetadataRetrieverClient::setDataSource( +        const sp<IDataSource>& source) +{ +    ALOGV("setDataSource(IDataSource)"); +    Mutex::Autolock lock(mLock); + +    sp<DataSource> dataSource = DataSource::CreateFromIDataSource(source); +    player_type playerType = +        MediaPlayerFactory::getPlayerType(NULL /* client */, dataSource); +    ALOGV("player type = %d", playerType); +    sp<MediaMetadataRetrieverBase> p = createRetriever(playerType); +    if (p == NULL) return NO_INIT; +    status_t ret = p->setDataSource(dataSource); +    if (ret == NO_ERROR) mRetriever = p; +    return ret; +} + +Mutex MetadataRetrieverClient::sLock; +  sp<IMemory> MetadataRetrieverClient::getFrameAtTime(int64_t timeUs, int option)  {      ALOGV("getFrameAtTime: time(%lld us) option(%d)", timeUs, option);      Mutex::Autolock lock(mLock); +    Mutex::Autolock glock(sLock);      mThumbnail.clear();      if (mRetriever == NULL) {          ALOGE("retriever is not initialized"); diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h index 9d3fbe9..fe7547c 100644 --- 
a/media/libmediaplayerservice/MetadataRetrieverClient.h +++ b/media/libmediaplayerservice/MetadataRetrieverClient.h @@ -49,11 +49,12 @@ public:              const KeyedVector<String8, String8> *headers);      virtual status_t                setDataSource(int fd, int64_t offset, int64_t length); +    virtual status_t                setDataSource(const sp<IDataSource>& source);      virtual sp<IMemory>             getFrameAtTime(int64_t timeUs, int option);      virtual sp<IMemory>             extractAlbumArt();      virtual const char*             extractMetadata(int keyCode); -    virtual status_t                dump(int fd, const Vector<String16>& args) const; +    virtual status_t                dump(int fd, const Vector<String16>& args);  private:      friend class MediaPlayerService; @@ -62,6 +63,7 @@ private:      virtual ~MetadataRetrieverClient();      mutable Mutex                          mLock; +    static  Mutex                          sLock;      sp<MediaMetadataRetrieverBase>         mRetriever;      pid_t                                  mPid; diff --git a/media/libmediaplayerservice/RemoteDisplay.cpp b/media/libmediaplayerservice/RemoteDisplay.cpp index eb959b4..0eb4b5d 100644 --- a/media/libmediaplayerservice/RemoteDisplay.cpp +++ b/media/libmediaplayerservice/RemoteDisplay.cpp @@ -26,13 +26,14 @@  namespace android {  RemoteDisplay::RemoteDisplay( +        const String16 &opPackageName,          const sp<IRemoteDisplayClient> &client,          const char *iface)      : mLooper(new ALooper),        mNetSession(new ANetworkSession) {      mLooper->setName("wfd_looper"); -    mSource = new WifiDisplaySource(mNetSession, client); +    mSource = new WifiDisplaySource(opPackageName, mNetSession, client);      mLooper->registerHandler(mSource);      mNetSession->start(); diff --git a/media/libmediaplayerservice/RemoteDisplay.h b/media/libmediaplayerservice/RemoteDisplay.h index 82a0116..d4573e9 100644 --- a/media/libmediaplayerservice/RemoteDisplay.h +++ 
b/media/libmediaplayerservice/RemoteDisplay.h @@ -28,11 +28,12 @@ namespace android {  struct ALooper;  struct ANetworkSession; -struct IRemoteDisplayClient; +class IRemoteDisplayClient;  struct WifiDisplaySource;  struct RemoteDisplay : public BnRemoteDisplay {      RemoteDisplay( +            const String16 &opPackageName,              const sp<IRemoteDisplayClient> &client,              const char *iface); diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp index b37aee3..3fedd9b 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.cpp +++ b/media/libmediaplayerservice/StagefrightPlayer.cpp @@ -188,6 +188,14 @@ status_t StagefrightPlayer::getParameter(int key, Parcel *reply) {      return mPlayer->getParameter(key, reply);  } +status_t StagefrightPlayer::setPlaybackSettings(const AudioPlaybackRate &rate) { +    return mPlayer->setPlaybackSettings(rate); +} + +status_t StagefrightPlayer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) { +    return mPlayer->getPlaybackSettings(rate); +} +  status_t StagefrightPlayer::getMetadata(          const media::Metadata::Filter& /* ids */, Parcel *records) {      using media::Metadata; diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h index e6c30ff..96013df 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.h +++ b/media/libmediaplayerservice/StagefrightPlayer.h @@ -60,6 +60,8 @@ public:      virtual void setAudioSink(const sp<AudioSink> &audioSink);      virtual status_t setParameter(int key, const Parcel &request);      virtual status_t getParameter(int key, Parcel *reply); +    virtual status_t setPlaybackSettings(const AudioPlaybackRate &rate); +    virtual status_t getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);      virtual status_t getMetadata(              const media::Metadata::Filter& ids, Parcel *records); diff --git 
a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index 86639cb..e521fae 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -69,12 +69,12 @@ static void addBatteryData(uint32_t params) {  } -StagefrightRecorder::StagefrightRecorder() -    : mWriter(NULL), +StagefrightRecorder::StagefrightRecorder(const String16 &opPackageName) +    : MediaRecorderBase(opPackageName), +      mWriter(NULL),        mOutputFd(-1),        mAudioSource(AUDIO_SOURCE_CNT),        mVideoSource(VIDEO_SOURCE_LIST_END), -      mCaptureTimeLapse(false),        mStarted(false) {      ALOGV("Constructor"); @@ -206,7 +206,7 @@ status_t StagefrightRecorder::setVideoSize(int width, int height) {  status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {      ALOGV("setVideoFrameRate: %d", frames_per_second);      if ((frames_per_second <= 0 && frames_per_second != -1) || -        frames_per_second > 120) { +        frames_per_second > kMaxHighSpeedFps) {          ALOGE("Invalid video frame rate: %d", frames_per_second);          return BAD_VALUE;      } @@ -241,12 +241,11 @@ status_t StagefrightRecorder::setPreviewSurface(const sp<IGraphicBufferProducer>      return OK;  } -status_t StagefrightRecorder::setOutputFile(const char * /* path */) { -    ALOGE("setOutputFile(const char*) must not be called"); -    // We don't actually support this at all, as the media_server process -    // no longer has permissions to create files. 
+status_t StagefrightRecorder::setInputSurface( +        const sp<IGraphicBufferConsumer>& surface) { +    mPersistentSurface = surface; -    return -EPERM; +    return OK;  }  status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t length) { @@ -271,6 +270,31 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng      return OK;  } +// Attempt to parse a float literal optionally surrounded by whitespace, +// returns true on success, false otherwise. +static bool safe_strtof(const char *s, float *val) { +    char *end; + +    // It is lame, but according to man page, we have to set errno to 0 +    // before calling strtof(). +    errno = 0; +    *val = strtof(s, &end); + +    if (end == s || errno == ERANGE) { +        return false; +    } + +    // Skip trailing whitespace +    while (isspace(*end)) { +        ++end; +    } + +    // For a successful return, the string must contain nothing but a valid +    // float literal optionally surrounded by whitespace. + +    return *end == '\0'; +} +  // Attempt to parse an int64 literal optionally surrounded by whitespace,  // returns true on success, false otherwise.  
static bool safe_strtoi64(const char *s, int64_t *val) { @@ -541,29 +565,32 @@ status_t StagefrightRecorder::setParamAudioTimeScale(int32_t timeScale) {      return OK;  } -status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) { -    ALOGV("setParamTimeLapseEnable: %d", timeLapseEnable); +status_t StagefrightRecorder::setParamCaptureFpsEnable(int32_t captureFpsEnable) { +    ALOGV("setParamCaptureFpsEnable: %d", captureFpsEnable); -    if(timeLapseEnable == 0) { -        mCaptureTimeLapse = false; -    } else if (timeLapseEnable == 1) { -        mCaptureTimeLapse = true; +    if(captureFpsEnable == 0) { +        mCaptureFpsEnable = false; +    } else if (captureFpsEnable == 1) { +        mCaptureFpsEnable = true;      } else {          return BAD_VALUE;      }      return OK;  } -status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) { -    ALOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs); +status_t StagefrightRecorder::setParamCaptureFps(float fps) { +    ALOGV("setParamCaptureFps: %.2f", fps); + +    int64_t timeUs = (int64_t) (1000000.0 / fps + 0.5f);      // Not allowing time more than a day      if (timeUs <= 0 || timeUs > 86400*1E6) { -        ALOGE("Time between time lapse frame capture (%lld) is out of range [0, 1 Day]", timeUs); +        ALOGE("Time between frame capture (%lld) is out of range [0, 1 Day]", timeUs);          return BAD_VALUE;      } -    mTimeBetweenTimeLapseFrameCaptureUs = timeUs; +    mCaptureFps = fps; +    mTimeBetweenCaptureUs = timeUs;      return OK;  } @@ -686,15 +713,14 @@ status_t StagefrightRecorder::setParameter(              return setParamVideoTimeScale(timeScale);          }      } else if (key == "time-lapse-enable") { -        int32_t timeLapseEnable; -        if (safe_strtoi32(value.string(), &timeLapseEnable)) { -            return setParamTimeLapseEnable(timeLapseEnable); +        int32_t captureFpsEnable; +        if 
(safe_strtoi32(value.string(), &captureFpsEnable)) { +            return setParamCaptureFpsEnable(captureFpsEnable);          } -    } else if (key == "time-between-time-lapse-frame-capture") { -        int64_t timeBetweenTimeLapseFrameCaptureUs; -        if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureUs)) { -            return setParamTimeBetweenTimeLapseFrameCapture( -                    timeBetweenTimeLapseFrameCaptureUs); +    } else if (key == "time-lapse-fps") { +        float fps; +        if (safe_strtof(value.string(), &fps)) { +            return setParamCaptureFps(fps);          }      } else {          ALOGE("setParameter: failed to find key %s", key.string()); @@ -882,11 +908,32 @@ status_t StagefrightRecorder::start() {  }  sp<MediaSource> StagefrightRecorder::createAudioSource() { +    int32_t sourceSampleRate = mSampleRate; + +    if (mCaptureFpsEnable && mCaptureFps >= mFrameRate) { +        // Upscale the sample rate for slow motion recording. +        // Fail audio source creation if source sample rate is too high, as it could +        // cause out-of-memory due to large input buffer size. And audio recording +        // probably doesn't make sense in the scenario, since the slow-down factor +        // is probably huge (eg. mSampleRate=48K, mCaptureFps=240, mFrameRate=1). +        const static int32_t SAMPLE_RATE_HZ_MAX = 192000; +        sourceSampleRate = +                (mSampleRate * mCaptureFps + mFrameRate / 2) / mFrameRate; +        if (sourceSampleRate < mSampleRate || sourceSampleRate > SAMPLE_RATE_HZ_MAX) { +            ALOGE("source sample rate out of range! 
" +                    "(mSampleRate %d, mCaptureFps %.2f, mFrameRate %d", +                    mSampleRate, mCaptureFps, mFrameRate); +            return NULL; +        } +    } +      sp<AudioSource> audioSource =          new AudioSource(                  mAudioSource, -                mSampleRate, -                mAudioChannels); +                mOpPackageName, +                sourceSampleRate, +                mAudioChannels, +                mSampleRate);      status_t err = audioSource->initCheck(); @@ -896,7 +943,6 @@ sp<MediaSource> StagefrightRecorder::createAudioSource() {      }      sp<AMessage> format = new AMessage; -    const char *mime;      switch (mAudioEncoder) {          case AUDIO_ENCODER_AMR_NB:          case AUDIO_ENCODER_DEFAULT: @@ -934,6 +980,7 @@ sp<MediaSource> StagefrightRecorder::createAudioSource() {      if (mAudioTimeScale > 0) {          format->setInt32("time-scale", mAudioTimeScale);      } +    format->setInt32("priority", 0 /* realtime */);      sp<MediaSource> audioEncoder =              MediaCodecSource::Create(mLooper, format, audioSource); @@ -1165,8 +1212,7 @@ void StagefrightRecorder::clipVideoFrameWidth() {      }  } -status_t StagefrightRecorder::checkVideoEncoderCapabilities( -        bool *supportsCameraSourceMetaDataMode) { +status_t StagefrightRecorder::checkVideoEncoderCapabilities() {      /* hardware codecs must support camera source meta data mode */      Vector<CodecCapabilities> codecs;      OMXClient client; @@ -1178,11 +1224,8 @@ status_t StagefrightRecorder::checkVideoEncoderCapabilities(               mVideoEncoder == VIDEO_ENCODER_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 :               mVideoEncoder == VIDEO_ENCODER_H264 ? MEDIA_MIMETYPE_VIDEO_AVC : ""),              false /* decoder */, true /* hwCodec */, &codecs); -    *supportsCameraSourceMetaDataMode = codecs.size() > 0; -    ALOGV("encoder %s camera source meta-data mode", -            *supportsCameraSourceMetaDataMode ? 
"supports" : "DOES NOT SUPPORT"); -    if (!mCaptureTimeLapse) { +    if (!mCaptureFpsEnable) {          // Dont clip for time lapse capture as encoder will have enough          // time to encode because of slow capture rate of time lapse.          clipVideoBitRate(); @@ -1389,32 +1432,29 @@ status_t StagefrightRecorder::setupMediaSource(  status_t StagefrightRecorder::setupCameraSource(          sp<CameraSource> *cameraSource) {      status_t err = OK; -    bool encoderSupportsCameraSourceMetaDataMode; -    if ((err = checkVideoEncoderCapabilities( -                &encoderSupportsCameraSourceMetaDataMode)) != OK) { +    if ((err = checkVideoEncoderCapabilities()) != OK) {          return err;      }      Size videoSize;      videoSize.width = mVideoWidth;      videoSize.height = mVideoHeight; -    if (mCaptureTimeLapse) { -        if (mTimeBetweenTimeLapseFrameCaptureUs < 0) { +    if (mCaptureFpsEnable) { +        if (mTimeBetweenCaptureUs < 0) {              ALOGE("Invalid mTimeBetweenTimeLapseFrameCaptureUs value: %lld", -                mTimeBetweenTimeLapseFrameCaptureUs); +                mTimeBetweenCaptureUs);              return BAD_VALUE;          }          mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(                  mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,                  videoSize, mFrameRate, mPreviewSurface, -                mTimeBetweenTimeLapseFrameCaptureUs, -                encoderSupportsCameraSourceMetaDataMode); +                mTimeBetweenCaptureUs);          *cameraSource = mCameraSourceTimeLapse;      } else {          *cameraSource = CameraSource::CreateFromCamera(                  mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,                  videoSize, mFrameRate, -                mPreviewSurface, encoderSupportsCameraSourceMetaDataMode); +                mPreviewSurface);      }      mCamera.clear();      mCameraProxy.clear(); @@ -1499,14 +1539,13 @@ status_t 
StagefrightRecorder::setupVideoEncoder(          format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);          // set up time lapse/slow motion for surface source -        if (mCaptureTimeLapse) { -            if (mTimeBetweenTimeLapseFrameCaptureUs <= 0) { -                ALOGE("Invalid mTimeBetweenTimeLapseFrameCaptureUs value: %lld", -                    mTimeBetweenTimeLapseFrameCaptureUs); +        if (mCaptureFpsEnable) { +            if (mTimeBetweenCaptureUs <= 0) { +                ALOGE("Invalid mTimeBetweenCaptureUs value: %lld", +                        mTimeBetweenCaptureUs);                  return BAD_VALUE;              } -            format->setInt64("time-lapse", -                    mTimeBetweenTimeLapseFrameCaptureUs); +            format->setInt64("time-lapse", mTimeBetweenCaptureUs);          }      } @@ -1524,6 +1563,11 @@ status_t StagefrightRecorder::setupVideoEncoder(          format->setInt32("level", mVideoEncoderLevel);      } +    format->setInt32("priority", 0 /* realtime */); +    if (mCaptureFpsEnable) { +        format->setFloat("operating-rate", mCaptureFps); +    } +      uint32_t flags = 0;      if (mIsMetaDataStoredInVideoBuffers) {          flags |= MediaCodecSource::FLAG_USE_METADATA_INPUT; @@ -1533,8 +1577,8 @@ status_t StagefrightRecorder::setupVideoEncoder(          flags |= MediaCodecSource::FLAG_USE_SURFACE_INPUT;      } -    sp<MediaCodecSource> encoder = -            MediaCodecSource::Create(mLooper, format, cameraSource, flags); +    sp<MediaCodecSource> encoder = MediaCodecSource::Create( +            mLooper, format, cameraSource, mPersistentSurface, flags);      if (encoder == NULL) {          ALOGE("Failed to create video encoder");          // When the encoder fails to be created, we need @@ -1589,10 +1633,11 @@ status_t StagefrightRecorder::setupMPEG4orWEBMRecording() {      status_t err = OK;      sp<MediaWriter> writer; +    sp<MPEG4Writer> mp4writer;      if (mOutputFormat == OUTPUT_FORMAT_WEBM) 
{          writer = new WebmWriter(mOutputFd);      } else { -        writer = new MPEG4Writer(mOutputFd); +        writer = mp4writer = new MPEG4Writer(mOutputFd);      }      if (mVideoSource < VIDEO_SOURCE_LIST_END) { @@ -1619,19 +1664,23 @@ status_t StagefrightRecorder::setupMPEG4orWEBMRecording() {          // This help make sure that the "recoding" sound is suppressed for          // camcorder applications in the recorded files.          // TODO Audio source is currently unsupported for webm output; vorbis encoder needed. -        if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) { +        // disable audio for time lapse recording +        bool disableAudio = mCaptureFpsEnable && mCaptureFps < mFrameRate; +        if (!disableAudio && mAudioSource != AUDIO_SOURCE_CNT) {              err = setupAudioEncoder(writer);              if (err != OK) return err;              mTotalBitRate += mAudioBitRate;          } +        if (mCaptureFpsEnable) { +            mp4writer->setCaptureRate(mCaptureFps); +        } +          if (mInterleaveDurationUs > 0) { -            reinterpret_cast<MPEG4Writer *>(writer.get())-> -                setInterleaveDuration(mInterleaveDurationUs); +            mp4writer->setInterleaveDuration(mInterleaveDurationUs);          }          if (mLongitudex10000 > -3600000 && mLatitudex10000 > -3600000) { -            reinterpret_cast<MPEG4Writer *>(writer.get())-> -                setGeoData(mLatitudex10000, mLongitudex10000); +            mp4writer->setGeoData(mLatitudex10000, mLongitudex10000);          }      }      if (mMaxFileDurationUs != 0) { @@ -1704,7 +1753,7 @@ status_t StagefrightRecorder::stop() {      ALOGV("stop");      status_t err = OK; -    if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) { +    if (mCaptureFpsEnable && mCameraSourceTimeLapse != NULL) {          mCameraSourceTimeLapse->startQuickReadReturns();          mCameraSourceTimeLapse = NULL;      } @@ -1715,6 +1764,7 @@ status_t 
StagefrightRecorder::stop() {      }      mGraphicBufferProducer.clear(); +    mPersistentSurface.clear();      if (mOutputFd >= 0) {          ::close(mOutputFd); @@ -1778,8 +1828,9 @@ status_t StagefrightRecorder::reset() {      mMaxFileDurationUs = 0;      mMaxFileSizeBytes = 0;      mTrackEveryTimeDurationUs = 0; -    mCaptureTimeLapse = false; -    mTimeBetweenTimeLapseFrameCaptureUs = -1; +    mCaptureFpsEnable = false; +    mCaptureFps = 0.0f; +    mTimeBetweenCaptureUs = -1;      mCameraSourceTimeLapse = NULL;      mIsMetaDataStoredInVideoBuffers = false;      mEncoderProfiles = MediaProfiles::getInstance(); diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index 54c38d3..da00bc7 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -35,12 +35,13 @@ struct MediaWriter;  class MetaData;  struct AudioSource;  class MediaProfiles; +class IGraphicBufferConsumer;  class IGraphicBufferProducer;  class SurfaceMediaSource; -class ALooper; +struct ALooper;  struct StagefrightRecorder : public MediaRecorderBase { -    StagefrightRecorder(); +    StagefrightRecorder(const String16 &opPackageName);      virtual ~StagefrightRecorder();      virtual status_t init(); @@ -53,7 +54,7 @@ struct StagefrightRecorder : public MediaRecorderBase {      virtual status_t setVideoFrameRate(int frames_per_second);      virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy);      virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface); -    virtual status_t setOutputFile(const char *path); +    virtual status_t setInputSurface(const sp<IGraphicBufferConsumer>& surface);      virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);      virtual status_t setParameters(const String8& params);      virtual status_t setListener(const sp<IMediaRecorderClient>& listener); @@ -73,6 +74,7 
@@ private:      sp<ICamera> mCamera;      sp<ICameraRecordingProxy> mCameraProxy;      sp<IGraphicBufferProducer> mPreviewSurface; +    sp<IGraphicBufferConsumer> mPersistentSurface;      sp<IMediaRecorderClient> mListener;      String16 mClientName;      uid_t mClientUid; @@ -109,11 +111,11 @@ private:      int32_t mStartTimeOffsetMs;      int32_t mTotalBitRate; -    bool mCaptureTimeLapse; -    int64_t mTimeBetweenTimeLapseFrameCaptureUs; +    bool mCaptureFpsEnable; +    float mCaptureFps; +    int64_t mTimeBetweenCaptureUs;      sp<CameraSourceTimeLapse> mCameraSourceTimeLapse; -      String8 mParams;      bool mIsMetaDataStoredInVideoBuffers; @@ -127,6 +129,8 @@ private:      sp<IGraphicBufferProducer> mGraphicBufferProducer;      sp<ALooper> mLooper; +    static const int kMaxHighSpeedFps = 1000; +      status_t prepareInternal();      status_t setupMPEG4orWEBMRecording();      void setupMPEG4orWEBMMetaData(sp<MetaData> *meta); @@ -136,8 +140,7 @@ private:      status_t setupRTPRecording();      status_t setupMPEG2TSRecording();      sp<MediaSource> createAudioSource(); -    status_t checkVideoEncoderCapabilities( -            bool *supportsCameraSourceMetaDataMode); +    status_t checkVideoEncoderCapabilities();      status_t checkAudioEncoderCapabilities();      // Generic MediaSource set-up. 
Returns the appropriate      // source (CameraSource or SurfaceMediaSource) @@ -153,8 +156,8 @@ private:      status_t setParamAudioNumberOfChannels(int32_t channles);      status_t setParamAudioSamplingRate(int32_t sampleRate);      status_t setParamAudioTimeScale(int32_t timeScale); -    status_t setParamTimeLapseEnable(int32_t timeLapseEnable); -    status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs); +    status_t setParamCaptureFpsEnable(int32_t timeLapseEnable); +    status_t setParamCaptureFps(float fps);      status_t setParamVideoEncodingBitRate(int32_t bitRate);      status_t setParamVideoIFramesInterval(int32_t seconds);      status_t setParamVideoEncoderProfile(int32_t profile); diff --git a/media/libmediaplayerservice/VideoFrameScheduler.h b/media/libmediaplayerservice/VideoFrameScheduler.h deleted file mode 100644 index 84b27b4..0000000 --- a/media/libmediaplayerservice/VideoFrameScheduler.h +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright 2014, The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - *     http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef VIDEO_FRAME_SCHEDULER_H_ -#define VIDEO_FRAME_SCHEDULER_H_ - -#include <utils/RefBase.h> -#include <utils/Timers.h> - -#include <media/stagefright/foundation/ABase.h> - -namespace android { - -struct ISurfaceComposer; - -struct VideoFrameScheduler : public RefBase { -    VideoFrameScheduler(); - -    // (re)initialize scheduler -    void init(float videoFps = -1); -    // use in case of video render-time discontinuity, e.g. seek -    void restart(); -    // get adjusted nanotime for a video frame render at renderTime -    nsecs_t schedule(nsecs_t renderTime); - -    // returns the vsync period for the main display -    nsecs_t getVsyncPeriod(); - -    void release(); - -    static const size_t kHistorySize = 8; - -protected: -    virtual ~VideoFrameScheduler(); - -private: -    struct PLL { -        PLL(); - -        // reset PLL to new PLL -        void reset(float fps = -1); -        // keep current estimate, but restart phase -        void restart(); -        // returns period -        nsecs_t addSample(nsecs_t time); - -    private: -        nsecs_t mPeriod; -        nsecs_t mPhase; - -        bool    mPrimed;        // have an estimate for the period -        size_t  mSamplesUsedForPriming; - -        nsecs_t mLastTime;      // last input time -        nsecs_t mRefitAt;       // next input time to fit at - -        size_t  mNumSamples;    // can go past kHistorySize -        nsecs_t mTimes[kHistorySize]; - -        void test(); -        // returns whether fit was successful -        bool fit(nsecs_t phase, nsecs_t period, size_t numSamples, -                int64_t *a, int64_t *b, int64_t *err); -        void prime(size_t numSamples); -    }; - -    void updateVsync(); - -    nsecs_t mVsyncTime;        // vsync timing from display -    nsecs_t mVsyncPeriod; -    nsecs_t mVsyncRefreshAt;   // next time to refresh timing info - -    nsecs_t mLastVsyncTime;    // estimated vsync time for last frame -    nsecs_t mTimeCorrection;   // running 
adjustment - -    PLL mPll;                  // PLL for video frame rate based on render time - -    sp<ISurfaceComposer> mComposer; - -    DISALLOW_EVIL_CONSTRUCTORS(VideoFrameScheduler); -}; - -}  // namespace android - -#endif  // VIDEO_FRAME_SCHEDULER_H_ - diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk index 6609874..cd20837 100644 --- a/media/libmediaplayerservice/nuplayer/Android.mk +++ b/media/libmediaplayerservice/nuplayer/Android.mk @@ -16,6 +16,7 @@ LOCAL_SRC_FILES:=                       \          StreamingSource.cpp             \  LOCAL_C_INCLUDES := \ +	$(TOP)/frameworks/av/media/libstagefright                     \  	$(TOP)/frameworks/av/media/libstagefright/httplive            \  	$(TOP)/frameworks/av/media/libstagefright/include             \  	$(TOP)/frameworks/av/media/libstagefright/mpeg2ts             \ @@ -24,6 +25,15 @@ LOCAL_C_INCLUDES := \  	$(TOP)/frameworks/av/media/libmediaplayerservice              \  	$(TOP)/frameworks/native/include/media/openmax +LOCAL_CFLAGS += -Werror -Wall + +# enable experiments only in userdebug and eng builds +ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT))) +LOCAL_CFLAGS += -DENABLE_STAGEFRIGHT_EXPERIMENTS +endif + +LOCAL_CLANG := true +  LOCAL_MODULE:= libstagefright_nuplayer  LOCAL_MODULE_TAGS := eng diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp index 1b2fc5e..7dc9be7 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp +++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp @@ -56,7 +56,7 @@ NuPlayer::GenericSource::GenericSource(        mVideoLastDequeueTimeUs(0),        mFetchSubtitleDataGeneration(0),        mFetchTimedTextDataGeneration(0), -      mDurationUs(0ll), +      mDurationUs(-1ll),        mAudioIsVorbis(false),        mIsWidevine(false),        mIsSecure(false), @@ -65,12 +65,12 @@ NuPlayer::GenericSource::GenericSource(       
 mUID(uid),        mFd(-1),        mDrmManagerClient(NULL), -      mMetaDataSize(-1ll),        mBitrate(-1ll),        mPollBufferingGeneration(0),        mPendingReadBufferTypes(0),        mBuffering(false), -      mPrepareBuffering(false) { +      mPrepareBuffering(false), +      mPrevBufferPercentage(-1) {      resetDataSource();      DataSource::RegisterDefaultSniffers();  } @@ -124,29 +124,46 @@ status_t NuPlayer::GenericSource::setDataSource(      return OK;  } +status_t NuPlayer::GenericSource::setDataSource(const sp<DataSource>& source) { +    resetDataSource(); +    mDataSource = source; +    return OK; +} +  sp<MetaData> NuPlayer::GenericSource::getFileFormatMeta() const {      return mFileMeta;  }  status_t NuPlayer::GenericSource::initFromDataSource() {      sp<MediaExtractor> extractor; +    String8 mimeType; +    float confidence; +    sp<AMessage> dummy; +    bool isWidevineStreaming = false;      CHECK(mDataSource != NULL);      if (mIsWidevine) { -        String8 mimeType; -        float confidence; -        sp<AMessage> dummy; -        bool success; - -        success = SniffWVM(mDataSource, &mimeType, &confidence, &dummy); -        if (!success -                || strcasecmp( +        isWidevineStreaming = SniffWVM( +                mDataSource, &mimeType, &confidence, &dummy); +        if (!isWidevineStreaming || +                strcasecmp(                      mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM)) {              ALOGE("unsupported widevine mime: %s", mimeType.string());              return UNKNOWN_ERROR;          } +    } else if (mIsStreaming) { +        if (!mDataSource->sniff(&mimeType, &confidence, &dummy)) { +            return UNKNOWN_ERROR; +        } +        isWidevineStreaming = !strcasecmp( +                mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM); +    } +    if (isWidevineStreaming) { +        // we don't want cached source for widevine streaming. 
+        mCachedSource.clear(); +        mDataSource = mHttpSource;          mWVMExtractor = new WVMExtractor(mDataSource);          mWVMExtractor->setAdaptiveStreamingMode(true);          if (mUIDValid) { @@ -155,7 +172,7 @@ status_t NuPlayer::GenericSource::initFromDataSource() {          extractor = mWVMExtractor;      } else {          extractor = MediaExtractor::Create(mDataSource, -                mSniffedMIME.empty() ? NULL: mSniffedMIME.c_str()); +                mimeType.isEmpty() ? NULL : mimeType.string());      }      if (extractor == NULL) { @@ -181,14 +198,6 @@ status_t NuPlayer::GenericSource::initFromDataSource() {              if (mFileMeta->findCString(kKeyMIMEType, &fileMime)                      && !strncasecmp(fileMime, "video/wvm", 9)) {                  mIsWidevine = true; -                if (!mUri.empty()) { -                  // streaming, but the app forgot to specify widevine:// url -                  mWVMExtractor = static_cast<WVMExtractor *>(extractor.get()); -                  mWVMExtractor->setAdaptiveStreamingMode(true); -                  if (mUIDValid) { -                    mWVMExtractor->setUID(mUID); -                  } -                }              }          }      } @@ -315,6 +324,10 @@ status_t NuPlayer::GenericSource::setBuffers(      return INVALID_OPERATION;  } +bool NuPlayer::GenericSource::isStreaming() const { +    return mIsStreaming; +} +  NuPlayer::GenericSource::~GenericSource() {      if (mLooper != NULL) {          mLooper->unregisterHandler(id()); @@ -332,7 +345,7 @@ void NuPlayer::GenericSource::prepareAsync() {          mLooper->registerHandler(this);      } -    sp<AMessage> msg = new AMessage(kWhatPrepareAsync, id()); +    sp<AMessage> msg = new AMessage(kWhatPrepareAsync, this);      msg->post();  } @@ -345,6 +358,7 @@ void NuPlayer::GenericSource::onPrepareAsync() {          if (!mUri.empty()) {              const char* uri = mUri.c_str(); +            String8 contentType;              mIsWidevine = 
!strncasecmp(uri, "widevine://", 11);              if (!strncasecmp("http://", uri, 7) @@ -359,7 +373,7 @@ void NuPlayer::GenericSource::onPrepareAsync() {              }              mDataSource = DataSource::CreateFromURI( -                   mHTTPService, uri, &mUriHeaders, &mContentType, +                   mHTTPService, uri, &mUriHeaders, &contentType,                     static_cast<HTTPBase *>(mHttpSource.get()));          } else {              mIsWidevine = false; @@ -373,34 +387,22 @@ void NuPlayer::GenericSource::onPrepareAsync() {              notifyPreparedAndCleanup(UNKNOWN_ERROR);              return;          } - -        if (mDataSource->flags() & DataSource::kIsCachingDataSource) { -            mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get()); -        } - -        // For widevine or other cached streaming cases, we need to wait for -        // enough buffering before reporting prepared. -        // Note that even when URL doesn't start with widevine://, mIsWidevine -        // could still be set to true later, if the streaming or file source -        // is sniffed to be widevine. We don't want to buffer for file source -        // in that case, so must check the flag now. 
-        mIsStreaming = (mIsWidevine || mCachedSource != NULL);      } -    // check initial caching status -    status_t err = prefillCacheIfNecessary(); -    if (err != OK) { -        if (err == -EAGAIN) { -            (new AMessage(kWhatPrepareAsync, id()))->post(200000); -        } else { -            ALOGE("Failed to prefill data cache!"); -            notifyPreparedAndCleanup(UNKNOWN_ERROR); -        } -        return; +    if (mDataSource->flags() & DataSource::kIsCachingDataSource) { +        mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());      } -    // init extrator from data source -    err = initFromDataSource(); +    // For widevine or other cached streaming cases, we need to wait for +    // enough buffering before reporting prepared. +    // Note that even when URL doesn't start with widevine://, mIsWidevine +    // could still be set to true later, if the streaming or file source +    // is sniffed to be widevine. We don't want to buffer for file source +    // in that case, so must check the flag now. 
+    mIsStreaming = (mIsWidevine || mCachedSource != NULL); + +    // init extractor from data source +    status_t err = initFromDataSource();      if (err != OK) {          ALOGE("Failed to init from data source!"); @@ -429,7 +431,7 @@ void NuPlayer::GenericSource::onPrepareAsync() {      if (mIsSecure) {          // secure decoders must be instantiated before starting widevine source -        sp<AMessage> reply = new AMessage(kWhatSecureDecodersInstantiated, id()); +        sp<AMessage> reply = new AMessage(kWhatSecureDecodersInstantiated, this);          notifyInstantiateSecureDecoders(reply);      } else {          finishPrepareAsync(); @@ -465,9 +467,6 @@ void NuPlayer::GenericSource::finishPrepareAsync() {  void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {      if (err != OK) { -        mMetaDataSize = -1ll; -        mContentType = ""; -        mSniffedMIME = "";          mDataSource.clear();          mCachedSource.clear();          mHttpSource.clear(); @@ -478,76 +477,6 @@ void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {      notifyPrepared(err);  } -status_t NuPlayer::GenericSource::prefillCacheIfNecessary() { -    CHECK(mDataSource != NULL); - -    if (mCachedSource == NULL) { -        // no prefill if the data source is not cached -        return OK; -    } - -    // We're not doing this for streams that appear to be audio-only -    // streams to ensure that even low bandwidth streams start -    // playing back fairly instantly. -    if (!strncasecmp(mContentType.string(), "audio/", 6)) { -        return OK; -    } - -    // We're going to prefill the cache before trying to instantiate -    // the extractor below, as the latter is an operation that otherwise -    // could block on the datasource for a significant amount of time. -    // During that time we'd be unable to abort the preparation phase -    // without this prefill. 
- -    // Initially make sure we have at least 192 KB for the sniff -    // to complete without blocking. -    static const size_t kMinBytesForSniffing = 192 * 1024; -    static const size_t kDefaultMetaSize = 200000; - -    status_t finalStatus; - -    size_t cachedDataRemaining = -            mCachedSource->approxDataRemaining(&finalStatus); - -    if (finalStatus != OK || (mMetaDataSize >= 0 -            && (off64_t)cachedDataRemaining >= mMetaDataSize)) { -        ALOGV("stop caching, status %d, " -                "metaDataSize %lld, cachedDataRemaining %zu", -                finalStatus, mMetaDataSize, cachedDataRemaining); -        return OK; -    } - -    ALOGV("now cached %zu bytes of data", cachedDataRemaining); - -    if (mMetaDataSize < 0 -            && cachedDataRemaining >= kMinBytesForSniffing) { -        String8 tmp; -        float confidence; -        sp<AMessage> meta; -        if (!mCachedSource->sniff(&tmp, &confidence, &meta)) { -            return UNKNOWN_ERROR; -        } - -        // We successfully identified the file's extractor to -        // be, remember this mime type so we don't have to -        // sniff it again when we call MediaExtractor::Create() -        mSniffedMIME = tmp.string(); - -        if (meta == NULL -                || !meta->findInt64("meta-data-size", -                        reinterpret_cast<int64_t*>(&mMetaDataSize))) { -            mMetaDataSize = kDefaultMetaSize; -        } - -        if (mMetaDataSize < 0ll) { -            ALOGE("invalid metaDataSize = %lld bytes", mMetaDataSize); -            return UNKNOWN_ERROR; -        } -    } - -    return -EAGAIN; -} -  void NuPlayer::GenericSource::start() {      ALOGI("start"); @@ -563,7 +492,7 @@ void NuPlayer::GenericSource::start() {      setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);      mStarted = true; -    (new AMessage(kWhatStart, id()))->post(); +    (new AMessage(kWhatStart, this))->post();  }  void 
NuPlayer::GenericSource::stop() { @@ -572,7 +501,7 @@ void NuPlayer::GenericSource::stop() {      mStarted = false;      if (mIsWidevine || mIsSecure) {          // For widevine or secure sources we need to prevent any further reads. -        sp<AMessage> msg = new AMessage(kWhatStopWidevine, id()); +        sp<AMessage> msg = new AMessage(kWhatStopWidevine, this);          sp<AMessage> response;          (void) msg->postAndAwaitResponse(&response);      } @@ -589,7 +518,7 @@ void NuPlayer::GenericSource::resume() {      setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000);      mStarted = true; -    (new AMessage(kWhatResume, id()))->post(); +    (new AMessage(kWhatResume, this))->post();  }  void NuPlayer::GenericSource::disconnect() { @@ -616,7 +545,7 @@ status_t NuPlayer::GenericSource::feedMoreTSData() {  }  void NuPlayer::GenericSource::schedulePollBuffering() { -    sp<AMessage> msg = new AMessage(kWhatPollBuffering, id()); +    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);      msg->setInt32("generation", mPollBufferingGeneration);      msg->post(1000000ll);  } @@ -624,6 +553,7 @@ void NuPlayer::GenericSource::schedulePollBuffering() {  void NuPlayer::GenericSource::cancelPollBuffering() {      mBuffering = false;      ++mPollBufferingGeneration; +    mPrevBufferPercentage = -1;  }  void NuPlayer::GenericSource::restartPollBuffering() { @@ -633,7 +563,19 @@ void NuPlayer::GenericSource::restartPollBuffering() {      }  } -void NuPlayer::GenericSource::notifyBufferingUpdate(int percentage) { +void NuPlayer::GenericSource::notifyBufferingUpdate(int32_t percentage) { +    // Buffering percent could go backward as it's estimated from remaining +    // data and last access time. This could cause the buffering position +    // drawn on media control to jitter slightly. Remember previously reported +    // percentage and don't allow it to go backward. 
+    if (percentage < mPrevBufferPercentage) { +        percentage = mPrevBufferPercentage; +    } else if (percentage > 100) { +        percentage = 100; +    } + +    mPrevBufferPercentage = percentage; +      ALOGV("notifyBufferingUpdate: buffering %d%%", percentage);      sp<AMessage> msg = dupNotify(); @@ -687,10 +629,10 @@ void NuPlayer::GenericSource::sendCacheStats() {      int32_t kbps = 0;      status_t err = UNKNOWN_ERROR; -    if (mCachedSource != NULL) { -        err = mCachedSource->getEstimatedBandwidthKbps(&kbps); -    } else if (mWVMExtractor != NULL) { +    if (mWVMExtractor != NULL) {          err = mWVMExtractor->getEstimatedBandwidthKbps(&kbps); +    } else if (mCachedSource != NULL) { +        err = mCachedSource->getEstimatedBandwidthKbps(&kbps);      }      if (err == OK) { @@ -712,7 +654,13 @@ void NuPlayer::GenericSource::onPollBuffering() {      int64_t cachedDurationUs = -1ll;      ssize_t cachedDataRemaining = -1; -    if (mCachedSource != NULL) { +    ALOGW_IF(mWVMExtractor != NULL && mCachedSource != NULL, +            "WVMExtractor and NuCachedSource both present"); + +    if (mWVMExtractor != NULL) { +        cachedDurationUs = +                mWVMExtractor->getCachedDurationUs(&finalStatus); +    } else if (mCachedSource != NULL) {          cachedDataRemaining =                  mCachedSource->approxDataRemaining(&finalStatus); @@ -728,9 +676,6 @@ void NuPlayer::GenericSource::onPollBuffering() {                  cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate;              }          } -    } else if (mWVMExtractor != NULL) { -        cachedDurationUs -            = mWVMExtractor->getCachedDurationUs(&finalStatus);      }      if (finalStatus != OK) { @@ -762,7 +707,7 @@ void NuPlayer::GenericSource::onPollBuffering() {              stopBufferingIfNecessary();          }      } else if (cachedDataRemaining >= 0) { -        ALOGV("onPollBuffering: cachedDataRemaining %d bytes", +        ALOGV("onPollBuffering: 
cachedDataRemaining %zd bytes",                  cachedDataRemaining);          if (cachedDataRemaining < kLowWaterMarkBytes) { @@ -849,7 +794,7 @@ void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {            }            readBuffer(trackType, timeUs, &actualTimeUs, formatChange);            readBuffer(counterpartType, -1, NULL, formatChange); -          ALOGV("timeUs %lld actualTimeUs %lld", timeUs, actualTimeUs); +          ALOGV("timeUs %lld actualTimeUs %lld", (long long)timeUs, (long long)actualTimeUs);            break;        } @@ -918,7 +863,7 @@ void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {                mVideoTrack.mPackets->clear();            }            sp<AMessage> response = new AMessage; -          uint32_t replyID; +          sp<AReplyToken> replyID;            CHECK(msg->senderAwaitsResponse(&replyID));            response->postReply(replyID);            break; @@ -958,7 +903,7 @@ void NuPlayer::GenericSource::fetchTextData(          const int64_t oneSecUs = 1000000ll;          delayUs -= oneSecUs;      } -    sp<AMessage> msg2 = new AMessage(sendWhat, id()); +    sp<AMessage> msg2 = new AMessage(sendWhat, this);      msg2->setInt32("generation", msgGeneration);      msg2->post(delayUs < 0 ? 
0 : delayUs);  } @@ -998,7 +943,7 @@ void NuPlayer::GenericSource::sendTextData(  }  sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) { -    sp<AMessage> msg = new AMessage(kWhatGetFormat, id()); +    sp<AMessage> msg = new AMessage(kWhatGetFormat, this);      msg->setInt32("audio", audio);      sp<AMessage> response; @@ -1020,7 +965,7 @@ void NuPlayer::GenericSource::onGetFormatMeta(sp<AMessage> msg) const {      sp<MetaData> format = doGetFormatMeta(audio);      response->setPointer("format", format.get()); -    uint32_t replyID; +    sp<AReplyToken> replyID;      CHECK(msg->senderAwaitsResponse(&replyID));      response->postReply(replyID);  } @@ -1060,7 +1005,9 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(      status_t result = track->mPackets->dequeueAccessUnit(accessUnit); -    if (!track->mPackets->hasBufferAvailable(&finalResult)) { +    // start pulling in more buffers if we only have one (or no) buffer left +    // so that decoder has less chance of being starved +    if (track->mPackets->getAvailableBufferCount(&finalResult) < 2) {          postReadBuffer(audio? 
MEDIA_TRACK_TYPE_AUDIO : MEDIA_TRACK_TYPE_VIDEO);      } @@ -1087,7 +1034,7 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(      if (mSubtitleTrack.mSource != NULL              && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) { -        sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id()); +        sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);          msg->setInt64("timeUs", timeUs);          msg->setInt32("generation", mFetchSubtitleDataGeneration);          msg->post(); @@ -1095,7 +1042,7 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit(      if (mTimedTextTrack.mSource != NULL              && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) { -        sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id()); +        sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);          msg->setInt64("timeUs", timeUs);          msg->setInt32("generation", mFetchTimedTextDataGeneration);          msg->post(); @@ -1124,6 +1071,7 @@ sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {      const char *mime;      CHECK(meta->findCString(kKeyMIMEType, &mime)); +    format->setString("mime", mime);      int32_t trackType;      if (!strncasecmp(mime, "video/", 6)) { @@ -1144,8 +1092,6 @@ sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {      format->setString("language", lang);      if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) { -        format->setString("mime", mime); -          int32_t isAutoselect = 1, isDefault = 0, isForced = 0;          meta->findInt32(kKeyTrackIsAutoselect, &isAutoselect);          meta->findInt32(kKeyTrackIsDefault, &isDefault); @@ -1160,7 +1106,7 @@ sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {  }  ssize_t NuPlayer::GenericSource::getSelectedTrack(media_track_type type) const { -    sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id()); +    sp<AMessage> msg = new 
AMessage(kWhatGetSelectedTrack, this);      msg->setInt32("type", type);      sp<AMessage> response; @@ -1183,7 +1129,7 @@ void NuPlayer::GenericSource::onGetSelectedTrack(sp<AMessage> msg) const {      ssize_t index = doGetSelectedTrack(type);      response->setInt32("index", index); -    uint32_t replyID; +    sp<AReplyToken> replyID;      CHECK(msg->senderAwaitsResponse(&replyID));      response->postReply(replyID);  } @@ -1216,7 +1162,7 @@ ssize_t NuPlayer::GenericSource::doGetSelectedTrack(media_track_type type) const  status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) {      ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex); -    sp<AMessage> msg = new AMessage(kWhatSelectTrack, id()); +    sp<AMessage> msg = new AMessage(kWhatSelectTrack, this);      msg->setInt32("trackIndex", trackIndex);      msg->setInt32("select", select);      msg->setInt64("timeUs", timeUs); @@ -1241,7 +1187,7 @@ void NuPlayer::GenericSource::onSelectTrack(sp<AMessage> msg) {      status_t err = doSelectTrack(trackIndex, select, timeUs);      response->setInt32("err", err); -    uint32_t replyID; +    sp<AReplyToken> replyID;      CHECK(msg->senderAwaitsResponse(&replyID));      response->postReply(replyID);  } @@ -1302,7 +1248,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select,          status_t eosResult; // ignored          if (mSubtitleTrack.mSource != NULL                  && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) { -            sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id()); +            sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this);              msg->setInt64("timeUs", timeUs);              msg->setInt32("generation", mFetchSubtitleDataGeneration);              msg->post(); @@ -1310,7 +1256,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select,          if (mTimedTextTrack.mSource != NULL                  && 
!mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) { -            sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id()); +            sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this);              msg->setInt64("timeUs", timeUs);              msg->setInt32("generation", mFetchTimedTextDataGeneration);              msg->post(); @@ -1324,7 +1270,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select,              return OK;          } -        sp<AMessage> msg = new AMessage(kWhatChangeAVSource, id()); +        sp<AMessage> msg = new AMessage(kWhatChangeAVSource, this);          msg->setInt32("trackIndex", trackIndex);          msg->post();          return OK; @@ -1334,7 +1280,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select,  }  status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs) { -    sp<AMessage> msg = new AMessage(kWhatSeek, id()); +    sp<AMessage> msg = new AMessage(kWhatSeek, this);      msg->setInt64("seekTimeUs", seekTimeUs);      sp<AMessage> response; @@ -1354,7 +1300,7 @@ void NuPlayer::GenericSource::onSeek(sp<AMessage> msg) {      status_t err = doSeek(seekTimeUs);      response->setInt32("err", err); -    uint32_t replyID; +    sp<AReplyToken> replyID;      CHECK(msg->senderAwaitsResponse(&replyID));      response->postReply(replyID);  } @@ -1459,6 +1405,14 @@ sp<ABuffer> NuPlayer::GenericSource::mediaBufferToABuffer(          meta->setInt32("trackIndex", mSubtitleTrack.mIndex);      } +    uint32_t dataType; // unused +    const void *seiData; +    size_t seiLength; +    if (mb->meta_data()->findData(kKeySEI, &dataType, &seiData, &seiLength)) { +        sp<ABuffer> sei = ABuffer::CreateAsCopy(seiData, seiLength);; +        meta->setBuffer("sei", sei); +    } +      if (actualTimeUs) {          *actualTimeUs = timeUs;      } @@ -1474,7 +1428,7 @@ void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) {      if 
((mPendingReadBufferTypes & (1 << trackType)) == 0) {          mPendingReadBufferTypes |= (1 << trackType); -        sp<AMessage> msg = new AMessage(kWhatReadBuffer, id()); +        sp<AMessage> msg = new AMessage(kWhatReadBuffer, this);          msg->setInt32("trackType", trackType);          msg->post();      } @@ -1506,6 +1460,8 @@ void NuPlayer::GenericSource::readBuffer(              track = &mVideoTrack;              if (mIsWidevine) {                  maxBuffers = 2; +            } else { +                maxBuffers = 4;              }              break;          case MEDIA_TRACK_TYPE_AUDIO: @@ -1562,17 +1518,7 @@ void NuPlayer::GenericSource::readBuffer(                  mVideoTimeUs = timeUs;              } -            // formatChange && seeking: track whose source is changed during selection -            // formatChange && !seeking: track whose source is not changed during selection -            // !formatChange: normal seek -            if ((seeking || formatChange) -                    && (trackType == MEDIA_TRACK_TYPE_AUDIO -                    || trackType == MEDIA_TRACK_TYPE_VIDEO)) { -                ATSParser::DiscontinuityType type = (formatChange && seeking) -                        ? 
ATSParser::DISCONTINUITY_FORMATCHANGE -                        : ATSParser::DISCONTINUITY_NONE; -                track->mPackets->queueDiscontinuity( type, NULL, true /* discard */); -            } +            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);              sp<ABuffer> buffer = mediaBufferToABuffer(                      mbuf, trackType, seekTimeUs, actualTimeUs); @@ -1590,10 +1536,26 @@ void NuPlayer::GenericSource::readBuffer(                      false /* discard */);  #endif          } else { +            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);              track->mPackets->signalEOS(err);              break;          }      }  } +void NuPlayer::GenericSource::queueDiscontinuityIfNeeded( +        bool seeking, bool formatChange, media_track_type trackType, Track *track) { +    // formatChange && seeking: track whose source is changed during selection +    // formatChange && !seeking: track whose source is not changed during selection +    // !formatChange: normal seek +    if ((seeking || formatChange) +            && (trackType == MEDIA_TRACK_TYPE_AUDIO +            || trackType == MEDIA_TRACK_TYPE_VIDEO)) { +        ATSParser::DiscontinuityType type = (formatChange && seeking) +                ? 
ATSParser::DISCONTINUITY_FORMATCHANGE +                : ATSParser::DISCONTINUITY_NONE; +        track->mPackets->queueDiscontinuity(type, NULL /* extra */, true /* discard */); +    } +} +  }  // namespace android diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h index 2d73ea9..dc85d2d 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.h +++ b/media/libmediaplayerservice/nuplayer/GenericSource.h @@ -31,12 +31,13 @@ class DecryptHandle;  class DrmManagerClient;  struct AnotherPacketSource;  struct ARTSPController; -struct DataSource; +class DataSource; +class IDataSource;  struct IMediaHTTPService;  struct MediaSource;  class MediaBuffer;  struct NuCachedSource2; -struct WVMExtractor; +class WVMExtractor;  struct NuPlayer::GenericSource : public NuPlayer::Source {      GenericSource(const sp<AMessage> ¬ify, bool uidValid, uid_t uid); @@ -48,6 +49,8 @@ struct NuPlayer::GenericSource : public NuPlayer::Source {      status_t setDataSource(int fd, int64_t offset, int64_t length); +    status_t setDataSource(const sp<DataSource>& dataSource); +      virtual void prepareAsync();      virtual void start(); @@ -72,6 +75,8 @@ struct NuPlayer::GenericSource : public NuPlayer::Source {      virtual status_t setBuffers(bool audio, Vector<MediaBuffer *> &buffers); +    virtual bool isStreaming() const; +  protected:      virtual ~GenericSource(); @@ -140,14 +145,13 @@ private:      sp<DecryptHandle> mDecryptHandle;      bool mStarted;      bool mStopRead; -    String8 mContentType; -    AString mSniffedMIME; -    off64_t mMetaDataSize;      int64_t mBitrate;      int32_t mPollBufferingGeneration;      uint32_t mPendingReadBufferTypes;      bool mBuffering;      bool mPrepareBuffering; +    int32_t mPrevBufferPercentage; +      mutable Mutex mReadBufferLock;      sp<ALooper> mLooper; @@ -159,8 +163,6 @@ private:      int64_t getLastReadPosition();      void setDrmPlaybackStatusIfNeeded(int 
playbackStatus, int64_t position); -    status_t prefillCacheIfNecessary(); -      void notifyPreparedAndCleanup(status_t err);      void onSecureDecodersInstantiated(status_t err);      void finishPrepareAsync(); @@ -200,11 +202,14 @@ private:              media_track_type trackType,              int64_t seekTimeUs = -1ll, int64_t *actualTimeUs = NULL, bool formatChange = false); +    void queueDiscontinuityIfNeeded( +            bool seeking, bool formatChange, media_track_type trackType, Track *track); +      void schedulePollBuffering();      void cancelPollBuffering();      void restartPollBuffering();      void onPollBuffering(); -    void notifyBufferingUpdate(int percentage); +    void notifyBufferingUpdate(int32_t percentage);      void startBufferingIfNecessary();      void stopBufferingIfNecessary();      void sendCacheStats(); diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index a26ef9e..126625a 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -22,7 +22,6 @@  #include "AnotherPacketSource.h"  #include "LiveDataSource.h" -#include "LiveSession.h"  #include <media/IMediaHTTPService.h>  #include <media/stagefright/foundation/ABuffer.h> @@ -30,6 +29,7 @@  #include <media/stagefright/foundation/AMessage.h>  #include <media/stagefright/MediaErrors.h>  #include <media/stagefright/MetaData.h> +#include <media/stagefright/MediaDefs.h>  namespace android { @@ -44,7 +44,10 @@ NuPlayer::HTTPLiveSource::HTTPLiveSource(        mFlags(0),        mFinalResult(OK),        mOffset(0), -      mFetchSubtitleDataGeneration(0) { +      mFetchSubtitleDataGeneration(0), +      mFetchMetaDataGeneration(0), +      mHasMetadata(false), +      mMetadataSelected(false) {      if (headers) {          mExtraHeaders = *headers; @@ -81,7 +84,7 @@ void NuPlayer::HTTPLiveSource::prepareAsync() {          
mLiveLooper->registerHandler(this);      } -    sp<AMessage> notify = new AMessage(kWhatSessionNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatSessionNotify, this);      mLiveSession = new LiveSession(              notify, @@ -142,19 +145,47 @@ sp<AMessage> NuPlayer::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {  ssize_t NuPlayer::HTTPLiveSource::getSelectedTrack(media_track_type type) const {      if (mLiveSession == NULL) {          return -1; +    } else if (type == MEDIA_TRACK_TYPE_METADATA) { +        // MEDIA_TRACK_TYPE_METADATA is always last track +        // mMetadataSelected can only be true when mHasMetadata is true +        return mMetadataSelected ? (mLiveSession->getTrackCount() - 1) : -1;      } else {          return mLiveSession->getSelectedTrack(type);      }  }  status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select, int64_t /*timeUs*/) { -    status_t err = mLiveSession->selectTrack(trackIndex, select); +    if (mLiveSession == NULL) { +        return INVALID_OPERATION; +    } + +    status_t err = INVALID_OPERATION; +    bool postFetchMsg = false, isSub = false; +    if (!mHasMetadata || trackIndex != mLiveSession->getTrackCount() - 1) { +        err = mLiveSession->selectTrack(trackIndex, select); +        postFetchMsg = select; +        isSub = true; +    } else { +        // metadata track; i.e. 
(mHasMetadata && trackIndex == mLiveSession->getTrackCount() - 1) +        if (mMetadataSelected && !select) { +            err = OK; +        } else if (!mMetadataSelected && select) { +            postFetchMsg = true; +            err = OK; +        } else { +            err = BAD_VALUE; // behave as LiveSession::selectTrack +        } + +        mMetadataSelected = select; +    }      if (err == OK) { -        mFetchSubtitleDataGeneration++; -        if (select) { -            sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id()); -            msg->setInt32("generation", mFetchSubtitleDataGeneration); +        int32_t &generation = isSub ? mFetchSubtitleDataGeneration : mFetchMetaDataGeneration; +        generation++; +        if (postFetchMsg) { +            int32_t what = isSub ? kWhatFetchSubtitleData : kWhatFetchMetaData; +            sp<AMessage> msg = new AMessage(what, this); +            msg->setInt32("generation", generation);              msg->post();          }      } @@ -169,6 +200,49 @@ status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) {      return mLiveSession->seekTo(seekTimeUs);  } +void NuPlayer::HTTPLiveSource::pollForRawData( +        const sp<AMessage> &msg, int32_t currentGeneration, +        LiveSession::StreamType fetchType, int32_t pushWhat) { + +    int32_t generation; +    CHECK(msg->findInt32("generation", &generation)); + +    if (generation != currentGeneration) { +        return; +    } + +    sp<ABuffer> buffer; +    while (mLiveSession->dequeueAccessUnit(fetchType, &buffer) == OK) { + +        sp<AMessage> notify = dupNotify(); +        notify->setInt32("what", pushWhat); +        notify->setBuffer("buffer", buffer); + +        int64_t timeUs, baseUs, delayUs; +        CHECK(buffer->meta()->findInt64("baseUs", &baseUs)); +        CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); +        delayUs = baseUs + timeUs - ALooper::GetNowUs(); + +        if (fetchType == LiveSession::STREAMTYPE_SUBTITLES) { +       
     notify->post(); +            msg->post(delayUs > 0ll ? delayUs : 0ll); +            return; +        } else if (fetchType == LiveSession::STREAMTYPE_METADATA) { +            if (delayUs < -1000000ll) { // 1 second +                continue; +            } +            notify->post(); +            // push all currently available metadata buffers in each invocation of pollForRawData +            // continue; +        } else { +            TRESPASS(); +        } +    } + +    // try again in 1 second +    msg->post(1000000ll); +} +  void NuPlayer::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) {      switch (msg->what()) {          case kWhatSessionNotify: @@ -179,33 +253,24 @@ void NuPlayer::HTTPLiveSource::onMessageReceived(const sp<AMessage> &msg) {          case kWhatFetchSubtitleData:          { -            int32_t generation; -            CHECK(msg->findInt32("generation", &generation)); +            pollForRawData( +                    msg, mFetchSubtitleDataGeneration, +                    /* fetch */ LiveSession::STREAMTYPE_SUBTITLES, +                    /* push */ kWhatSubtitleData); -            if (generation != mFetchSubtitleDataGeneration) { -                // stale +            break; +        } + +        case kWhatFetchMetaData: +        { +            if (!mMetadataSelected) {                  break;              } -            sp<ABuffer> buffer; -            if (mLiveSession->dequeueAccessUnit( -                    LiveSession::STREAMTYPE_SUBTITLES, &buffer) == OK) { -                sp<AMessage> notify = dupNotify(); -                notify->setInt32("what", kWhatSubtitleData); -                notify->setBuffer("buffer", buffer); -                notify->post(); - -                int64_t timeUs, baseUs, durationUs, delayUs; -                CHECK(buffer->meta()->findInt64("baseUs", &baseUs)); -                CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); -                CHECK(buffer->meta()->findInt64("durationUs", 
&durationUs)); -                delayUs = baseUs + timeUs - ALooper::GetNowUs(); - -                msg->post(delayUs > 0ll ? delayUs : 0ll); -            } else { -                // try again in 1 second -                msg->post(1000000ll); -            } +            pollForRawData( +                    msg, mFetchMetaDataGeneration, +                    /* fetch */ LiveSession::STREAMTYPE_METADATA, +                    /* push */ kWhatTimedMetaData);              break;          } @@ -281,6 +346,47 @@ void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {              break;          } +        case LiveSession::kWhatBufferingStart: +        { +            sp<AMessage> notify = dupNotify(); +            notify->setInt32("what", kWhatPauseOnBufferingStart); +            notify->post(); +            break; +        } + +        case LiveSession::kWhatBufferingEnd: +        { +            sp<AMessage> notify = dupNotify(); +            notify->setInt32("what", kWhatResumeOnBufferingEnd); +            notify->post(); +            break; +        } + + +        case LiveSession::kWhatBufferingUpdate: +        { +            sp<AMessage> notify = dupNotify(); +            int32_t percentage; +            CHECK(msg->findInt32("percentage", &percentage)); +            notify->setInt32("what", kWhatBufferingUpdate); +            notify->setInt32("percentage", percentage); +            notify->post(); +            break; +        } + +        case LiveSession::kWhatMetadataDetected: +        { +            if (!mHasMetadata) { +                mHasMetadata = true; + +                sp<AMessage> notify = dupNotify(); +                // notification without buffer triggers MEDIA_INFO_METADATA_UPDATE +                notify->setInt32("what", kWhatTimedMetaData); +                notify->post(); +            } +            break; +        } +          case LiveSession::kWhatError:          {              break; diff --git 
a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h index bbb8981..9e0ec2f 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -21,6 +21,8 @@  #include "NuPlayer.h"  #include "NuPlayerSource.h" +#include "LiveSession.h" +  namespace android {  struct LiveSession; @@ -60,6 +62,7 @@ private:      enum {          kWhatSessionNotify,          kWhatFetchSubtitleData, +        kWhatFetchMetaData,      };      sp<IMediaHTTPService> mHTTPService; @@ -71,8 +74,14 @@ private:      sp<ALooper> mLiveLooper;      sp<LiveSession> mLiveSession;      int32_t mFetchSubtitleDataGeneration; +    int32_t mFetchMetaDataGeneration; +    bool mHasMetadata; +    bool mMetadataSelected;      void onSessionNotify(const sp<AMessage> &msg); +    void pollForRawData( +            const sp<AMessage> &msg, int32_t currentGeneration, +            LiveSession::StreamType fetchType, int32_t pushWhat);      DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource);  }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index aeea204..77b9799 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -37,6 +37,9 @@  #include <cutils/properties.h> +#include <media/AudioResamplerPublic.h> +#include <media/AVSyncSettings.h> +  #include <media/stagefright/foundation/hexdump.h>  #include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h> @@ -45,7 +48,9 @@  #include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MediaErrors.h>  #include <media/stagefright/MetaData.h> +  #include <gui/IGraphicBufferProducer.h> +#include <gui/Surface.h>  #include "avc_utils.h" @@ -64,18 +69,16 @@ private:  };  struct NuPlayer::SeekAction : public Action { -    SeekAction(int64_t seekTimeUs, bool needNotify) -        : 
mSeekTimeUs(seekTimeUs), -          mNeedNotify(needNotify) { +    SeekAction(int64_t seekTimeUs) +        : mSeekTimeUs(seekTimeUs) {      }      virtual void execute(NuPlayer *player) { -        player->performSeek(mSeekTimeUs, mNeedNotify); +        player->performSeek(mSeekTimeUs);      }  private:      int64_t mSeekTimeUs; -    bool mNeedNotify;      DISALLOW_EVIL_CONSTRUCTORS(SeekAction);  }; @@ -96,16 +99,16 @@ private:  };  struct NuPlayer::SetSurfaceAction : public Action { -    SetSurfaceAction(const sp<NativeWindowWrapper> &wrapper) -        : mWrapper(wrapper) { +    SetSurfaceAction(const sp<Surface> &surface) +        : mSurface(surface) {      }      virtual void execute(NuPlayer *player) { -        player->performSetSurface(mWrapper); +        player->performSetSurface(mSurface);      }  private: -    sp<NativeWindowWrapper> mWrapper; +    sp<Surface> mSurface;      DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);  }; @@ -163,13 +166,15 @@ private:  //////////////////////////////////////////////////////////////////////////////// -NuPlayer::NuPlayer() +NuPlayer::NuPlayer(pid_t pid)      : mUIDValid(false), +      mPID(pid),        mSourceFlags(0),        mOffloadAudio(false),        mAudioDecoderGeneration(0),        mVideoDecoderGeneration(0),        mRendererGeneration(0), +      mPreviousSeekTimeUs(0),        mAudioEOS(false),        mVideoEOS(false),        mScanSourcesPending(false), @@ -180,9 +185,13 @@ NuPlayer::NuPlayer()        mFlushingVideo(NONE),        mResumePending(false),        mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW), +      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT), +      mVideoFpsHint(-1.f),        mStarted(false), +      mSourceStarted(false),        mPaused(false), -      mPausedByClient(false) { +      mPausedByClient(false), +      mPausedForBuffering(false) {      clearFlushComplete();  } @@ -199,9 +208,9 @@ void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {  }  void 
NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) { -    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); +    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this); -    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);      msg->setObject("source", new StreamingSource(notify, source));      msg->post(); @@ -229,10 +238,10 @@ void NuPlayer::setDataSourceAsync(          const char *url,          const KeyedVector<String8, String8> *headers) { -    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); +    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);      size_t len = strlen(url); -    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);      sp<Source> source;      if (IsHTTPLiveURL(url)) { @@ -266,9 +275,9 @@ void NuPlayer::setDataSourceAsync(  }  void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) { -    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); +    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this); -    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);      sp<GenericSource> source =              new GenericSource(notify, mUIDValid, mUID); @@ -284,38 +293,108 @@ void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {      msg->post();  } +void NuPlayer::setDataSourceAsync(const sp<DataSource> &dataSource) { +    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this); +    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this); + +    sp<GenericSource> source = new GenericSource(notify, mUIDValid, mUID); +    status_t err = source->setDataSource(dataSource); + +    if (err != OK) { +        ALOGE("Failed to set data source!"); +        source = NULL; +    } + +    msg->setObject("source", source); +    msg->post(); +} +  
void NuPlayer::prepareAsync() { -    (new AMessage(kWhatPrepare, id()))->post(); +    (new AMessage(kWhatPrepare, this))->post();  }  void NuPlayer::setVideoSurfaceTextureAsync(          const sp<IGraphicBufferProducer> &bufferProducer) { -    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id()); +    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this);      if (bufferProducer == NULL) { -        msg->setObject("native-window", NULL); +        msg->setObject("surface", NULL);      } else { -        msg->setObject( -                "native-window", -                new NativeWindowWrapper( -                    new Surface(bufferProducer, true /* controlledByApp */))); +        msg->setObject("surface", new Surface(bufferProducer, true /* controlledByApp */));      }      msg->post();  }  void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) { -    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id()); +    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, this);      msg->setObject("sink", sink);      msg->post();  }  void NuPlayer::start() { -    (new AMessage(kWhatStart, id()))->post(); +    (new AMessage(kWhatStart, this))->post(); +} + +status_t NuPlayer::setPlaybackSettings(const AudioPlaybackRate &rate) { +    // do some cursory validation of the settings here. audio modes are +    // only validated when set on the audiosink. 
+     if ((rate.mSpeed != 0.f && rate.mSpeed < AUDIO_TIMESTRETCH_SPEED_MIN) +            || rate.mSpeed > AUDIO_TIMESTRETCH_SPEED_MAX +            || rate.mPitch < AUDIO_TIMESTRETCH_SPEED_MIN +            || rate.mPitch > AUDIO_TIMESTRETCH_SPEED_MAX) { +        return BAD_VALUE; +    } +    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this); +    writeToAMessage(msg, rate); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +    } +    return err; +} + +status_t NuPlayer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) { +    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +        if (err == OK) { +            readFromAMessage(response, rate); +        } +    } +    return err; +} + +status_t NuPlayer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) { +    sp<AMessage> msg = new AMessage(kWhatConfigSync, this); +    writeToAMessage(msg, sync, videoFpsHint); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +    } +    return err; +} + +status_t NuPlayer::getSyncSettings( +        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) { +    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +        if (err == OK) { +            readFromAMessage(response, sync, videoFps); +        } +    } +    return err;  }  void NuPlayer::pause() { -    (new AMessage(kWhatPause, id()))->post(); +    (new 
AMessage(kWhatPause, this))->post();  }  void NuPlayer::resetAsync() { @@ -329,11 +408,11 @@ void NuPlayer::resetAsync() {          mSource->disconnect();      } -    (new AMessage(kWhatReset, id()))->post(); +    (new AMessage(kWhatReset, this))->post();  }  void NuPlayer::seekToAsync(int64_t seekTimeUs, bool needNotify) { -    sp<AMessage> msg = new AMessage(kWhatSeek, id()); +    sp<AMessage> msg = new AMessage(kWhatSeek, this);      msg->setInt64("seekTimeUs", seekTimeUs);      msg->setInt32("needNotify", needNotify);      msg->post(); @@ -345,23 +424,35 @@ void NuPlayer::writeTrackInfo(      int32_t trackType;      CHECK(format->findInt32("type", &trackType)); +    AString mime; +    if (!format->findString("mime", &mime)) { +        // Java MediaPlayer only uses mimetype for subtitle and timedtext tracks. +        // If we can't find the mimetype here it means that we wouldn't be needing +        // the mimetype on the Java end. We still write a placeholder mime to keep the +        // (de)serialization logic simple. 
+        if (trackType == MEDIA_TRACK_TYPE_AUDIO) { +            mime = "audio/"; +        } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) { +            mime = "video/"; +        } else { +            TRESPASS(); +        } +    } +      AString lang;      CHECK(format->findString("language", &lang));      reply->writeInt32(2); // write something non-zero      reply->writeInt32(trackType); +    reply->writeString16(String16(mime.c_str()));      reply->writeString16(String16(lang.c_str()));      if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) { -        AString mime; -        CHECK(format->findString("mime", &mime)); -          int32_t isAuto, isDefault, isForced;          CHECK(format->findInt32("auto", &isAuto));          CHECK(format->findInt32("default", &isDefault));          CHECK(format->findInt32("forced", &isForced)); -        reply->writeString16(String16(mime.c_str()));          reply->writeInt32(isAuto);          reply->writeInt32(isDefault);          reply->writeInt32(isForced); @@ -401,7 +492,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {          case kWhatGetTrackInfo:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              Parcel* reply; @@ -454,7 +545,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {              sp<AMessage> response = new AMessage;              response->setInt32("err", err); -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              response->postReply(replyID);              break; @@ -462,7 +553,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {          case kWhatSelectTrack:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              size_t trackIndex; @@ -532,15 +623,25 @@ void NuPlayer::onMessageReceived(const 
sp<AMessage> &msg) {              break;          } -        case kWhatSetVideoNativeWindow: +        case kWhatSetVideoSurface:          { -            ALOGV("kWhatSetVideoNativeWindow");              sp<RefBase> obj; -            CHECK(msg->findObject("native-window", &obj)); - -            if (mSource == NULL || mSource->getFormat(false /* audio */) == NULL) { -                performSetSurface(static_cast<NativeWindowWrapper *>(obj.get())); +            CHECK(msg->findObject("surface", &obj)); +            sp<Surface> surface = static_cast<Surface *>(obj.get()); + +            ALOGD("onSetVideoSurface(%p, %s video decoder)", +                    surface.get(), +                    (mSource != NULL && mStarted && mSource->getFormat(false /* audio */) != NULL +                            && mVideoDecoder != NULL) ? "have" : "no"); + +            // Need to check mStarted before calling mSource->getFormat because NuPlayer might +            // be in preparing state and it could take long time. +            // When mStarted is true, mSource must have been set. 
+            if (mSource == NULL || !mStarted || mSource->getFormat(false /* audio */) == NULL +                    // NOTE: mVideoDecoder's mSurface is always non-null +                    || (mVideoDecoder != NULL && mVideoDecoder->setVideoSurface(surface) == OK)) { +                performSetSurface(surface);                  break;              } @@ -548,11 +649,9 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {                      new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,                                             FLUSH_CMD_SHUTDOWN /* video */)); -            mDeferredActions.push_back( -                    new SetSurfaceAction( -                        static_cast<NativeWindowWrapper *>(obj.get()))); +            mDeferredActions.push_back(new SetSurfaceAction(surface)); -            if (obj != NULL) { +            if (obj != NULL || mAudioDecoder != NULL) {                  if (mStarted) {                      // Issue a seek to refresh the video screen only if started otherwise                      // the extractor may not yet be started and will assert. 
@@ -561,7 +660,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {                      int64_t currentPositionUs = 0;                      if (getCurrentPosition(¤tPositionUs) == OK) {                          mDeferredActions.push_back( -                                new SeekAction(currentPositionUs, false /* needNotify */)); +                                new SeekAction(currentPositionUs));                      }                  } @@ -596,7 +695,10 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {          {              ALOGV("kWhatStart");              if (mStarted) { -                onResume(); +                // do not resume yet if the source is still buffering +                if (!mPausedForBuffering) { +                    onResume(); +                }              } else {                  onStart();              } @@ -604,6 +706,117 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {              break;          } +        case kWhatConfigPlayback: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); +            AudioPlaybackRate rate /* sanitized */; +            readFromAMessage(msg, &rate); +            status_t err = OK; +            if (mRenderer != NULL) { +                err = mRenderer->setPlaybackSettings(rate); +            } +            if (err == OK) { +                if (rate.mSpeed == 0.f) { +                    onPause(); +                    // save all other settings (using non-paused speed) +                    // so we can restore them on start +                    AudioPlaybackRate newRate = rate; +                    newRate.mSpeed = mPlaybackSettings.mSpeed; +                    mPlaybackSettings = newRate; +                } else { /* rate.mSpeed != 0.f */ +                    onResume(); +                    mPlaybackSettings = rate; +                } +            } + +            if (mVideoDecoder != NULL) { +                
float rate = getFrameRate(); +                if (rate > 0) { +                    sp<AMessage> params = new AMessage(); +                    params->setFloat("operating-rate", rate * mPlaybackSettings.mSpeed); +                    mVideoDecoder->setParameters(params); +                } +            } + +            sp<AMessage> response = new AMessage; +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } + +        case kWhatGetPlaybackSettings: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); +            AudioPlaybackRate rate = mPlaybackSettings; +            status_t err = OK; +            if (mRenderer != NULL) { +                err = mRenderer->getPlaybackSettings(&rate); +            } +            if (err == OK) { +                // get playback settings used by renderer, as it may be +                // slightly off due to audiosink not taking small changes. 
+                mPlaybackSettings = rate; +                if (mPaused) { +                    rate.mSpeed = 0.f; +                } +            } +            sp<AMessage> response = new AMessage; +            if (err == OK) { +                writeToAMessage(response, rate); +            } +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } + +        case kWhatConfigSync: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); + +            ALOGV("kWhatConfigSync"); +            AVSyncSettings sync; +            float videoFpsHint; +            readFromAMessage(msg, &sync, &videoFpsHint); +            status_t err = OK; +            if (mRenderer != NULL) { +                err = mRenderer->setSyncSettings(sync, videoFpsHint); +            } +            if (err == OK) { +                mSyncSettings = sync; +                mVideoFpsHint = videoFpsHint; +            } +            sp<AMessage> response = new AMessage; +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } + +        case kWhatGetSyncSettings: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); +            AVSyncSettings sync = mSyncSettings; +            float videoFps = mVideoFpsHint; +            status_t err = OK; +            if (mRenderer != NULL) { +                err = mRenderer->getSyncSettings(&sync, &videoFps); +                if (err == OK) { +                    mSyncSettings = sync; +                    mVideoFpsHint = videoFps; +                } +            } +            sp<AMessage> response = new AMessage; +            if (err == OK) { +                writeToAMessage(response, sync, videoFps); +            } +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } +          
case kWhatScanSources:          {              int32_t generation; @@ -623,26 +836,12 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {              // initialize video before audio because successful initialization of              // video may change deep buffer mode of audio. -            if (mNativeWindow != NULL) { +            if (mSurface != NULL) {                  instantiateDecoder(false, &mVideoDecoder);              }              // Don't try to re-open audio sink if there's an existing decoder.              if (mAudioSink != NULL && mAudioDecoder == NULL) { -                sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */); -                sp<AMessage> videoFormat = mSource->getFormat(false /* audio */); -                audio_stream_type_t streamType = mAudioSink->getAudioStreamType(); -                const bool hasVideo = (videoFormat != NULL); -                const bool canOffload = canOffloadStream( -                        audioMeta, hasVideo, true /* is_streaming */, streamType); -                if (canOffload) { -                    if (!mOffloadAudio) { -                        mRenderer->signalEnableOffloadAudio(); -                    } -                    // open audio sink early under offload mode. 
-                    sp<AMessage> format = mSource->getFormat(true /*audio*/); -                    tryOpenAudioSinkForOffload(format, hasVideo); -                }                  instantiateDecoder(true, &mAudioDecoder);              } @@ -671,7 +870,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {              }              if ((mAudioDecoder == NULL && mAudioSink != NULL) -                    || (mVideoDecoder == NULL && mNativeWindow != NULL)) { +                    || (mVideoDecoder == NULL && mSurface != NULL)) {                  msg->post(100000ll);                  mScanSourcesPending = true;              } @@ -810,6 +1009,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {                          // Widevine source reads must stop before releasing the video decoder.                          if (!audio && mSource != NULL && mSourceFlags & Source::FLAG_SECURE) {                              mSource->stop(); +                            mSourceStarted = false;                          }                          getDecoder(audio)->initiateShutdown(); // In the middle of a seek.                          *flushing = SHUTTING_DOWN_DECODER;     // Shut down. @@ -876,6 +1076,11 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {                  CHECK(msg->findInt32("audio", &audio));                  ALOGV("renderer %s flush completed.", audio ? "audio" : "video"); +                if (audio && (mFlushingAudio == NONE || mFlushingAudio == FLUSHED +                        || mFlushingAudio == SHUT_DOWN)) { +                    // Flush has been handled by tear down. 
+                    break; +                }                  handleFlushComplete(audio, false /* isDecoder */);                  finishFlushIfPossible();              } else if (what == Renderer::kWhatVideoRenderingStart) { @@ -883,15 +1088,28 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {              } else if (what == Renderer::kWhatMediaRenderingStart) {                  ALOGV("media rendering started");                  notifyListener(MEDIA_STARTED, 0, 0); -            } else if (what == Renderer::kWhatAudioOffloadTearDown) { -                ALOGV("Tear down audio offload, fall back to s/w path if due to error."); -                int64_t positionUs; -                CHECK(msg->findInt64("positionUs", &positionUs)); +            } else if (what == Renderer::kWhatAudioTearDown) {                  int32_t reason;                  CHECK(msg->findInt32("reason", &reason)); -                closeAudioSink(); +                ALOGV("Tear down audio with reason %d.", reason);                  mAudioDecoder.clear();                  ++mAudioDecoderGeneration; +                bool needsToCreateAudioDecoder = true; +                if (mFlushingAudio == FLUSHING_DECODER) { +                    mFlushComplete[1 /* audio */][1 /* isDecoder */] = true; +                    mFlushingAudio = FLUSHED; +                    finishFlushIfPossible(); +                } else if (mFlushingAudio == FLUSHING_DECODER_SHUTDOWN +                        || mFlushingAudio == SHUTTING_DOWN_DECODER) { +                    mFlushComplete[1 /* audio */][1 /* isDecoder */] = true; +                    mFlushingAudio = SHUT_DOWN; +                    finishFlushIfPossible(); +                    needsToCreateAudioDecoder = false; +                } +                if (mRenderer == NULL) { +                    break; +                } +                closeAudioSink();                  mRenderer->flush(                          true /* audio */, false /* notifyComplete 
*/);                  if (mVideoDecoder != NULL) { @@ -899,10 +1117,13 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {                              false /* audio */, false /* notifyComplete */);                  } -                performSeek(positionUs, false /* needNotify */); -                if (reason == Renderer::kDueToError) { -                    mRenderer->signalDisableOffloadAudio(); -                    mOffloadAudio = false; +                int64_t positionUs; +                if (!msg->findInt64("positionUs", &positionUs)) { +                    positionUs = mPreviousSeekTimeUs; +                } +                performSeek(positionUs); + +                if (reason == Renderer::kDueToError && needsToCreateAudioDecoder) {                      instantiateDecoder(true /* audio */, &mAudioDecoder);                  }              } @@ -938,14 +1159,31 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {              CHECK(msg->findInt32("needNotify", &needNotify));              ALOGV("kWhatSeek seekTimeUs=%lld us, needNotify=%d", -                    seekTimeUs, needNotify); +                    (long long)seekTimeUs, needNotify); + +            if (!mStarted) { +                // Seek before the player is started. In order to preview video, +                // need to start the player and pause it. This branch is called +                // only once if needed. After the player is started, any seek +                // operation will go through normal path. +                // Audio-only cases are handled separately. 
+                onStart(seekTimeUs); +                if (mStarted) { +                    onPause(); +                    mPausedByClient = true; +                } +                if (needNotify) { +                    notifyDriverSeekComplete(); +                } +                break; +            }              mDeferredActions.push_back(                      new FlushDecoderAction(FLUSH_CMD_FLUSH /* audio */,                                             FLUSH_CMD_FLUSH /* video */));              mDeferredActions.push_back( -                    new SeekAction(seekTimeUs, needNotify)); +                    new SeekAction(seekTimeUs));              // After a flush without shutdown, decoder is paused.              // Don't resume it until source seek is done, otherwise it could @@ -1017,7 +1255,7 @@ status_t NuPlayer::onInstantiateSecureDecoders() {      // TRICKY: We rely on mRenderer being null, so that decoder does not start requesting      // data on instantiation. -    if (mNativeWindow != NULL) { +    if (mSurface != NULL) {          err = instantiateDecoder(false, &mVideoDecoder);          if (err != OK) {              return err; @@ -1033,14 +1271,23 @@ status_t NuPlayer::onInstantiateSecureDecoders() {      return OK;  } -void NuPlayer::onStart() { +void NuPlayer::onStart(int64_t startPositionUs) { +    if (!mSourceStarted) { +        mSourceStarted = true; +        mSource->start(); +    } +    if (startPositionUs > 0) { +        performSeek(startPositionUs); +        if (mSource->getFormat(false /* audio */) == NULL) { +            return; +        } +    } +      mOffloadAudio = false;      mAudioEOS = false;      mVideoEOS = false;      mStarted = true; -    mSource->start(); -      uint32_t flags = 0;      if (mSource->isRealTime()) { @@ -1056,26 +1303,30 @@ void NuPlayer::onStart() {      sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);      mOffloadAudio = -        canOffloadStream(audioMeta, (videoFormat != NULL), -          
               true /* is_streaming */, streamType); +        canOffloadStream(audioMeta, (videoFormat != NULL), mSource->isStreaming(), streamType);      if (mOffloadAudio) {          flags |= Renderer::FLAG_OFFLOAD_AUDIO;      } -    sp<AMessage> notify = new AMessage(kWhatRendererNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);      ++mRendererGeneration;      notify->setInt32("generation", mRendererGeneration);      mRenderer = new Renderer(mAudioSink, notify, flags); -      mRendererLooper = new ALooper;      mRendererLooper->setName("NuPlayerRenderer");      mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);      mRendererLooper->registerHandler(mRenderer); -    sp<MetaData> meta = getFileMeta(); -    int32_t rate; -    if (meta != NULL -            && meta->findInt32(kKeyFrameRate, &rate) && rate > 0) { +    status_t err = mRenderer->setPlaybackSettings(mPlaybackSettings); +    if (err != OK) { +        mSource->stop(); +        mSourceStarted = false; +        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); +        return; +    } + +    float rate = getFrameRate(); +    if (rate > 0) {          mRenderer->setVideoFrameRate(rate);      } @@ -1137,6 +1388,7 @@ void NuPlayer::handleFlushComplete(bool audio, bool isDecoder) {                  // Widevine source reads must stop before releasing the video decoder.                  
if (mSource != NULL && mSourceFlags & Source::FLAG_SECURE) {                      mSource->stop(); +                    mSourceStarted = false;                  }              }              getDecoder(audio)->initiateShutdown(); @@ -1176,7 +1428,7 @@ void NuPlayer::postScanSources() {          return;      } -    sp<AMessage> msg = new AMessage(kWhatScanSources, id()); +    sp<AMessage> msg = new AMessage(kWhatScanSources, this);      msg->setInt32("generation", mScanSourcesGeneration);      msg->post(); @@ -1203,6 +1455,38 @@ void NuPlayer::closeAudioSink() {      mRenderer->closeAudioSink();  } +void NuPlayer::determineAudioModeChange() { +    if (mSource == NULL || mAudioSink == NULL) { +        return; +    } + +    if (mRenderer == NULL) { +        ALOGW("No renderer can be used to determine audio mode. Use non-offload for safety."); +        mOffloadAudio = false; +        return; +    } + +    sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */); +    sp<AMessage> videoFormat = mSource->getFormat(false /* audio */); +    audio_stream_type_t streamType = mAudioSink->getAudioStreamType(); +    const bool hasVideo = (videoFormat != NULL); +    const bool canOffload = canOffloadStream( +            audioMeta, hasVideo, mSource->isStreaming(), streamType); +    if (canOffload) { +        if (!mOffloadAudio) { +            mRenderer->signalEnableOffloadAudio(); +        } +        // open audio sink early under offload mode. 
+        sp<AMessage> format = mSource->getFormat(true /*audio*/); +        tryOpenAudioSinkForOffload(format, hasVideo); +    } else { +        if (mOffloadAudio) { +            mRenderer->signalDisableOffloadAudio(); +            mOffloadAudio = false; +        } +    } +} +  status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {      if (*decoder != NULL) {          return OK; @@ -1214,11 +1498,13 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {          return -EWOULDBLOCK;      } +    format->setInt32("priority", 0 /* realtime */); +      if (!audio) {          AString mime;          CHECK(format->findString("mime", &mime)); -        sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id()); +        sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, this);          if (mCCDecoder == NULL) {              mCCDecoder = new CCDecoder(ccNotify);          } @@ -1230,28 +1516,36 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {          if (mSourceFlags & Source::FLAG_PROTECTED) {              format->setInt32("protected", true);          } + +        float rate = getFrameRate(); +        if (rate > 0) { +            format->setFloat("operating-rate", rate * mPlaybackSettings.mSpeed); +        }      }      if (audio) { -        sp<AMessage> notify = new AMessage(kWhatAudioNotify, id()); +        sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);          ++mAudioDecoderGeneration;          notify->setInt32("generation", mAudioDecoderGeneration); +        determineAudioModeChange();          if (mOffloadAudio) { +            const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL); +            format->setInt32("has-video", hasVideo);              *decoder = new DecoderPassThrough(notify, mSource, mRenderer);          } else { -            *decoder = new Decoder(notify, mSource, mRenderer); +            *decoder = new Decoder(notify, 
mSource, mPID, mRenderer);          }      } else { -        sp<AMessage> notify = new AMessage(kWhatVideoNotify, id()); +        sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);          ++mVideoDecoderGeneration;          notify->setInt32("generation", mVideoDecoderGeneration);          *decoder = new Decoder( -                notify, mSource, mRenderer, mNativeWindow, mCCDecoder); +                notify, mSource, mPID, mRenderer, mSurface, mCCDecoder);          // enable FRC if high-quality AV sync is requested, even if not -        // queuing to native window, as this will even improve textureview +        // directly queuing to display, as this will even improve textureview          // playback.          {              char value[PROPERTY_VALUE_MAX]; @@ -1299,8 +1593,6 @@ void NuPlayer::updateVideoSize(      }      int32_t displayWidth, displayHeight; -    int32_t cropLeft, cropTop, cropRight, cropBottom; -      if (outputFormat != NULL) {          int32_t width, height;          CHECK(outputFormat->findInt32("width", &width)); @@ -1382,7 +1674,11 @@ void NuPlayer::flushDecoder(bool audio, bool needShutdown) {      // Make sure we don't continue to scan sources until we finish flushing.      
++mScanSourcesGeneration; -    mScanSourcesPending = false; +    if (mScanSourcesPending) { +        mDeferredActions.push_back( +                new SimpleAction(&NuPlayer::performScanSources)); +        mScanSourcesPending = false; +    }      decoder->signalFlush(); @@ -1421,9 +1717,8 @@ void NuPlayer::queueDecoderShutdown(  status_t NuPlayer::setVideoScalingMode(int32_t mode) {      mVideoScalingMode = mode; -    if (mNativeWindow != NULL) { -        status_t ret = native_window_set_scaling_mode( -                mNativeWindow->getNativeWindow().get(), mVideoScalingMode); +    if (mSurface != NULL) { +        status_t ret = native_window_set_scaling_mode(mSurface.get(), mVideoScalingMode);          if (ret != OK) {              ALOGE("Failed to set scaling mode (%d): %s",                  -ret, strerror(-ret)); @@ -1434,7 +1729,7 @@ status_t NuPlayer::setVideoScalingMode(int32_t mode) {  }  status_t NuPlayer::getTrackInfo(Parcel* reply) const { -    sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, id()); +    sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, this);      msg->setPointer("reply", reply);      sp<AMessage> response; @@ -1443,7 +1738,7 @@ status_t NuPlayer::getTrackInfo(Parcel* reply) const {  }  status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const { -    sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id()); +    sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, this);      msg->setPointer("reply", reply);      msg->setInt32("type", type); @@ -1456,7 +1751,7 @@ status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const {  }  status_t NuPlayer::selectTrack(size_t trackIndex, bool select, int64_t timeUs) { -    sp<AMessage> msg = new AMessage(kWhatSelectTrack, id()); +    sp<AMessage> msg = new AMessage(kWhatSelectTrack, this);      msg->setSize("trackIndex", trackIndex);      msg->setInt32("select", select);      msg->setInt64("timeUs", timeUs); @@ -1484,13 +1779,15 @@ status_t 
NuPlayer::getCurrentPosition(int64_t *mediaUs) {      return renderer->getCurrentPosition(mediaUs);  } -void NuPlayer::getStats(int64_t *numFramesTotal, int64_t *numFramesDropped) { -    sp<DecoderBase> decoder = getDecoder(false /* audio */); -    if (decoder != NULL) { -        decoder->getStats(numFramesTotal, numFramesDropped); -    } else { -        *numFramesTotal = 0; -        *numFramesDropped = 0; +void NuPlayer::getStats(Vector<sp<AMessage> > *mTrackStats) { +    CHECK(mTrackStats != NULL); + +    mTrackStats->clear(); +    if (mVideoDecoder != NULL) { +        mTrackStats->push_back(mVideoDecoder->getStats()); +    } +    if (mAudioDecoder != NULL) { +        mTrackStats->push_back(mAudioDecoder->getStats());      }  } @@ -1498,8 +1795,30 @@ sp<MetaData> NuPlayer::getFileMeta() {      return mSource->getFileFormatMeta();  } +float NuPlayer::getFrameRate() { +    sp<MetaData> meta = mSource->getFormatMeta(false /* audio */); +    if (meta == NULL) { +        return 0; +    } +    int32_t rate; +    if (!meta->findInt32(kKeyFrameRate, &rate)) { +        // fall back to try file meta +        sp<MetaData> fileMeta = getFileMeta(); +        if (fileMeta == NULL) { +            ALOGW("source has video meta but not file meta"); +            return -1; +        } +        int32_t fileMetaRate; +        if (!fileMeta->findInt32(kKeyFrameRate, &fileMetaRate)) { +            return -1; +        } +        return fileMetaRate; +    } +    return rate; +} +  void NuPlayer::schedulePollDuration() { -    sp<AMessage> msg = new AMessage(kWhatPollDuration, id()); +    sp<AMessage> msg = new AMessage(kWhatPollDuration, this);      msg->setInt32("generation", mPollDurationGeneration);      msg->post();  } @@ -1531,11 +1850,10 @@ void NuPlayer::processDeferredActions() {      }  } -void NuPlayer::performSeek(int64_t seekTimeUs, bool needNotify) { -    ALOGV("performSeek seekTimeUs=%lld us (%.2f secs), needNotify(%d)", -          seekTimeUs, -          seekTimeUs / 1E6, -   
       needNotify); +void NuPlayer::performSeek(int64_t seekTimeUs) { +    ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)", +          (long long)seekTimeUs, +          seekTimeUs / 1E6);      if (mSource == NULL) {          // This happens when reset occurs right before the loop mode @@ -1545,6 +1863,7 @@ void NuPlayer::performSeek(int64_t seekTimeUs, bool needNotify) {                  mAudioDecoder.get(), mVideoDecoder.get());          return;      } +    mPreviousSeekTimeUs = seekTimeUs;      mSource->seekTo(seekTimeUs);      ++mTimedTextGeneration; @@ -1603,6 +1922,7 @@ void NuPlayer::performReset() {      }      mStarted = false; +    mSourceStarted = false;  }  void NuPlayer::performScanSources() { @@ -1617,10 +1937,10 @@ void NuPlayer::performScanSources() {      }  } -void NuPlayer::performSetSurface(const sp<NativeWindowWrapper> &wrapper) { +void NuPlayer::performSetSurface(const sp<Surface> &surface) {      ALOGV("performSetSurface"); -    mNativeWindow = wrapper; +    mSurface = surface;      // XXX - ignore error from setVideoScalingMode for now      setVideoScalingMode(mVideoScalingMode); @@ -1660,11 +1980,15 @@ void NuPlayer::performResumeDecoders(bool needNotify) {  void NuPlayer::finishResume() {      if (mResumePending) {          mResumePending = false; -        if (mDriver != NULL) { -            sp<NuPlayerDriver> driver = mDriver.promote(); -            if (driver != NULL) { -                driver->notifySeekComplete(); -            } +        notifyDriverSeekComplete(); +    } +} + +void NuPlayer::notifyDriverSeekComplete() { +    if (mDriver != NULL) { +        sp<NuPlayerDriver> driver = mDriver.promote(); +        if (driver != NULL) { +            driver->notifySeekComplete();          }      }  } @@ -1773,9 +2097,10 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {          case Source::kWhatPauseOnBufferingStart:          {              // ignore if not playing -            if (mStarted && !mPausedByClient) { +            
if (mStarted) {                  ALOGI("buffer low, pausing..."); +                mPausedForBuffering = true;                  onPause();              }              // fall-thru @@ -1790,10 +2115,15 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {          case Source::kWhatResumeOnBufferingEnd:          {              // ignore if not playing -            if (mStarted && !mPausedByClient) { +            if (mStarted) {                  ALOGI("buffer ready, resuming..."); -                onResume(); +                mPausedForBuffering = false; + +                // do not resume yet if client didn't unpause +                if (!mPausedByClient) { +                    onResume(); +                }              }              // fall-thru          } @@ -1822,6 +2152,17 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {              break;          } +        case Source::kWhatTimedMetaData: +        { +            sp<ABuffer> buffer; +            if (!msg->findBuffer("buffer", &buffer)) { +                notifyListener(MEDIA_INFO, MEDIA_INFO_METADATA_UPDATE, 0); +            } else { +                sendTimedMetaData(buffer); +            } +            break; +        } +          case Source::kWhatTimedTextData:          {              int32_t generation; @@ -1930,6 +2271,19 @@ void NuPlayer::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) {      notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);  } +void NuPlayer::sendTimedMetaData(const sp<ABuffer> &buffer) { +    int64_t timeUs; +    CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); + +    Parcel in; +    in.writeInt64(timeUs); +    in.writeInt32(buffer->size()); +    in.writeInt32(buffer->size()); +    in.write(buffer->data(), buffer->size()); + +    notifyListener(MEDIA_META_DATA, 0, 0, &in); +} +  void NuPlayer::sendTimedTextData(const sp<ABuffer> &buffer) {      const void *data;      size_t size = 0; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h 
b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 30ede1a..c9f0bbd 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -18,19 +18,22 @@  #define NU_PLAYER_H_ +#include <media/AudioResamplerPublic.h>  #include <media/MediaPlayerInterface.h>  #include <media/stagefright/foundation/AHandler.h> -#include <media/stagefright/NativeWindowWrapper.h>  namespace android {  struct ABuffer;  struct AMessage; +struct AudioPlaybackRate; +struct AVSyncSettings; +class IDataSource;  class MetaData;  struct NuPlayerDriver;  struct NuPlayer : public AHandler { -    NuPlayer(); +    NuPlayer(pid_t pid);      void setUID(uid_t uid); @@ -45,12 +48,19 @@ struct NuPlayer : public AHandler {      void setDataSourceAsync(int fd, int64_t offset, int64_t length); +    void setDataSourceAsync(const sp<DataSource> &source); +      void prepareAsync();      void setVideoSurfaceTextureAsync(              const sp<IGraphicBufferProducer> &bufferProducer);      void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink); +    status_t setPlaybackSettings(const AudioPlaybackRate &rate); +    status_t getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */); +    status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint); +    status_t getSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */); +      void start();      void pause(); @@ -67,9 +77,10 @@ struct NuPlayer : public AHandler {      status_t getSelectedTrack(int32_t type, Parcel* reply) const;      status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);      status_t getCurrentPosition(int64_t *mediaUs); -    void getStats(int64_t *mNumFramesTotal, int64_t *mNumFramesDropped); +    void getStats(Vector<sp<AMessage> > *mTrackStats);      sp<MetaData> getFileMeta(); +    float getFrameRate();  protected:      virtual ~NuPlayer(); @@ -101,9 +112,13 @@ private:      enum {          kWhatSetDataSource              = 
'=DaS',          kWhatPrepare                    = 'prep', -        kWhatSetVideoNativeWindow       = '=NaW', +        kWhatSetVideoSurface            = '=VSu',          kWhatSetAudioSink               = '=AuS',          kWhatMoreDataQueued             = 'more', +        kWhatConfigPlayback             = 'cfPB', +        kWhatConfigSync                 = 'cfSy', +        kWhatGetPlaybackSettings        = 'gPbS', +        kWhatGetSyncSettings            = 'gSyS',          kWhatStart                      = 'strt',          kWhatScanSources                = 'scan',          kWhatVideoNotify                = 'vidN', @@ -124,9 +139,10 @@ private:      wp<NuPlayerDriver> mDriver;      bool mUIDValid;      uid_t mUID; +    pid_t mPID;      sp<Source> mSource;      uint32_t mSourceFlags; -    sp<NativeWindowWrapper> mNativeWindow; +    sp<Surface> mSurface;      sp<MediaPlayerBase::AudioSink> mAudioSink;      sp<DecoderBase> mVideoDecoder;      bool mOffloadAudio; @@ -138,6 +154,8 @@ private:      int32_t mVideoDecoderGeneration;      int32_t mRendererGeneration; +    int64_t mPreviousSeekTimeUs; +      List<sp<Action> > mDeferredActions;      bool mAudioEOS; @@ -175,7 +193,11 @@ private:      int32_t mVideoScalingMode; +    AudioPlaybackRate mPlaybackSettings; +    AVSyncSettings mSyncSettings; +    float mVideoFpsHint;      bool mStarted; +    bool mSourceStarted;      // Actual pause state, either as requested by client or due to buffering.      bool mPaused; @@ -185,6 +207,9 @@ private:      // still become true, when we pause internally due to buffering.      bool mPausedByClient; +    // Pause state as requested by source (internally) due to buffering +    bool mPausedForBuffering; +      inline const sp<DecoderBase> &getDecoder(bool audio) {          return audio ? 
mAudioDecoder : mVideoDecoder;      } @@ -198,6 +223,7 @@ private:      void tryOpenAudioSinkForOffload(const sp<AMessage> &format, bool hasVideo);      void closeAudioSink(); +    void determineAudioModeChange();      status_t instantiateDecoder(bool audio, sp<DecoderBase> *decoder); @@ -212,7 +238,7 @@ private:      void handleFlushComplete(bool audio, bool isDecoder);      void finishFlushIfPossible(); -    void onStart(); +    void onStart(int64_t startPositionUs = -1);      void onResume();      void onPause(); @@ -221,6 +247,7 @@ private:      void flushDecoder(bool audio, bool needShutdown);      void finishResume(); +    void notifyDriverSeekComplete();      void postScanSources(); @@ -229,11 +256,11 @@ private:      void processDeferredActions(); -    void performSeek(int64_t seekTimeUs, bool needNotify); +    void performSeek(int64_t seekTimeUs);      void performDecoderFlush(FlushCommand audio, FlushCommand video);      void performReset();      void performScanSources(); -    void performSetSurface(const sp<NativeWindowWrapper> &wrapper); +    void performSetSurface(const sp<Surface> &wrapper);      void performResumeDecoders(bool needNotify);      void onSourceNotify(const sp<AMessage> &msg); @@ -243,6 +270,7 @@ private:              bool audio, bool video, const sp<AMessage> &reply);      void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex); +    void sendTimedMetaData(const sp<ABuffer> &buffer);      void sendTimedTextData(const sp<ABuffer> &buffer);      void writeTrackInfo(Parcel* reply, const sp<AMessage> format) const; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp index 9229704..ac3c6b6 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp @@ -19,6 +19,7 @@  #include <utils/Log.h>  #include <inttypes.h> +#include "avc_utils.h"  #include "NuPlayerCCDecoder.h"  
#include <media/stagefright/foundation/ABitReader.h> @@ -50,6 +51,7 @@ static bool isNullPad(CCData *cc) {      return cc->mData1 < 0x10 && cc->mData2 < 0x10;  } +static void dumpBytePair(const sp<ABuffer> &ccBuf) __attribute__ ((unused));  static void dumpBytePair(const sp<ABuffer> &ccBuf) {      size_t offset = 0;      AString out; @@ -185,17 +187,38 @@ int32_t NuPlayer::CCDecoder::getTrackIndex(size_t channel) const {  // returns true if a new CC track is found  bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) { -    int64_t timeUs; -    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); -      sp<ABuffer> sei;      if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {          return false;      } +    int64_t timeUs; +    CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); +      bool trackAdded = false; -    NALBitReader br(sei->data() + 1, sei->size() - 1); +    const NALPosition *nal = (NALPosition *) sei->data(); + +    for (size_t i = 0; i < sei->size() / sizeof(NALPosition); ++i, ++nal) { +        trackAdded |= parseSEINalUnit( +                timeUs, accessUnit->data() + nal->nalOffset, nal->nalSize); +    } + +    return trackAdded; +} + +// returns true if a new CC track is found +bool NuPlayer::CCDecoder::parseSEINalUnit( +        int64_t timeUs, const uint8_t *nalStart, size_t nalSize) { +    unsigned nalType = nalStart[0] & 0x1f; + +    // the buffer should only have SEI in it +    if (nalType != 6) { +        return false; +    } + +    bool trackAdded = false; +    NALBitReader br(nalStart + 1, nalSize - 1);      // sei_message()      while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message()          uint32_t payload_type = 0; @@ -214,20 +237,25 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {          // sei_payload()          if (payload_type == 4) { -            // user_data_registered_itu_t_t35() - -            // ATSC A/72: 6.4.2 -            uint8_t 
itu_t_t35_country_code = br.getBits(8); -            uint16_t itu_t_t35_provider_code = br.getBits(16); -            uint32_t user_identifier = br.getBits(32); -            uint8_t user_data_type_code = br.getBits(8); - -            payload_size -= 1 + 2 + 4 + 1; +            bool isCC = false; +            if (payload_size > 1 + 2 + 4 + 1) { +                // user_data_registered_itu_t_t35() + +                // ATSC A/72: 6.4.2 +                uint8_t itu_t_t35_country_code = br.getBits(8); +                uint16_t itu_t_t35_provider_code = br.getBits(16); +                uint32_t user_identifier = br.getBits(32); +                uint8_t user_data_type_code = br.getBits(8); + +                payload_size -= 1 + 2 + 4 + 1; + +                isCC = itu_t_t35_country_code == 0xB5 +                        && itu_t_t35_provider_code == 0x0031 +                        && user_identifier == 'GA94' +                        && user_data_type_code == 0x3; +            } -            if (itu_t_t35_country_code == 0xB5 -                    && itu_t_t35_provider_code == 0x0031 -                    && user_identifier == 'GA94' -                    && user_data_type_code == 0x3) { +            if (isCC && payload_size > 2) {                  // MPEG_cc_data()                  // ATSC A/53 Part 4: 6.2.3.1                  br.skipBits(1); //process_em_data_flag @@ -243,7 +271,7 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {                      sp<ABuffer> ccBuf = new ABuffer(cc_count * sizeof(CCData));                      ccBuf->setRange(0, 0); -                    for (size_t i = 0; i < cc_count; i++) { +                    for (size_t i = 0; i < cc_count && payload_size >= 3; i++) {                          uint8_t marker = br.getBits(5);                          CHECK_EQ(marker, 0x1f); @@ -253,6 +281,8 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {                          uint8_t cc_data_1 = br.getBits(8) 
& 0x7f;                          uint8_t cc_data_2 = br.getBits(8) & 0x7f; +                        payload_size -= 3; +                          if (cc_valid                                  && (cc_type == 0 || cc_type == 1)) {                              CCData cc(cc_type, cc_data_1, cc_data_2); @@ -269,7 +299,6 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {                              }                          }                      } -                    payload_size -= cc_count * 3;                      mCCMap.add(timeUs, ccBuf);                      break; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h index 5e06f4e..77fb0fe 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h @@ -49,6 +49,7 @@ private:      bool isTrackValid(size_t index) const;      int32_t getTrackIndex(size_t channel) const;      bool extractFromSEI(const sp<ABuffer> &accessUnit); +    bool parseSEINalUnit(int64_t timeUs, const uint8_t *nalStart, size_t nalSize);      sp<ABuffer> filterCCBuf(const sp<ABuffer> &ccBuf, size_t index);      DISALLOW_EVIL_CONSTRUCTORS(CCDecoder); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp index 5d98d98..3646828 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp @@ -24,6 +24,7 @@  #include "NuPlayerRenderer.h"  #include "NuPlayerSource.h" +#include <cutils/properties.h>  #include <media/ICrypto.h>  #include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h> @@ -33,29 +34,41 @@  #include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MediaErrors.h> +#include <gui/Surface.h> +  #include "avc_utils.h"  #include "ATSParser.h"  namespace android { +static 
inline bool getAudioDeepBufferSetting() { +    return property_get_bool("media.stagefright.audio.deep", false /* default_value */); +} +  NuPlayer::Decoder::Decoder(          const sp<AMessage> ¬ify,          const sp<Source> &source, +        pid_t pid,          const sp<Renderer> &renderer, -        const sp<NativeWindowWrapper> &nativeWindow, +        const sp<Surface> &surface,          const sp<CCDecoder> &ccDecoder)      : DecoderBase(notify), -      mNativeWindow(nativeWindow), +      mSurface(surface),        mSource(source),        mRenderer(renderer),        mCCDecoder(ccDecoder), +      mPid(pid),        mSkipRenderingUntilMediaTimeUs(-1ll),        mNumFramesTotal(0ll), -      mNumFramesDropped(0ll), +      mNumInputFramesDropped(0ll), +      mNumOutputFramesDropped(0ll), +      mVideoWidth(0), +      mVideoHeight(0),        mIsAudio(true),        mIsVideoAVC(false),        mIsSecure(false),        mFormatChangePending(false), +      mTimeChangePending(false),        mPaused(true),        mResumePending(false),        mComponentName("decoder") { @@ -65,14 +78,31 @@ NuPlayer::Decoder::Decoder(  }  NuPlayer::Decoder::~Decoder() { +    mCodec->release();      releaseAndResetMediaBuffers();  } -void NuPlayer::Decoder::getStats( -        int64_t *numFramesTotal, -        int64_t *numFramesDropped) const { -    *numFramesTotal = mNumFramesTotal; -    *numFramesDropped = mNumFramesDropped; +sp<AMessage> NuPlayer::Decoder::getStats() const { +    mStats->setInt64("frames-total", mNumFramesTotal); +    mStats->setInt64("frames-dropped-input", mNumInputFramesDropped); +    mStats->setInt64("frames-dropped-output", mNumOutputFramesDropped); +    return mStats; +} + +status_t NuPlayer::Decoder::setVideoSurface(const sp<Surface> &surface) { +    if (surface == NULL || ADebug::isExperimentEnabled("legacy-setsurface")) { +        return BAD_VALUE; +    } + +    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this); + +    msg->setObject("surface", surface); +    
sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +    } +    return err;  }  void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) { @@ -81,25 +111,71 @@ void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {      switch (msg->what()) {          case kWhatCodecNotify:          { -            if (!isStaleReply(msg)) { -                int32_t numInput, numOutput; +            int32_t cbID; +            CHECK(msg->findInt32("callbackID", &cbID)); + +            ALOGV("[%s] kWhatCodecNotify: cbID = %d, paused = %d", +                    mIsAudio ? "audio" : "video", cbID, mPaused); + +            if (mPaused) { +                break; +            } + +            switch (cbID) { +                case MediaCodec::CB_INPUT_AVAILABLE: +                { +                    int32_t index; +                    CHECK(msg->findInt32("index", &index)); -                if (!msg->findInt32("input-buffers", &numInput)) { -                    numInput = INT32_MAX; +                    handleAnInputBuffer(index); +                    break; +                } + +                case MediaCodec::CB_OUTPUT_AVAILABLE: +                { +                    int32_t index; +                    size_t offset; +                    size_t size; +                    int64_t timeUs; +                    int32_t flags; + +                    CHECK(msg->findInt32("index", &index)); +                    CHECK(msg->findSize("offset", &offset)); +                    CHECK(msg->findSize("size", &size)); +                    CHECK(msg->findInt64("timeUs", &timeUs)); +                    CHECK(msg->findInt32("flags", &flags)); + +                    handleAnOutputBuffer(index, offset, size, timeUs, flags); +                    break;                  } -                if (!msg->findInt32("output-buffers", &numOutput)) { -                    
numOutput = INT32_MAX; +                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: +                { +                    sp<AMessage> format; +                    CHECK(msg->findMessage("format", &format)); + +                    handleOutputFormatChange(format); +                    break;                  } -                if (!mPaused) { -                    while (numInput-- > 0 && handleAnInputBuffer()) {} +                case MediaCodec::CB_ERROR: +                { +                    status_t err; +                    CHECK(msg->findInt32("err", &err)); +                    ALOGE("Decoder (%s) reported error : 0x%x", +                            mIsAudio ? "audio" : "video", err); + +                    handleError(err); +                    break;                  } -                while (numOutput-- > 0 && handleAnOutputBuffer()) {} +                default: +                { +                    TRESPASS(); +                    break; +                }              } -            requestCodecNotification();              break;          } @@ -111,6 +187,46 @@ void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {              break;          } +        case kWhatSetVideoSurface: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); + +            sp<RefBase> obj; +            CHECK(msg->findObject("surface", &obj)); +            sp<Surface> surface = static_cast<Surface *>(obj.get()); // non-null +            int32_t err = INVALID_OPERATION; +            // NOTE: in practice mSurface is always non-null, but checking here for completeness +            if (mCodec != NULL && mSurface != NULL) { +                // TODO: once AwesomePlayer is removed, remove this automatic connecting +                // to the surface by MediaPlayerService. 
+                // +                // at this point MediaPlayerService::client has already connected to the +                // surface, which MediaCodec does not expect +                err = native_window_api_disconnect(surface.get(), NATIVE_WINDOW_API_MEDIA); +                if (err == OK) { +                    err = mCodec->setSurface(surface); +                    ALOGI_IF(err, "codec setSurface returned: %d", err); +                    if (err == OK) { +                        // reconnect to the old surface as MPS::Client will expect to +                        // be able to disconnect from it. +                        (void)native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_MEDIA); +                        mSurface = surface; +                    } +                } +                if (err != OK) { +                    // reconnect to the new surface on error as MPS::Client will expect to +                    // be able to disconnect from it. +                    (void)native_window_api_connect(surface.get(), NATIVE_WINDOW_API_MEDIA); +                } +            } + +            sp<AMessage> response = new AMessage; +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } +          default:              DecoderBase::onMessageReceived(msg);              break; @@ -121,6 +237,7 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {      CHECK(mCodec == NULL);      mFormatChangePending = false; +    mTimeChangePending = false;      ++mBufferGeneration; @@ -130,16 +247,12 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {      mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);      mIsVideoAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str()); -    sp<Surface> surface = NULL; -    if (mNativeWindow != NULL) { -        surface = mNativeWindow->getSurfaceTextureClient(); -    } -      mComponentName = mime;      mComponentName.append(" decoder"); 
-    ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), surface.get()); +    ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), mSurface.get()); -    mCodec = MediaCodec::CreateByType(mCodecLooper, mime.c_str(), false /* encoder */); +    mCodec = MediaCodec::CreateByType( +            mCodecLooper, mime.c_str(), false /* encoder */, NULL /* err */, mPid);      int32_t secure = 0;      if (format->findInt32("secure", &secure) && secure != 0) {          if (mCodec != NULL) { @@ -148,7 +261,7 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {              mCodec->release();              ALOGI("[%s] creating", mComponentName.c_str());              mCodec = MediaCodec::CreateByComponentName( -                    mCodecLooper, mComponentName.c_str()); +                    mCodecLooper, mComponentName.c_str(), NULL /* err */, mPid);          }      }      if (mCodec == NULL) { @@ -162,17 +275,17 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {      mCodec->getName(&mComponentName);      status_t err; -    if (mNativeWindow != NULL) { +    if (mSurface != NULL) {          // disconnect from surface as MediaCodec will reconnect          err = native_window_api_disconnect( -                surface.get(), NATIVE_WINDOW_API_MEDIA); +                mSurface.get(), NATIVE_WINDOW_API_MEDIA);          // We treat this as a warning, as this is a preparatory step.          // Codec will try to connect to the surface, which is where          // any error signaling will occur.          
ALOGW_IF(err != OK, "failed to disconnect from surface: %d", err);      }      err = mCodec->configure( -            format, surface, NULL /* crypto */, 0 /* flags */); +            format, mSurface, NULL /* crypto */, 0 /* flags */);      if (err != OK) {          ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);          mCodec->release(); @@ -186,6 +299,21 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {      CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));      CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat)); +    mStats->setString("mime", mime.c_str()); +    mStats->setString("component-name", mComponentName.c_str()); + +    if (!mIsAudio) { +        int32_t width, height; +        if (mOutputFormat->findInt32("width", &width) +                && mOutputFormat->findInt32("height", &height)) { +            mStats->setInt32("width", width); +            mStats->setInt32("height", height); +        } +    } + +    sp<AMessage> reply = new AMessage(kWhatCodecNotify, this); +    mCodec->setCallback(reply); +      err = mCodec->start();      if (err != OK) {          ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err); @@ -195,36 +323,32 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {          return;      } -    // the following should work after start -    CHECK_EQ((status_t)OK, mCodec->getInputBuffers(&mInputBuffers));      releaseAndResetMediaBuffers(); -    CHECK_EQ((status_t)OK, mCodec->getOutputBuffers(&mOutputBuffers)); -    ALOGV("[%s] got %zu input and %zu output buffers", -            mComponentName.c_str(), -            mInputBuffers.size(), -            mOutputBuffers.size()); -    if (mRenderer != NULL) { -        requestCodecNotification(); -    }      mPaused = false;      mResumePending = false;  } +void NuPlayer::Decoder::onSetParameters(const sp<AMessage> ¶ms) { +    if (mCodec == NULL) { +        ALOGW("onSetParameters called 
before codec is created."); +        return; +    } +    mCodec->setParameters(params); +} +  void NuPlayer::Decoder::onSetRenderer(const sp<Renderer> &renderer) {      bool hadNoRenderer = (mRenderer == NULL);      mRenderer = renderer;      if (hadNoRenderer && mRenderer != NULL) { -        requestCodecNotification(); +        // this means that the widevine legacy source is ready +        onRequestInputBuffers();      }  }  void NuPlayer::Decoder::onGetInputBuffers(          Vector<sp<ABuffer> > *dstBuffers) { -    dstBuffers->clear(); -    for (size_t i = 0; i < mInputBuffers.size(); i++) { -        dstBuffers->push(mInputBuffers[i]); -    } +    CHECK_EQ((status_t)OK, mCodec->getWidevineLegacyBuffers(dstBuffers));  }  void NuPlayer::Decoder::onResume(bool notifyComplete) { @@ -233,9 +357,10 @@ void NuPlayer::Decoder::onResume(bool notifyComplete) {      if (notifyComplete) {          mResumePending = true;      } +    mCodec->start();  } -void NuPlayer::Decoder::onFlush(bool notifyComplete) { +void NuPlayer::Decoder::doFlush(bool notifyComplete) {      if (mCCDecoder != NULL) {          mCCDecoder->flush();      } @@ -259,13 +384,23 @@ void NuPlayer::Decoder::onFlush(bool notifyComplete) {          // we attempt to release the buffers even if flush fails.      }      releaseAndResetMediaBuffers(); +    mPaused = true; +} -    if (notifyComplete) { -        sp<AMessage> notify = mNotify->dup(); -        notify->setInt32("what", kWhatFlushCompleted); -        notify->post(); -        mPaused = true; + +void NuPlayer::Decoder::onFlush() { +    doFlush(true); + +    if (isDiscontinuityPending()) { +        // This could happen if the client starts seeking/shutdown +        // after we queued an EOS for discontinuities. +        // We can consider discontinuity handled. 
+        finishHandleDiscontinuity(false /* flushOnTimeChange */);      } + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", kWhatFlushCompleted); +    notify->post();  }  void NuPlayer::Decoder::onShutdown(bool notifyComplete) { @@ -279,12 +414,10 @@ void NuPlayer::Decoder::onShutdown(bool notifyComplete) {          mCodec = NULL;          ++mBufferGeneration; -        if (mNativeWindow != NULL) { +        if (mSurface != NULL) {              // reconnect to surface as MediaCodec disconnected from it              status_t error = -                    native_window_api_connect( -                            mNativeWindow->getNativeWindow().get(), -                            NATIVE_WINDOW_API_MEDIA); +                    native_window_api_connect(mSurface.get(), NATIVE_WINDOW_API_MEDIA);              ALOGW_IF(error != NO_ERROR,                      "[%s] failed to connect to native window, error=%d",                      mComponentName.c_str(), error); @@ -308,17 +441,23 @@ void NuPlayer::Decoder::onShutdown(bool notifyComplete) {      }  } -void NuPlayer::Decoder::doRequestBuffers() { -    if (mFormatChangePending) { -        return; +/* + * returns true if we should request more data + */ +bool NuPlayer::Decoder::doRequestBuffers() { +    // mRenderer is only NULL if we have a legacy widevine source that +    // is not yet ready. In this case we must not fetch input. 
+    if (isDiscontinuityPending() || mRenderer == NULL) { +        return false;      }      status_t err = OK; -    while (!mDequeuedInputBuffers.empty()) { +    while (err == OK && !mDequeuedInputBuffers.empty()) {          size_t bufferIx = *mDequeuedInputBuffers.begin();          sp<AMessage> msg = new AMessage();          msg->setSize("buffer-ix", bufferIx);          err = fetchInputData(msg); -        if (err != OK) { +        if (err != OK && err != ERROR_END_OF_STREAM) { +            // if EOS, need to queue EOS buffer              break;          }          mDequeuedInputBuffers.erase(mDequeuedInputBuffers.begin()); @@ -329,40 +468,59 @@ void NuPlayer::Decoder::doRequestBuffers() {          }      } -    if (err == -EWOULDBLOCK -            && mSource->feedMoreTSData() == OK) { -        scheduleRequestBuffers(); -    } +    return err == -EWOULDBLOCK +            && mSource->feedMoreTSData() == OK;  } -bool NuPlayer::Decoder::handleAnInputBuffer() { -    if (mFormatChangePending) { +void NuPlayer::Decoder::handleError(int32_t err) +{ +    // We cannot immediately release the codec due to buffers still outstanding +    // in the renderer.  We signal to the player the error so it can shutdown/release the +    // decoder after flushing and increment the generation to discard unnecessary messages. + +    ++mBufferGeneration; + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", kWhatError); +    notify->setInt32("err", err); +    notify->post(); +} + +bool NuPlayer::Decoder::handleAnInputBuffer(size_t index) { +    if (isDiscontinuityPending()) {          return false;      } -    size_t bufferIx = -1; -    status_t res = mCodec->dequeueInputBuffer(&bufferIx); -    ALOGV("[%s] dequeued input: %d", -            mComponentName.c_str(), res == OK ? 
(int)bufferIx : res); -    if (res != OK) { -        if (res != -EAGAIN) { -            ALOGE("Failed to dequeue input buffer for %s (err=%d)", -                    mComponentName.c_str(), res); -            handleError(res); -        } + +    sp<ABuffer> buffer; +    mCodec->getInputBuffer(index, &buffer); + +    if (buffer == NULL) { +        handleError(UNKNOWN_ERROR);          return false;      } -    CHECK_LT(bufferIx, mInputBuffers.size()); +    if (index >= mInputBuffers.size()) { +        for (size_t i = mInputBuffers.size(); i <= index; ++i) { +            mInputBuffers.add(); +            mMediaBuffers.add(); +            mInputBufferIsDequeued.add(); +            mMediaBuffers.editItemAt(i) = NULL; +            mInputBufferIsDequeued.editItemAt(i) = false; +        } +    } +    mInputBuffers.editItemAt(index) = buffer; + +    //CHECK_LT(bufferIx, mInputBuffers.size()); -    if (mMediaBuffers[bufferIx] != NULL) { -        mMediaBuffers[bufferIx]->release(); -        mMediaBuffers.editItemAt(bufferIx) = NULL; +    if (mMediaBuffers[index] != NULL) { +        mMediaBuffers[index]->release(); +        mMediaBuffers.editItemAt(index) = NULL;      } -    mInputBufferIsDequeued.editItemAt(bufferIx) = true; +    mInputBufferIsDequeued.editItemAt(index) = true;      if (!mCSDsToSubmit.isEmpty()) {          sp<AMessage> msg = new AMessage(); -        msg->setSize("buffer-ix", bufferIx); +        msg->setSize("buffer-ix", index);          sp<ABuffer> buffer = mCSDsToSubmit.itemAt(0);          ALOGI("[%s] resubmitting CSD", mComponentName.c_str()); @@ -380,111 +538,51 @@ bool NuPlayer::Decoder::handleAnInputBuffer() {          mPendingInputMessages.erase(mPendingInputMessages.begin());      } -    if (!mInputBufferIsDequeued.editItemAt(bufferIx)) { +    if (!mInputBufferIsDequeued.editItemAt(index)) {          return true;      } -    mDequeuedInputBuffers.push_back(bufferIx); +    mDequeuedInputBuffers.push_back(index);      onRequestInputBuffers();      return 
true;  } -bool NuPlayer::Decoder::handleAnOutputBuffer() { -    if (mFormatChangePending) { -        return false; -    } -    size_t bufferIx = -1; -    size_t offset; -    size_t size; -    int64_t timeUs; -    uint32_t flags; -    status_t res = mCodec->dequeueOutputBuffer( -            &bufferIx, &offset, &size, &timeUs, &flags); - -    if (res != OK) { -        ALOGV("[%s] dequeued output: %d", mComponentName.c_str(), res); -    } else { -        ALOGV("[%s] dequeued output: %d (time=%lld flags=%" PRIu32 ")", -                mComponentName.c_str(), (int)bufferIx, timeUs, flags); -    } - -    if (res == INFO_OUTPUT_BUFFERS_CHANGED) { -        res = mCodec->getOutputBuffers(&mOutputBuffers); -        if (res != OK) { -            ALOGE("Failed to get output buffers for %s after INFO event (err=%d)", -                    mComponentName.c_str(), res); -            handleError(res); -            return false; -        } -        // NuPlayer ignores this -        return true; -    } else if (res == INFO_FORMAT_CHANGED) { -        sp<AMessage> format = new AMessage(); -        res = mCodec->getOutputFormat(&format); -        if (res != OK) { -            ALOGE("Failed to get output format for %s after INFO event (err=%d)", -                    mComponentName.c_str(), res); -            handleError(res); -            return false; -        } - -        if (!mIsAudio) { -            sp<AMessage> notify = mNotify->dup(); -            notify->setInt32("what", kWhatVideoSizeChanged); -            notify->setMessage("format", format); -            notify->post(); -        } else if (mRenderer != NULL) { -            uint32_t flags; -            int64_t durationUs; -            bool hasVideo = (mSource->getFormat(false /* audio */) != NULL); -            if (!hasVideo && -                    mSource->getDuration(&durationUs) == OK && -                    durationUs -                        > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) { -                flags = 
AUDIO_OUTPUT_FLAG_DEEP_BUFFER; -            } else { -                flags = AUDIO_OUTPUT_FLAG_NONE; -            } +bool NuPlayer::Decoder::handleAnOutputBuffer( +        size_t index, +        size_t offset, +        size_t size, +        int64_t timeUs, +        int32_t flags) { +//    CHECK_LT(bufferIx, mOutputBuffers.size()); +    sp<ABuffer> buffer; +    mCodec->getOutputBuffer(index, &buffer); -            res = mRenderer->openAudioSink( -                    format, false /* offloadOnly */, hasVideo, flags, NULL /* isOffloaded */); -            if (res != OK) { -                ALOGE("Failed to open AudioSink on format change for %s (err=%d)", -                        mComponentName.c_str(), res); -                handleError(res); -                return false; -            } +    if (index >= mOutputBuffers.size()) { +        for (size_t i = mOutputBuffers.size(); i <= index; ++i) { +            mOutputBuffers.add();          } -        return true; -    } else if (res == INFO_DISCONTINUITY) { -        // nothing to do -        return true; -    } else if (res != OK) { -        if (res != -EAGAIN) { -            ALOGE("Failed to dequeue output buffer for %s (err=%d)", -                    mComponentName.c_str(), res); -            handleError(res); -        } -        return false;      } -    CHECK_LT(bufferIx, mOutputBuffers.size()); -    sp<ABuffer> buffer = mOutputBuffers[bufferIx]; +    mOutputBuffers.editItemAt(index) = buffer; +      buffer->setRange(offset, size);      buffer->meta()->clear();      buffer->meta()->setInt64("timeUs", timeUs); -    if (flags & MediaCodec::BUFFER_FLAG_EOS) { -        buffer->meta()->setInt32("eos", true); -        notifyResumeCompleteIfNecessary(); -    } + +    bool eos = flags & MediaCodec::BUFFER_FLAG_EOS;      // we do not expect CODECCONFIG or SYNCFRAME for decoder -    sp<AMessage> reply = new AMessage(kWhatRenderBuffer, id()); -    reply->setSize("buffer-ix", bufferIx); +    sp<AMessage> reply = new 
AMessage(kWhatRenderBuffer, this); +    reply->setSize("buffer-ix", index);      reply->setInt32("generation", mBufferGeneration); -    if (mSkipRenderingUntilMediaTimeUs >= 0) { +    if (eos) { +        ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video"); + +        buffer->meta()->setInt32("eos", true); +        reply->setInt32("eos", true); +    } else if (mSkipRenderingUntilMediaTimeUs >= 0) {          if (timeUs < mSkipRenderingUntilMediaTimeUs) {              ALOGV("[%s] dropping buffer at time %lld as requested.",                       mComponentName.c_str(), (long long)timeUs); @@ -496,13 +594,15 @@ bool NuPlayer::Decoder::handleAnOutputBuffer() {          mSkipRenderingUntilMediaTimeUs = -1;      } +    mNumFramesTotal += !mIsAudio; +      // wait until 1st frame comes out to signal resume complete      notifyResumeCompleteIfNecessary();      if (mRenderer != NULL) {          // send the buffer to renderer.          mRenderer->queueBuffer(mIsAudio, buffer, reply); -        if (flags & MediaCodec::BUFFER_FLAG_EOS) { +        if (eos && !isDiscontinuityPending()) {              mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);          }      } @@ -510,6 +610,36 @@ bool NuPlayer::Decoder::handleAnOutputBuffer() {      return true;  } +void NuPlayer::Decoder::handleOutputFormatChange(const sp<AMessage> &format) { +    if (!mIsAudio) { +        int32_t width, height; +        if (format->findInt32("width", &width) +                && format->findInt32("height", &height)) { +            mStats->setInt32("width", width); +            mStats->setInt32("height", height); +        } +        sp<AMessage> notify = mNotify->dup(); +        notify->setInt32("what", kWhatVideoSizeChanged); +        notify->setMessage("format", format); +        notify->post(); +    } else if (mRenderer != NULL) { +        uint32_t flags; +        int64_t durationUs; +        bool hasVideo = (mSource->getFormat(false /* audio */) != NULL); +        if 
(getAudioDeepBufferSetting() // override regardless of source duration +                || (!hasVideo +                        && mSource->getDuration(&durationUs) == OK +                        && durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US)) { +            flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER; +        } else { +            flags = AUDIO_OUTPUT_FLAG_NONE; +        } + +        mRenderer->openAudioSink( +                format, false /* offloadOnly */, hasVideo, flags, NULL /* isOffloaed */); +    } +} +  void NuPlayer::Decoder::releaseAndResetMediaBuffers() {      for (size_t i = 0; i < mMediaBuffers.size(); i++) {          if (mMediaBuffers[i] != NULL) { @@ -533,11 +663,8 @@ void NuPlayer::Decoder::releaseAndResetMediaBuffers() {  }  void NuPlayer::Decoder::requestCodecNotification() { -    if (mFormatChangePending) { -        return; -    }      if (mCodec != NULL) { -        sp<AMessage> reply = new AMessage(kWhatCodecNotify, id()); +        sp<AMessage> reply = new AMessage(kWhatCodecNotify, this);          reply->setInt32("generation", mBufferGeneration);          mCodec->requestActivityNotification(reply);      } @@ -582,43 +709,31 @@ status_t NuPlayer::Decoder::fetchInputData(sp<AMessage> &reply) {                      formatChange = !seamlessFormatChange;                  } -                if (formatChange || timeChange) { -                    sp<AMessage> msg = mNotify->dup(); -                    msg->setInt32("what", kWhatInputDiscontinuity); -                    msg->setInt32("formatChange", formatChange); -                    msg->post(); -                } - +                // For format or time change, return EOS to queue EOS input, +                // then wait for EOS on output.                  
if (formatChange /* not seamless */) { -                    // must change decoder -                    // return EOS and wait to be killed                      mFormatChangePending = true; -                    return ERROR_END_OF_STREAM; +                    err = ERROR_END_OF_STREAM;                  } else if (timeChange) { -                    // need to flush -                    // TODO: Ideally we shouldn't need a flush upon time -                    // discontinuity, flushing will cause loss of frames. -                    // We probably should queue a time change marker to the -                    // output queue, and handles it in renderer instead.                      rememberCodecSpecificData(newFormat); -                    onFlush(false /* notifyComplete */); -                    err = OK; +                    mTimeChangePending = true; +                    err = ERROR_END_OF_STREAM;                  } else if (seamlessFormatChange) {                      // reuse existing decoder and don't flush                      rememberCodecSpecificData(newFormat); -                    err = OK; +                    continue;                  } else {                      // This stream is unaffected by the discontinuity                      return -EWOULDBLOCK;                  }              } -            reply->setInt32("err", err); -            return OK; -        } +            // reply should only be returned without a buffer set +            // when there is an error (including EOS) +            CHECK(err != OK); -        if (!mIsAudio) { -            ++mNumFramesTotal; +            reply->setInt32("err", err); +            return ERROR_END_OF_STREAM;          }          dropAccessUnit = false; @@ -628,7 +743,7 @@ status_t NuPlayer::Decoder::fetchInputData(sp<AMessage> &reply) {                  && mIsVideoAVC                  && !IsAVCReferenceFrame(accessUnit)) {              dropAccessUnit = true; -            ++mNumFramesDropped; +            
++mNumInputFramesDropped;          }      } while (dropAccessUnit); @@ -636,7 +751,7 @@ status_t NuPlayer::Decoder::fetchInputData(sp<AMessage> &reply) {  #if 0      int64_t mediaTimeUs;      CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs)); -    ALOGV("feeding %s input buffer at media time %.2f secs", +    ALOGV("[%s] feeding input buffer at media time %.3f",           mIsAudio ? "audio" : "video",           mediaTimeUs / 1E6);  #endif @@ -696,10 +811,7 @@ bool NuPlayer::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {          int32_t streamErr = ERROR_END_OF_STREAM;          CHECK(msg->findInt32("err", &streamErr) || !hasBuffer); -        if (streamErr == OK) { -            /* buffers are returned to hold on to */ -            return true; -        } +        CHECK(streamErr != OK);          // attempt to queue EOS          status_t err = mCodec->queueInputBuffer( @@ -781,6 +893,7 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {      status_t err;      int32_t render;      size_t bufferIx; +    int32_t eos;      CHECK(msg->findSize("buffer-ix", &bufferIx));      if (!mIsAudio) { @@ -798,6 +911,7 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {          CHECK(msg->findInt64("timestampNs", ×tampNs));          err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);      } else { +        mNumOutputFramesDropped += !mIsAudio;          err = mCodec->releaseOutputBuffer(bufferIx);      }      if (err != OK) { @@ -805,6 +919,40 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {                  mComponentName.c_str(), err);          handleError(err);      } +    if (msg->findInt32("eos", &eos) && eos +            && isDiscontinuityPending()) { +        finishHandleDiscontinuity(true /* flushOnTimeChange */); +    } +} + +bool NuPlayer::Decoder::isDiscontinuityPending() const { +    return mFormatChangePending || mTimeChangePending; +} + +void 
NuPlayer::Decoder::finishHandleDiscontinuity(bool flushOnTimeChange) { +    ALOGV("finishHandleDiscontinuity: format %d, time %d, flush %d", +            mFormatChangePending, mTimeChangePending, flushOnTimeChange); + +    // If we have format change, pause and wait to be killed; +    // If we have time change only, flush and restart fetching. + +    if (mFormatChangePending) { +        mPaused = true; +    } else if (mTimeChangePending) { +        if (flushOnTimeChange) { +            doFlush(false /* notifyComplete */); +            signalResume(false /* notifyComplete */); +        } +    } + +    // Notify NuPlayer to either shutdown decoder, or rescan sources +    sp<AMessage> msg = mNotify->dup(); +    msg->setInt32("what", kWhatInputDiscontinuity); +    msg->setInt32("formatChange", mFormatChangePending); +    msg->post(); + +    mFormatChangePending = false; +    mTimeChangePending = false;  }  bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange( diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h index 1bfa94f..eeb4af4 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h @@ -26,13 +26,15 @@ namespace android {  struct NuPlayer::Decoder : public DecoderBase {      Decoder(const sp<AMessage> ¬ify,              const sp<Source> &source, +            pid_t pid,              const sp<Renderer> &renderer = NULL, -            const sp<NativeWindowWrapper> &nativeWindow = NULL, +            const sp<Surface> &surface = NULL,              const sp<CCDecoder> &ccDecoder = NULL); -    virtual void getStats( -            int64_t *mNumFramesTotal, -            int64_t *mNumFramesDropped) const; +    virtual sp<AMessage> getStats() const; + +    // sets the output surface of video decoders. 
+    virtual status_t setVideoSurface(const sp<Surface> &surface);  protected:      virtual ~Decoder(); @@ -40,20 +42,22 @@ protected:      virtual void onMessageReceived(const sp<AMessage> &msg);      virtual void onConfigure(const sp<AMessage> &format); +    virtual void onSetParameters(const sp<AMessage> ¶ms);      virtual void onSetRenderer(const sp<Renderer> &renderer);      virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);      virtual void onResume(bool notifyComplete); -    virtual void onFlush(bool notifyComplete); +    virtual void onFlush();      virtual void onShutdown(bool notifyComplete); -    virtual void doRequestBuffers(); +    virtual bool doRequestBuffers();  private:      enum {          kWhatCodecNotify         = 'cdcN',          kWhatRenderBuffer        = 'rndr', +        kWhatSetVideoSurface     = 'sSur'      }; -    sp<NativeWindowWrapper> mNativeWindow; +    sp<Surface> mSurface;      sp<Source> mSource;      sp<Renderer> mRenderer; @@ -74,25 +78,38 @@ private:      Vector<MediaBuffer *> mMediaBuffers;      Vector<size_t> mDequeuedInputBuffers; +    const pid_t mPid;      int64_t mSkipRenderingUntilMediaTimeUs;      int64_t mNumFramesTotal; -    int64_t mNumFramesDropped; +    int64_t mNumInputFramesDropped; +    int64_t mNumOutputFramesDropped; +    int32_t mVideoWidth; +    int32_t mVideoHeight;      bool mIsAudio;      bool mIsVideoAVC;      bool mIsSecure;      bool mFormatChangePending; +    bool mTimeChangePending;      bool mPaused;      bool mResumePending;      AString mComponentName; -    bool handleAnInputBuffer(); -    bool handleAnOutputBuffer(); +    void handleError(int32_t err); +    bool handleAnInputBuffer(size_t index); +    bool handleAnOutputBuffer( +            size_t index, +            size_t offset, +            size_t size, +            int64_t timeUs, +            int32_t flags); +    void handleOutputFormatChange(const sp<AMessage> &format);      void releaseAndResetMediaBuffers();      void 
requestCodecNotification();      bool isStaleReply(const sp<AMessage> &msg); +    void doFlush(bool notifyComplete);      status_t fetchInputData(sp<AMessage> &reply);      bool onInputBufferFetched(const sp<AMessage> &msg);      void onRenderBuffer(const sp<AMessage> &msg); @@ -100,6 +117,8 @@ private:      bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;      bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;      void rememberCodecSpecificData(const sp<AMessage> &format); +    bool isDiscontinuityPending() const; +    void finishHandleDiscontinuity(bool flushOnTimeChange);      void notifyResumeCompleteIfNecessary(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp index d56fc4d..7e76842 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp @@ -31,6 +31,7 @@ namespace android {  NuPlayer::DecoderBase::DecoderBase(const sp<AMessage> ¬ify)      :  mNotify(notify),         mBufferGeneration(0), +       mStats(new AMessage),         mRequestInputBuffersPending(false) {      // Every decoder has its own looper because MediaCodec operations      // are blocking, but NuPlayer needs asynchronous operations. 
@@ -61,7 +62,7 @@ status_t PostAndAwaitResponse(  }  void NuPlayer::DecoderBase::configure(const sp<AMessage> &format) { -    sp<AMessage> msg = new AMessage(kWhatConfigure, id()); +    sp<AMessage> msg = new AMessage(kWhatConfigure, this);      msg->setMessage("format", format);      msg->post();  } @@ -70,14 +71,20 @@ void NuPlayer::DecoderBase::init() {      mDecoderLooper->registerHandler(this);  } +void NuPlayer::DecoderBase::setParameters(const sp<AMessage> ¶ms) { +    sp<AMessage> msg = new AMessage(kWhatSetParameters, this); +    msg->setMessage("params", params); +    msg->post(); +} +  void NuPlayer::DecoderBase::setRenderer(const sp<Renderer> &renderer) { -    sp<AMessage> msg = new AMessage(kWhatSetRenderer, id()); +    sp<AMessage> msg = new AMessage(kWhatSetRenderer, this);      msg->setObject("renderer", renderer);      msg->post();  }  status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<ABuffer> > *buffers) const { -    sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, id()); +    sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, this);      msg->setPointer("buffers", buffers);      sp<AMessage> response; @@ -85,17 +92,17 @@ status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<ABuffer> > *buffers) c  }  void NuPlayer::DecoderBase::signalFlush() { -    (new AMessage(kWhatFlush, id()))->post(); +    (new AMessage(kWhatFlush, this))->post();  }  void NuPlayer::DecoderBase::signalResume(bool notifyComplete) { -    sp<AMessage> msg = new AMessage(kWhatResume, id()); +    sp<AMessage> msg = new AMessage(kWhatResume, this);      msg->setInt32("notifyComplete", notifyComplete);      msg->post();  }  void NuPlayer::DecoderBase::initiateShutdown() { -    (new AMessage(kWhatShutdown, id()))->post(); +    (new AMessage(kWhatShutdown, this))->post();  }  void NuPlayer::DecoderBase::onRequestInputBuffers() { @@ -103,16 +110,13 @@ void NuPlayer::DecoderBase::onRequestInputBuffers() {          return;      } -    doRequestBuffers(); -} +    // 
doRequestBuffers() return true if we should request more data +    if (doRequestBuffers()) { +        mRequestInputBuffersPending = true; -void NuPlayer::DecoderBase::scheduleRequestBuffers() { -    if (mRequestInputBuffersPending) { -        return; +        sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, this); +        msg->post(10 * 1000ll);      } -    mRequestInputBuffersPending = true; -    sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, id()); -    msg->post(10 * 1000ll);  }  void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) { @@ -126,6 +130,14 @@ void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {              break;          } +        case kWhatSetParameters: +        { +            sp<AMessage> params; +            CHECK(msg->findMessage("params", ¶ms)); +            onSetParameters(params); +            break; +        } +          case kWhatSetRenderer:          {              sp<RefBase> obj; @@ -136,7 +148,7 @@ void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {          case kWhatGetInputBuffers:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              Vector<sp<ABuffer> > *dstBuffers; @@ -157,7 +169,7 @@ void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) {          case kWhatFlush:          { -            onFlush(true); +            onFlush();              break;          } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h index 6732ff4..b0dc01d 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h @@ -26,24 +26,27 @@ namespace android {  struct ABuffer;  struct MediaCodec; -struct MediaBuffer; +class MediaBuffer; +class Surface;  struct NuPlayer::DecoderBase : public AHandler {      
DecoderBase(const sp<AMessage> ¬ify);      void configure(const sp<AMessage> &format);      void init(); +    void setParameters(const sp<AMessage> ¶ms);      void setRenderer(const sp<Renderer> &renderer); +    virtual status_t setVideoSurface(const sp<Surface> &) { return INVALID_OPERATION; }      status_t getInputBuffers(Vector<sp<ABuffer> > *dstBuffers) const;      void signalFlush();      void signalResume(bool notifyComplete);      void initiateShutdown(); -    virtual void getStats( -            int64_t *mNumFramesTotal, -            int64_t *mNumFramesDropped) const = 0; +    virtual sp<AMessage> getStats() const { +        return mStats; +    }      enum {          kWhatInputDiscontinuity  = 'inDi', @@ -62,23 +65,25 @@ protected:      virtual void onMessageReceived(const sp<AMessage> &msg);      virtual void onConfigure(const sp<AMessage> &format) = 0; +    virtual void onSetParameters(const sp<AMessage> ¶ms) = 0;      virtual void onSetRenderer(const sp<Renderer> &renderer) = 0;      virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers) = 0;      virtual void onResume(bool notifyComplete) = 0; -    virtual void onFlush(bool notifyComplete) = 0; +    virtual void onFlush() = 0;      virtual void onShutdown(bool notifyComplete) = 0;      void onRequestInputBuffers(); -    void scheduleRequestBuffers(); -    virtual void doRequestBuffers() = 0; +    virtual bool doRequestBuffers() = 0;      virtual void handleError(int32_t err);      sp<AMessage> mNotify;      int32_t mBufferGeneration; +    sp<AMessage> mStats;  private:      enum {          kWhatConfigure           = 'conf', +        kWhatSetParameters       = 'setP',          kWhatSetRenderer         = 'setR',          kWhatGetInputBuffers     = 'gInB',          kWhatRequestInputBuffers = 'reqB', diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp index 9f7f09a..30146c4 100644 --- 
a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp @@ -59,12 +59,6 @@ NuPlayer::DecoderPassThrough::DecoderPassThrough(  NuPlayer::DecoderPassThrough::~DecoderPassThrough() {  } -void NuPlayer::DecoderPassThrough::getStats( -        int64_t *numFramesTotal, int64_t *numFramesDropped) const { -    *numFramesTotal = 0; -    *numFramesDropped = 0; -} -  void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {      ALOGV("[%s] onConfigure", mComponentName.c_str());      mCachedBytes = 0; @@ -74,17 +68,24 @@ void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) {      onRequestInputBuffers(); +    int32_t hasVideo = 0; +    format->findInt32("has-video", &hasVideo); +      // The audio sink is already opened before the PassThrough decoder is created.      // Opening again might be relevant if decoder is instantiated after shutdown and      // format is different.      
status_t err = mRenderer->openAudioSink( -            format, true /* offloadOnly */, false /* hasVideo */, +            format, true /* offloadOnly */, hasVideo,              AUDIO_OUTPUT_FLAG_NONE /* flags */, NULL /* isOffloaded */);      if (err != OK) {          handleError(err);      }  } +void NuPlayer::DecoderPassThrough::onSetParameters(const sp<AMessage> &/*params*/) { +    ALOGW("onSetParameters() called unexpectedly"); +} +  void NuPlayer::DecoderPassThrough::onSetRenderer(          const sp<Renderer> &renderer) {      // renderer can't be changed during offloading @@ -110,7 +111,10 @@ bool NuPlayer::DecoderPassThrough::isDoneFetching() const {      return mCachedBytes >= kMaxCachedBytes || mReachedEOS || mPaused;  } -void NuPlayer::DecoderPassThrough::doRequestBuffers() { +/* + * returns true if we should request more data + */ +bool NuPlayer::DecoderPassThrough::doRequestBuffers() {      status_t err = OK;      while (!isDoneFetching()) {          sp<AMessage> msg = new AMessage(); @@ -123,10 +127,8 @@ void NuPlayer::DecoderPassThrough::doRequestBuffers() {          onInputBufferFetched(msg);      } -    if (err == -EWOULDBLOCK -            && mSource->feedMoreTSData() == OK) { -        scheduleRequestBuffers(); -    } +    return err == -EWOULDBLOCK +            && mSource->feedMoreTSData() == OK;  }  status_t NuPlayer::DecoderPassThrough::dequeueAccessUnit(sp<ABuffer> *accessUnit) { @@ -247,7 +249,7 @@ status_t NuPlayer::DecoderPassThrough::fetchInputData(sp<AMessage> &reply) {                  }                  if (timeChange) { -                    onFlush(false /* notifyComplete */); +                    doFlush(false /* notifyComplete */);                      err = OK;                  } else if (formatChange) {                      // do seamless format change @@ -333,7 +335,7 @@ void NuPlayer::DecoderPassThrough::onInputBufferFetched(          return;      } -    sp<AMessage> reply = new AMessage(kWhatBufferConsumed, id()); +    sp<AMessage> 
reply = new AMessage(kWhatBufferConsumed, this);      reply->setInt32("generation", mBufferGeneration);      reply->setInt32("size", bufferSize); @@ -364,7 +366,7 @@ void NuPlayer::DecoderPassThrough::onResume(bool notifyComplete) {      }  } -void NuPlayer::DecoderPassThrough::onFlush(bool notifyComplete) { +void NuPlayer::DecoderPassThrough::doFlush(bool notifyComplete) {      ++mBufferGeneration;      mSkipRenderingUntilMediaTimeUs = -1;      mPendingAudioAccessUnit.clear(); @@ -376,18 +378,21 @@ void NuPlayer::DecoderPassThrough::onFlush(bool notifyComplete) {          mRenderer->signalTimeDiscontinuity();      } -    if (notifyComplete) { -        mPaused = true; -        sp<AMessage> notify = mNotify->dup(); -        notify->setInt32("what", kWhatFlushCompleted); -        notify->post(); -    } -      mPendingBuffersToDrain = 0;      mCachedBytes = 0;      mReachedEOS = false;  } +void NuPlayer::DecoderPassThrough::onFlush() { +    doFlush(true /* notifyComplete */); + +    mPaused = true; +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", kWhatFlushCompleted); +    notify->post(); + +} +  void NuPlayer::DecoderPassThrough::onShutdown(bool notifyComplete) {      ++mBufferGeneration;      mSkipRenderingUntilMediaTimeUs = -1; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h index a6e1faf..db33e87 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h @@ -29,10 +29,6 @@ struct NuPlayer::DecoderPassThrough : public DecoderBase {                         const sp<Source> &source,                         const sp<Renderer> &renderer); -    virtual void getStats( -            int64_t *mNumFramesTotal, -            int64_t *mNumFramesDropped) const; -  protected:      virtual ~DecoderPassThrough(); @@ -40,12 +36,13 @@ protected:      virtual void 
onMessageReceived(const sp<AMessage> &msg);      virtual void onConfigure(const sp<AMessage> &format); +    virtual void onSetParameters(const sp<AMessage> ¶ms);      virtual void onSetRenderer(const sp<Renderer> &renderer);      virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers);      virtual void onResume(bool notifyComplete); -    virtual void onFlush(bool notifyComplete); +    virtual void onFlush();      virtual void onShutdown(bool notifyComplete); -    virtual void doRequestBuffers(); +    virtual bool doRequestBuffers();  private:      enum { @@ -77,6 +74,7 @@ private:      status_t dequeueAccessUnit(sp<ABuffer> *accessUnit);      sp<ABuffer> aggregateBuffer(const sp<ABuffer> &accessUnit);      status_t fetchInputData(sp<AMessage> &reply); +    void doFlush(bool notifyComplete);      void onInputBufferFetched(const sp<AMessage> &msg);      void onBufferConsumed(int32_t size); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index bc79fdb..7370224 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -18,6 +18,7 @@  #define LOG_TAG "NuPlayerDriver"  #include <inttypes.h>  #include <utils/Log.h> +#include <cutils/properties.h>  #include "NuPlayerDriver.h" @@ -32,7 +33,7 @@  namespace android { -NuPlayerDriver::NuPlayerDriver() +NuPlayerDriver::NuPlayerDriver(pid_t pid)      : mState(STATE_IDLE),        mIsAsyncPrepare(false),        mAsyncResult(UNKNOWN_ERROR), @@ -54,7 +55,7 @@ NuPlayerDriver::NuPlayerDriver()              true,  /* canCallJava */              PRIORITY_AUDIO); -    mPlayer = new NuPlayer; +    mPlayer = new NuPlayer(pid);      mLooper->registerHandler(mPlayer);      mPlayer->setDriver(this); @@ -135,6 +136,25 @@ status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {      return mAsyncResult;  } +status_t NuPlayerDriver::setDataSource(const sp<DataSource> 
&source) { +    ALOGV("setDataSource(%p) callback source", this); +    Mutex::Autolock autoLock(mLock); + +    if (mState != STATE_IDLE) { +        return INVALID_OPERATION; +    } + +    mState = STATE_SET_DATASOURCE_PENDING; + +    mPlayer->setDataSourceAsync(source); + +    while (mState == STATE_SET_DATASOURCE_PENDING) { +        mCondition.wait(mLock); +    } + +    return mAsyncResult; +} +  status_t NuPlayerDriver::setVideoSurfaceTexture(          const sp<IGraphicBufferProducer> &bufferProducer) {      ALOGV("setVideoSurfaceTexture(%p)", this); @@ -222,7 +242,7 @@ status_t NuPlayerDriver::prepareAsync() {  }  status_t NuPlayerDriver::start() { -    ALOGD("start(%p)", this); +    ALOGD("start(%p), state is %d, eos is %d", this, mState, mAtEOS);      Mutex::Autolock autoLock(mLock);      switch (mState) { @@ -341,6 +361,34 @@ bool NuPlayerDriver::isPlaying() {      return mState == STATE_RUNNING && !mAtEOS;  } +status_t NuPlayerDriver::setPlaybackSettings(const AudioPlaybackRate &rate) { +    Mutex::Autolock autoLock(mLock); +    status_t err = mPlayer->setPlaybackSettings(rate); +    if (err == OK) { +        if (rate.mSpeed == 0.f && mState == STATE_RUNNING) { +            mState = STATE_PAUSED; +            // try to update position +            (void)mPlayer->getCurrentPosition(&mPositionUs); +            notifyListener_l(MEDIA_PAUSED); +        } else if (rate.mSpeed != 0.f && mState == STATE_PAUSED) { +            mState = STATE_RUNNING; +        } +    } +    return err; +} + +status_t NuPlayerDriver::getPlaybackSettings(AudioPlaybackRate *rate) { +    return mPlayer->getPlaybackSettings(rate); +} + +status_t NuPlayerDriver::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) { +    return mPlayer->setSyncSettings(sync, videoFpsHint); +} + +status_t NuPlayerDriver::getSyncSettings(AVSyncSettings *sync, float *videoFps) { +    return mPlayer->getSyncSettings(sync, videoFps); +} +  status_t NuPlayerDriver::seekTo(int msec) {      
ALOGD("seekTo(%p) %d ms", this, msec);      Mutex::Autolock autoLock(mLock); @@ -350,17 +398,10 @@ status_t NuPlayerDriver::seekTo(int msec) {      switch (mState) {          case STATE_PREPARED:          case STATE_STOPPED_AND_PREPARED: -        { +        case STATE_PAUSED:              mStartupSeekTimeUs = seekTimeUs; -            // pretend that the seek completed. It will actually happen when starting playback. -            // TODO: actually perform the seek here, so the player is ready to go at the new -            // location -            notifySeekComplete_l(); -            break; -        } - +            // fall through.          case STATE_RUNNING: -        case STATE_PAUSED:          {              mAtEOS = false;              mSeekInProgress = true; @@ -444,6 +485,13 @@ status_t NuPlayerDriver::reset() {          notifyListener_l(MEDIA_STOPPED);      } +    char value[PROPERTY_VALUE_MAX]; +    if (property_get("persist.debug.sf.stats", value, NULL) && +            (!strcmp("1", value) || !strcasecmp("true", value))) { +        Vector<String16> args; +        dump(-1, args); +    } +      mState = STATE_RESET_IN_PROGRESS;      mPlayer->resetAsync(); @@ -612,22 +660,59 @@ void NuPlayerDriver::notifySeekComplete_l() {  status_t NuPlayerDriver::dump(          int fd, const Vector<String16> & /* args */) const { -    int64_t numFramesTotal; -    int64_t numFramesDropped; -    mPlayer->getStats(&numFramesTotal, &numFramesDropped); -    FILE *out = fdopen(dup(fd), "w"); +    Vector<sp<AMessage> > trackStats; +    mPlayer->getStats(&trackStats); -    fprintf(out, " NuPlayer\n"); -    fprintf(out, "  numFramesTotal(%" PRId64 "), numFramesDropped(%" PRId64 "), " -                 "percentageDropped(%.2f)\n", -                 numFramesTotal, -                 numFramesDropped, -                 numFramesTotal == 0 -                    ? 
0.0 : (double)numFramesDropped / numFramesTotal); +    AString logString(" NuPlayer\n"); +    char buf[256] = {0}; -    fclose(out); -    out = NULL; +    for (size_t i = 0; i < trackStats.size(); ++i) { +        const sp<AMessage> &stats = trackStats.itemAt(i); + +        AString mime; +        if (stats->findString("mime", &mime)) { +            snprintf(buf, sizeof(buf), "  mime(%s)\n", mime.c_str()); +            logString.append(buf); +        } + +        AString name; +        if (stats->findString("component-name", &name)) { +            snprintf(buf, sizeof(buf), "    decoder(%s)\n", name.c_str()); +            logString.append(buf); +        } + +        if (mime.startsWith("video/")) { +            int32_t width, height; +            if (stats->findInt32("width", &width) +                    && stats->findInt32("height", &height)) { +                snprintf(buf, sizeof(buf), "    resolution(%d x %d)\n", width, height); +                logString.append(buf); +            } + +            int64_t numFramesTotal = 0; +            int64_t numFramesDropped = 0; + +            stats->findInt64("frames-total", &numFramesTotal); +            stats->findInt64("frames-dropped-output", &numFramesDropped); +            snprintf(buf, sizeof(buf), "    numFramesTotal(%lld), numFramesDropped(%lld), " +                     "percentageDropped(%.2f%%)\n", +                     (long long)numFramesTotal, +                     (long long)numFramesDropped, +                     numFramesTotal == 0 +                            ? 
0.0 : (double)(numFramesDropped * 100) / numFramesTotal); +            logString.append(buf); +        } +    } + +    ALOGI("%s", logString.c_str()); + +    if (fd >= 0) { +        FILE *out = fdopen(dup(fd), "w"); +        fprintf(out, "%s", logString.c_str()); +        fclose(out); +        out = NULL; +    }      return OK;  } @@ -640,6 +725,7 @@ void NuPlayerDriver::notifyListener(  void NuPlayerDriver::notifyListener_l(          int msg, int ext1, int ext2, const Parcel *in) { +    ALOGD("notifyListener_l(%p), (%d, %d, %d)", this, msg, ext1, ext2);      switch (msg) {          case MEDIA_PLAYBACK_COMPLETE:          { diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index 5cba7d9..d009fd7 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -24,7 +24,7 @@ struct ALooper;  struct NuPlayer;  struct NuPlayerDriver : public MediaPlayerInterface { -    NuPlayerDriver(); +    NuPlayerDriver(pid_t pid);      virtual status_t initCheck(); @@ -39,6 +39,8 @@ struct NuPlayerDriver : public MediaPlayerInterface {      virtual status_t setDataSource(const sp<IStreamSource> &source); +    virtual status_t setDataSource(const sp<DataSource>& dataSource); +      virtual status_t setVideoSurfaceTexture(              const sp<IGraphicBufferProducer> &bufferProducer);      virtual status_t prepare(); @@ -47,6 +49,10 @@ struct NuPlayerDriver : public MediaPlayerInterface {      virtual status_t stop();      virtual status_t pause();      virtual bool isPlaying(); +    virtual status_t setPlaybackSettings(const AudioPlaybackRate &rate); +    virtual status_t getPlaybackSettings(AudioPlaybackRate *rate); +    virtual status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint); +    virtual status_t getSyncSettings(AVSyncSettings *sync, float *videoFps);      virtual status_t seekTo(int msec);      virtual status_t 
getCurrentPosition(int *msec);      virtual status_t getDuration(int *msec); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index 25225a8..04a46f4 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -19,22 +19,52 @@  #include <utils/Log.h>  #include "NuPlayerRenderer.h" - +#include <cutils/properties.h>  #include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AMessage.h>  #include <media/stagefright/foundation/AUtils.h>  #include <media/stagefright/foundation/AWakeLock.h> +#include <media/stagefright/MediaClock.h>  #include <media/stagefright/MediaErrors.h>  #include <media/stagefright/MetaData.h>  #include <media/stagefright/Utils.h> - -#include <VideoFrameScheduler.h> +#include <media/stagefright/VideoFrameScheduler.h>  #include <inttypes.h>  namespace android { +/* + * Example of common configuration settings in shell script form + +   #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager +   adb shell setprop audio.offload.disable 1 + +   #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager +   adb shell setprop audio.offload.video 1 + +   #Use audio callbacks for PCM data +   adb shell setprop media.stagefright.audio.cbk 1 + +   #Use deep buffer for PCM data with video (it is generally enabled for audio-only) +   adb shell setprop media.stagefright.audio.deep 1 + +   #Set size of buffers for pcm audio sink in msec (example: 1000 msec) +   adb shell setprop media.stagefright.audio.sink 1000 + + * These configurations take effect for the next track played (not the current track). 
+ */ + +static inline bool getUseAudioCallbackSetting() { +    return property_get_bool("media.stagefright.audio.cbk", false /* default_value */); +} + +static inline int32_t getAudioSinkPcmMsSetting() { +    return property_get_int32( +            "media.stagefright.audio.sink", 500 /* default_value */); +} +  // Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink  // is closed to allow the audio DSP to power down.  static const int64_t kOffloadPauseMaxUs = 10000000ll; @@ -63,34 +93,34 @@ NuPlayer::Renderer::Renderer(        mDrainVideoQueuePending(false),        mAudioQueueGeneration(0),        mVideoQueueGeneration(0), +      mAudioDrainGeneration(0), +      mVideoDrainGeneration(0), +      mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),        mAudioFirstAnchorTimeMediaUs(-1),        mAnchorTimeMediaUs(-1), -      mAnchorTimeRealUs(-1),        mAnchorNumFramesWritten(-1), -      mAnchorMaxMediaUs(-1),        mVideoLateByUs(0ll),        mHasAudio(false),        mHasVideo(false), -      mPauseStartedTimeRealUs(-1), -      mFlushingAudio(false), -      mFlushingVideo(false),        mNotifyCompleteAudio(false),        mNotifyCompleteVideo(false),        mSyncQueues(false),        mPaused(false), -      mPausePositionMediaTimeUs(-1),        mVideoSampleReceived(false),        mVideoRenderingStarted(false),        mVideoRenderingStartGeneration(0),        mAudioRenderingStartGeneration(0),        mAudioOffloadPauseTimeoutGeneration(0), -      mAudioOffloadTornDown(false), +      mAudioTornDown(false),        mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),        mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),        mTotalBuffersQueued(0),        mLastAudioBufferDrained(0), +      mUseAudioCallback(false),        mWakeLock(new AWakeLock()) { - +    mMediaClock = new MediaClock; +    mPlaybackRate = mPlaybackSettings.mSpeed; +    mMediaClock->setPlaybackRate(mPlaybackRate);  }  NuPlayer::Renderer::~Renderer() { @@ -105,7 +135,8 @@ 
void NuPlayer::Renderer::queueBuffer(          bool audio,          const sp<ABuffer> &buffer,          const sp<AMessage> ¬ifyConsumed) { -    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this); +    msg->setInt32("queueGeneration", getQueueGeneration(audio));      msg->setInt32("audio", static_cast<int32_t>(audio));      msg->setBuffer("buffer", buffer);      msg->setMessage("notifyConsumed", notifyConsumed); @@ -115,199 +146,209 @@ void NuPlayer::Renderer::queueBuffer(  void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {      CHECK_NE(finalResult, (status_t)OK); -    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); +    sp<AMessage> msg = new AMessage(kWhatQueueEOS, this); +    msg->setInt32("queueGeneration", getQueueGeneration(audio));      msg->setInt32("audio", static_cast<int32_t>(audio));      msg->setInt32("finalResult", finalResult);      msg->post();  } +status_t NuPlayer::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) { +    sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this); +    writeToAMessage(msg, rate); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +    } +    return err; +} + +status_t NuPlayer::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) { +    if (rate.mSpeed == 0.f) { +        onPause(); +        // don't call audiosink's setPlaybackRate if pausing, as pitch does not +        // have to correspond to the any non-0 speed (e.g old speed). Keep +        // settings nonetheless, using the old speed, in case audiosink changes. 
+        AudioPlaybackRate newRate = rate; +        newRate.mSpeed = mPlaybackSettings.mSpeed; +        mPlaybackSettings = newRate; +        return OK; +    } + +    if (mAudioSink != NULL && mAudioSink->ready()) { +        status_t err = mAudioSink->setPlaybackRate(rate); +        if (err != OK) { +            return err; +        } +    } +    mPlaybackSettings = rate; +    mPlaybackRate = rate.mSpeed; +    mMediaClock->setPlaybackRate(mPlaybackRate); +    return OK; +} + +status_t NuPlayer::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) { +    sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +        if (err == OK) { +            readFromAMessage(response, rate); +        } +    } +    return err; +} + +status_t NuPlayer::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) { +    if (mAudioSink != NULL && mAudioSink->ready()) { +        status_t err = mAudioSink->getPlaybackRate(rate); +        if (err == OK) { +            if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) { +                ALOGW("correcting mismatch in internal/external playback rate"); +            } +            // get playback settings used by audiosink, as it may be +            // slightly off due to audiosink not taking small changes. 
+            mPlaybackSettings = *rate; +            if (mPaused) { +                rate->mSpeed = 0.f; +            } +        } +        return err; +    } +    *rate = mPlaybackSettings; +    return OK; +} + +status_t NuPlayer::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) { +    sp<AMessage> msg = new AMessage(kWhatConfigSync, this); +    writeToAMessage(msg, sync, videoFpsHint); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +    } +    return err; +} + +status_t NuPlayer::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) { +    if (sync.mSource != AVSYNC_SOURCE_DEFAULT) { +        return BAD_VALUE; +    } +    // TODO: support sync sources +    return INVALID_OPERATION; +} + +status_t NuPlayer::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) { +    sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this); +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +        if (err == OK) { +            readFromAMessage(response, sync, videoFps); +        } +    } +    return err; +} + +status_t NuPlayer::Renderer::onGetSyncSettings( +        AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) { +    *sync = mSyncSettings; +    *videoFps = -1.f; +    return OK; +} +  void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {      { -        Mutex::Autolock autoLock(mFlushLock); +        Mutex::Autolock autoLock(mLock);          if (audio) {              mNotifyCompleteAudio |= notifyComplete; -            if (mFlushingAudio) { -                return; -            } -            mFlushingAudio = true; +            clearAudioFirstAnchorTime_l(); +            ++mAudioQueueGeneration; +            
++mAudioDrainGeneration;          } else {              mNotifyCompleteVideo |= notifyComplete; -            if (mFlushingVideo) { -                return; -            } -            mFlushingVideo = true; +            ++mVideoQueueGeneration; +            ++mVideoDrainGeneration;          } + +        clearAnchorTime_l(); +        mVideoLateByUs = 0; +        mSyncQueues = false;      } -    sp<AMessage> msg = new AMessage(kWhatFlush, id()); +    sp<AMessage> msg = new AMessage(kWhatFlush, this);      msg->setInt32("audio", static_cast<int32_t>(audio));      msg->post();  }  void NuPlayer::Renderer::signalTimeDiscontinuity() { -    Mutex::Autolock autoLock(mLock); -    // CHECK(mAudioQueue.empty()); -    // CHECK(mVideoQueue.empty()); -    setAudioFirstAnchorTime(-1); -    setAnchorTime(-1, -1); -    setVideoLateByUs(0); -    mSyncQueues = false; -} - -void NuPlayer::Renderer::signalAudioSinkChanged() { -    (new AMessage(kWhatAudioSinkChanged, id()))->post();  }  void NuPlayer::Renderer::signalDisableOffloadAudio() { -    (new AMessage(kWhatDisableOffloadAudio, id()))->post(); +    (new AMessage(kWhatDisableOffloadAudio, this))->post();  }  void NuPlayer::Renderer::signalEnableOffloadAudio() { -    (new AMessage(kWhatEnableOffloadAudio, id()))->post(); +    (new AMessage(kWhatEnableOffloadAudio, this))->post();  }  void NuPlayer::Renderer::pause() { -    (new AMessage(kWhatPause, id()))->post(); +    (new AMessage(kWhatPause, this))->post();  }  void NuPlayer::Renderer::resume() { -    (new AMessage(kWhatResume, id()))->post(); +    (new AMessage(kWhatResume, this))->post();  }  void NuPlayer::Renderer::setVideoFrameRate(float fps) { -    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id()); +    sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);      msg->setFloat("frame-rate", fps);      msg->post();  } -// Called on any threads, except renderer's thread. 
-status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) { -    { -        Mutex::Autolock autoLock(mLock); -        int64_t currentPositionUs; -        if (getCurrentPositionIfPaused_l(¤tPositionUs)) { -            *mediaUs = currentPositionUs; -            return OK; -        } -    } -    return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs()); -} - -// Called on only renderer's thread. -status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) { -    return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs()); -} - -// Called on only renderer's thread. -// Since mPaused and mPausePositionMediaTimeUs are changed only on renderer's -// thread, no need to acquire mLock. -status_t NuPlayer::Renderer::getCurrentPositionOnLooper( -        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) { -    int64_t currentPositionUs; -    if (getCurrentPositionIfPaused_l(¤tPositionUs)) { -        *mediaUs = currentPositionUs; -        return OK; -    } -    return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo); -} - -// Called either with mLock acquired or on renderer's thread. -bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) { -    if (!mPaused || mPausePositionMediaTimeUs < 0ll) { -        return false; -    } -    *mediaUs = mPausePositionMediaTimeUs; -    return true; -} -  // Called on any threads. 
-status_t NuPlayer::Renderer::getCurrentPositionFromAnchor( -        int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) { -    Mutex::Autolock autoLock(mTimeLock); -    if (!mHasAudio && !mHasVideo) { -        return NO_INIT; -    } - -    if (mAnchorTimeMediaUs < 0) { -        return NO_INIT; -    } - -    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; - -    if (mPauseStartedTimeRealUs != -1) { -        positionUs -= (nowUs - mPauseStartedTimeRealUs); -    } - -    // limit position to the last queued media time (for video only stream -    // position will be discrete as we don't know how long each frame lasts) -    if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) { -        if (positionUs > mAnchorMaxMediaUs) { -            positionUs = mAnchorMaxMediaUs; -        } -    } - -    if (positionUs < mAudioFirstAnchorTimeMediaUs) { -        positionUs = mAudioFirstAnchorTimeMediaUs; -    } - -    *mediaUs = (positionUs <= 0) ? 0 : positionUs; -    return OK; -} - -void NuPlayer::Renderer::setHasMedia(bool audio) { -    Mutex::Autolock autoLock(mTimeLock); -    if (audio) { -        mHasAudio = true; -    } else { -        mHasVideo = true; -    } +status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) { +    return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);  } -void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) { -    Mutex::Autolock autoLock(mTimeLock); -    mAudioFirstAnchorTimeMediaUs = mediaUs; +void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() { +    mAudioFirstAnchorTimeMediaUs = -1; +    mMediaClock->setStartingTimeMedia(-1);  } -void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) { -    Mutex::Autolock autoLock(mTimeLock); +void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {      if (mAudioFirstAnchorTimeMediaUs == -1) {          mAudioFirstAnchorTimeMediaUs = mediaUs; +        mMediaClock->setStartingTimeMedia(mediaUs);      }  } 
-void NuPlayer::Renderer::setAnchorTime( -        int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) { -    Mutex::Autolock autoLock(mTimeLock); -    mAnchorTimeMediaUs = mediaUs; -    mAnchorTimeRealUs = realUs; -    mAnchorNumFramesWritten = numFramesWritten; -    if (resume) { -        mPauseStartedTimeRealUs = -1; -    } +void NuPlayer::Renderer::clearAnchorTime_l() { +    mMediaClock->clearAnchor(); +    mAnchorTimeMediaUs = -1; +    mAnchorNumFramesWritten = -1;  }  void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) { -    Mutex::Autolock autoLock(mTimeLock); +    Mutex::Autolock autoLock(mLock);      mVideoLateByUs = lateUs;  }  int64_t NuPlayer::Renderer::getVideoLateByUs() { -    Mutex::Autolock autoLock(mTimeLock); +    Mutex::Autolock autoLock(mLock);      return mVideoLateByUs;  } -void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) { -    Mutex::Autolock autoLock(mTimeLock); -    mPauseStartedTimeRealUs = realUs; -} -  status_t NuPlayer::Renderer::openAudioSink(          const sp<AMessage> &format,          bool offloadOnly,          bool hasVideo,          uint32_t flags,          bool *isOffloaded) { -    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id()); +    sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);      msg->setMessage("format", format);      msg->setInt32("offload-only", offloadOnly);      msg->setInt32("has-video", hasVideo); @@ -328,7 +369,7 @@ status_t NuPlayer::Renderer::openAudioSink(  }  void NuPlayer::Renderer::closeAudioSink() { -    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id()); +    sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);      sp<AMessage> response;      msg->postAndAwaitResponse(&response); @@ -356,7 +397,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {              response->setInt32("err", err);              response->setInt32("offload", offloadingAudio()); -            uint32_t replyID; +            
sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              response->postReply(replyID); @@ -365,7 +406,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {          case kWhatCloseAudioSink:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              onCloseAudioSink(); @@ -383,14 +424,14 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {          case kWhatDrainAudioQueue:          { +            mDrainAudioQueuePending = false; +              int32_t generation; -            CHECK(msg->findInt32("generation", &generation)); -            if (generation != mAudioQueueGeneration) { +            CHECK(msg->findInt32("drainGeneration", &generation)); +            if (generation != getDrainGeneration(true /* audio */)) {                  break;              } -            mDrainAudioQueuePending = false; -              if (onDrainAudioQueue()) {                  uint32_t numFramesPlayed;                  CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), @@ -404,12 +445,13 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {                  int64_t delayUs =                      mAudioSink->msecsPerFrame()                          * numFramesPendingPlayout * 1000ll; +                if (mPlaybackRate > 1.0f) { +                    delayUs /= mPlaybackRate; +                }                  // Let's give it more data after about half that time                  // has elapsed. -                // kWhatDrainAudioQueue is used for non-offloading mode, -                // and mLock is used only for offloading mode. Therefore, -                // no need to acquire mLock here. 
+                Mutex::Autolock autoLock(mLock);                  postDrainAudioQueue_l(delayUs / 2);              }              break; @@ -418,8 +460,8 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {          case kWhatDrainVideoQueue:          {              int32_t generation; -            CHECK(msg->findInt32("generation", &generation)); -            if (generation != mVideoQueueGeneration) { +            CHECK(msg->findInt32("drainGeneration", &generation)); +            if (generation != getDrainGeneration(false /* audio */)) {                  break;              } @@ -427,22 +469,20 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {              onDrainVideoQueue(); -            Mutex::Autolock autoLock(mLock); -            postDrainVideoQueue_l(); +            postDrainVideoQueue();              break;          }          case kWhatPostDrainVideoQueue:          {              int32_t generation; -            CHECK(msg->findInt32("generation", &generation)); -            if (generation != mVideoQueueGeneration) { +            CHECK(msg->findInt32("drainGeneration", &generation)); +            if (generation != getDrainGeneration(false /* audio */)) {                  break;              }              mDrainVideoQueuePending = false; -            Mutex::Autolock autoLock(mLock); -            postDrainVideoQueue_l(); +            postDrainVideoQueue();              break;          } @@ -458,15 +498,69 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {              break;          } -        case kWhatFlush: +        case kWhatConfigPlayback:          { -            onFlush(msg); +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); +            AudioPlaybackRate rate; +            readFromAMessage(msg, &rate); +            status_t err = onConfigPlayback(rate); +            sp<AMessage> response = new AMessage; +            
response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } + +        case kWhatGetPlaybackSettings: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); +            AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT; +            status_t err = onGetPlaybackSettings(&rate); +            sp<AMessage> response = new AMessage; +            if (err == OK) { +                writeToAMessage(response, rate); +            } +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } + +        case kWhatConfigSync: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); +            AVSyncSettings sync; +            float videoFpsHint; +            readFromAMessage(msg, &sync, &videoFpsHint); +            status_t err = onConfigSync(sync, videoFpsHint); +            sp<AMessage> response = new AMessage; +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } + +        case kWhatGetSyncSettings: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); + +            ALOGV("kWhatGetSyncSettings"); +            AVSyncSettings sync; +            float videoFps = -1.f; +            status_t err = onGetSyncSettings(&sync, &videoFps); +            sp<AMessage> response = new AMessage; +            if (err == OK) { +                writeToAMessage(response, sync, videoFps); +            } +            response->setInt32("err", err); +            response->postReply(replyID);              break;          } -        case kWhatAudioSinkChanged: +        case kWhatFlush:          { -            onAudioSinkChanged(); +            onFlush(msg);              break;          } @@ -502,21 +596,21 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {  
            break;          } -        case kWhatAudioOffloadTearDown: +        case kWhatAudioTearDown:          { -            onAudioOffloadTearDown(kDueToError); +            onAudioTearDown(kDueToError);              break;          }          case kWhatAudioOffloadPauseTimeout:          {              int32_t generation; -            CHECK(msg->findInt32("generation", &generation)); +            CHECK(msg->findInt32("drainGeneration", &generation));              if (generation != mAudioOffloadPauseTimeoutGeneration) {                  break;              }              ALOGV("Audio Offload tear down due to pause timeout."); -            onAudioOffloadTearDown(kDueToTimeout); +            onAudioTearDown(kDueToTimeout);              mWakeLock->release();              break;          } @@ -528,8 +622,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {  }  void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) { -    if (mDrainAudioQueuePending || mSyncQueues || mPaused -            || offloadingAudio()) { +    if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {          return;      } @@ -538,19 +631,19 @@ void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {      }      mDrainAudioQueuePending = true; -    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); -    msg->setInt32("generation", mAudioQueueGeneration); +    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this); +    msg->setInt32("drainGeneration", mAudioDrainGeneration);      msg->post(delayUs);  } -void NuPlayer::Renderer::prepareForMediaRenderingStart() { -    mAudioRenderingStartGeneration = mAudioQueueGeneration; -    mVideoRenderingStartGeneration = mVideoQueueGeneration; +void NuPlayer::Renderer::prepareForMediaRenderingStart_l() { +    mAudioRenderingStartGeneration = mAudioDrainGeneration; +    mVideoRenderingStartGeneration = mVideoDrainGeneration;  } -void NuPlayer::Renderer::notifyIfMediaRenderingStarted() { -    
if (mVideoRenderingStartGeneration == mVideoQueueGeneration && -        mAudioRenderingStartGeneration == mAudioQueueGeneration) { +void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() { +    if (mVideoRenderingStartGeneration == mVideoDrainGeneration && +        mAudioRenderingStartGeneration == mAudioDrainGeneration) {          mVideoRenderingStartGeneration = -1;          mAudioRenderingStartGeneration = -1; @@ -578,13 +671,15 @@ size_t NuPlayer::Renderer::AudioSinkCallback(          case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:          { +            ALOGV("AudioSink::CB_EVENT_STREAM_END");              me->notifyEOS(true /* audio */, ERROR_END_OF_STREAM);              break;          }          case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:          { -            me->notifyAudioOffloadTearDown(); +            ALOGV("AudioSink::CB_EVENT_TEAR_DOWN"); +            me->notifyAudioTearDown();              break;          }      } @@ -595,7 +690,7 @@ size_t NuPlayer::Renderer::AudioSinkCallback(  size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {      Mutex::Autolock autoLock(mLock); -    if (!offloadingAudio() || mPaused) { +    if (!mUseAudioCallback) {          return 0;      } @@ -603,13 +698,13 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {      size_t sizeCopied = 0;      bool firstEntry = true; +    QueueEntry *entry;  // will be valid after while loop if hasEOS is set.      
while (sizeCopied < size && !mAudioQueue.empty()) { -        QueueEntry *entry = &*mAudioQueue.begin(); +        entry = &*mAudioQueue.begin();          if (entry->mBuffer == NULL) { // EOS              hasEOS = true;              mAudioQueue.erase(mAudioQueue.begin()); -            entry = NULL;              break;          } @@ -617,8 +712,8 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {              firstEntry = false;              int64_t mediaTimeUs;              CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); -            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); -            setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); +            ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6); +            setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);          }          size_t copy = entry->mBuffer->size() - entry->mOffset; @@ -638,34 +733,97 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {              entry = NULL;          }          sizeCopied += copy; -        notifyIfMediaRenderingStarted(); + +        notifyIfMediaRenderingStarted_l();      }      if (mAudioFirstAnchorTimeMediaUs >= 0) {          int64_t nowUs = ALooper::GetNowUs(); -        setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs)); +        int64_t nowMediaUs = +            mAudioFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs); +        // we don't know how much data we are queueing for offloaded tracks. +        mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);      } -    // we don't know how much data we are queueing for offloaded tracks -    mAnchorMaxMediaUs = -1; +    // for non-offloaded audio, we need to compute the frames written because +    // there is no EVENT_STREAM_END notification. The frames written gives +    // an estimate on the pending played out duration. 
+    if (!offloadingAudio()) { +        mNumFramesWritten += sizeCopied / mAudioSink->frameSize(); +    }      if (hasEOS) { -        (new AMessage(kWhatStopAudioSink, id()))->post(); +        (new AMessage(kWhatStopAudioSink, this))->post(); +        // As there is currently no EVENT_STREAM_END callback notification for +        // non-offloaded audio tracks, we need to post the EOS ourselves. +        if (!offloadingAudio()) { +            int64_t postEOSDelayUs = 0; +            if (mAudioSink->needsTrailingPadding()) { +                postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs()); +            } +            ALOGV("fillAudioBuffer: notifyEOS " +                    "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld", +                    mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs); +            notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs); +        }      } -      return sizeCopied;  } +void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() { +    List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it; +    bool foundEOS = false; +    while (it != mAudioQueue.end()) { +        int32_t eos; +        QueueEntry *entry = &*it++; +        if (entry->mBuffer == NULL +                || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) { +            itEOS = it; +            foundEOS = true; +        } +    } + +    if (foundEOS) { +        // post all replies before EOS and drop the samples +        for (it = mAudioQueue.begin(); it != itEOS; it++) { +            if (it->mBuffer == NULL) { +                // delay doesn't matter as we don't even have an AudioTrack +                notifyEOS(true /* audio */, it->mFinalResult); +            } else { +                it->mNotifyConsumed->post(); +            } +        } +        mAudioQueue.erase(mAudioQueue.begin(), itEOS); +    } +} +  bool NuPlayer::Renderer::onDrainAudioQueue() { +    // TODO: This call to getPosition 
checks if AudioTrack has been created +    // in AudioSink before draining audio. If AudioTrack doesn't exist, then +    // CHECKs on getPosition will fail. +    // We still need to figure out why AudioTrack is not created when +    // this function is called. One possible reason could be leftover +    // audio. Another possible place is to check whether decoder +    // has received INFO_FORMAT_CHANGED as the first buffer since +    // AudioSink is opened there, and possible interactions with flush +    // immediately after start. Investigate error message +    // "vorbis_dsp_synthesis returned -135", along with RTSP.      uint32_t numFramesPlayed;      if (mAudioSink->getPosition(&numFramesPlayed) != OK) { +        // When getPosition fails, renderer will not reschedule the draining +        // unless new samples are queued. +        // If we have pending EOS (or "eos" marker for discontinuities), we need +        // to post these now as NuPlayerDecoder might be waiting for it. +        drainAudioQueueUntilLastEOS(); + +        ALOGW("onDrainAudioQueue(): audio sink is not ready");          return false;      } +#if 0      ssize_t numFramesAvailableToWrite =          mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); -#if 0      if (numFramesAvailableToWrite == mAudioSink->frameCount()) {          ALOGI("audio sink underrun");      } else { @@ -674,10 +832,8 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {      }  #endif -    size_t numBytesAvailableToWrite = -        numFramesAvailableToWrite * mAudioSink->frameSize(); - -    while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) { +    uint32_t prevFramesWritten = mNumFramesWritten; +    while (!mAudioQueue.empty()) {          QueueEntry *entry = &*mAudioQueue.begin();          mLastAudioBufferDrained = entry->mBufferOrdinal; @@ -702,22 +858,27 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {              return false;          } -        if (entry->mOffset == 0) { +        // ignore 
0-sized buffer which could be EOS marker with no data +        if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {              int64_t mediaTimeUs;              CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); -            ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); +            ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs", +                    mediaTimeUs / 1E6);              onNewAudioMediaTime(mediaTimeUs);          }          size_t copy = entry->mBuffer->size() - entry->mOffset; -        if (copy > numBytesAvailableToWrite) { -            copy = numBytesAvailableToWrite; -        } -        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy); +        ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, +                                            copy, false /* blocking */);          if (written < 0) {              // An error in AudioSink write. Perhaps the AudioSink was not properly opened. 
-            ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy); +            if (written == WOULD_BLOCK) { +                ALOGV("AudioSink write would block when writing %zu bytes", copy); +            } else { +                ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy); +                notifyAudioTearDown(); +            }              break;          } @@ -729,73 +890,98 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {              entry = NULL;          } -        numBytesAvailableToWrite -= written;          size_t copiedFrames = written / mAudioSink->frameSize();          mNumFramesWritten += copiedFrames; -        notifyIfMediaRenderingStarted(); +        { +            Mutex::Autolock autoLock(mLock); +            notifyIfMediaRenderingStarted_l(); +        }          if (written != (ssize_t)copy) {              // A short count was received from AudioSink::write()              // -            // AudioSink write should block until exactly the number of bytes are delivered. -            // But it may return with a short count (without an error) when: +            // AudioSink write is called in non-blocking mode. +            // It may return with a short count when:              //              // 1) Size to be copied is not a multiple of the frame size. We consider this fatal. -            // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded. +            // 2) The data to be copied exceeds the available buffer in AudioSink. +            // 3) An error occurs and data has been partially copied to the buffer in AudioSink. +            // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.              // (Case 1)              // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it              // needs to fail, as we should not carry over fractional frames between calls.              
CHECK_EQ(copy % mAudioSink->frameSize(), 0); -            // (Case 2) +            // (Case 2, 3, 4)              // Return early to the caller.              // Beware of calling immediately again as this may busy-loop if you are not careful. -            ALOGW("AudioSink write short frame count %zd < %zu", written, copy); +            ALOGV("AudioSink write short frame count %zd < %zu", written, copy);              break;          }      } -    mAnchorMaxMediaUs = -        mAnchorTimeMediaUs + -                (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL) -                        * 1000LL * mAudioSink->msecsPerFrame()); +    int64_t maxTimeMedia; +    { +        Mutex::Autolock autoLock(mLock); +        maxTimeMedia = +            mAnchorTimeMediaUs + +                    (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL) +                            * 1000LL * mAudioSink->msecsPerFrame()); +    } +    mMediaClock->updateMaxTimeMedia(maxTimeMedia); + +    // calculate whether we need to reschedule another write. +    bool reschedule = !mAudioQueue.empty() +            && (!mPaused +                || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers +    //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u", +    //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten); +    return reschedule; +} -    return !mAudioQueue.empty(); +int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) { +    int32_t sampleRate = offloadingAudio() ? +            mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate; +    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. +    return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);  } +// Calculate duration of pending samples if played at normal rate (i.e., 1.0).  
int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) { -    int64_t writtenAudioDurationUs = -        mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame(); +    int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);      return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs);  }  int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) { -    int64_t currentPositionUs; -    if (mPaused || getCurrentPositionOnLooper( -            &currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) { -        // If failed to get current position, e.g. due to audio clock is not ready, then just -        // play out video immediately without delay. +    int64_t realUs; +    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) { +        // If failed to get current position, e.g. due to audio clock is +        // not ready, then just play out video immediately without delay.          return nowUs;      } -    return (mediaTimeUs - currentPositionUs) + nowUs; +    return realUs;  }  void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) { +    Mutex::Autolock autoLock(mLock);      // TRICKY: vorbis decoder generates multiple frames with the same      // timestamp, so only update on the first frame with a given timestamp      if (mediaTimeUs == mAnchorTimeMediaUs) {          return;      } -    setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); +    setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);      int64_t nowUs = ALooper::GetNowUs(); -    setAnchorTime( -            mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten); +    int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs); +    mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs); +    mAnchorNumFramesWritten = mNumFramesWritten; +    mAnchorTimeMediaUs = mediaTimeUs;  } -void NuPlayer::Renderer::postDrainVideoQueue_l() { +// Called without mLock acquired. 
+void NuPlayer::Renderer::postDrainVideoQueue() {      if (mDrainVideoQueuePending -            || mSyncQueues +            || getSyncQueues()              || (mPaused && mVideoSampleReceived)) {          return;      } @@ -806,8 +992,8 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() {      QueueEntry &entry = *mVideoQueue.begin(); -    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); -    msg->setInt32("generation", mVideoQueueGeneration); +    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this); +    msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));      if (entry.mBuffer == NULL) {          // EOS doesn't carry a timestamp. @@ -827,16 +1013,19 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() {          int64_t mediaTimeUs;          CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); -        if (mAnchorTimeMediaUs < 0) { -            setAnchorTime(mediaTimeUs, nowUs); -            mPausePositionMediaTimeUs = mediaTimeUs; -            mAnchorMaxMediaUs = mediaTimeUs; -            realTimeUs = nowUs; -        } else { -            realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); +        { +            Mutex::Autolock autoLock(mLock); +            if (mAnchorTimeMediaUs < 0) { +                mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs); +                mAnchorTimeMediaUs = mediaTimeUs; +                realTimeUs = nowUs; +            } else { +                realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); +            }          }          if (!mHasAudio) { -            mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps +            // smooth out videos >= 10fps +            mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);          }          // Heuristics to handle situation when media time changed without a @@ -913,18 +1102,21 @@ void NuPlayer::Renderer::onDrainVideoQueue() {          if (tooLate) {              ALOGV("video late by %lld us (%.2f secs)", - 
                mVideoLateByUs, mVideoLateByUs / 1E6); +                 (long long)mVideoLateByUs, mVideoLateByUs / 1E6);          } else { +            int64_t mediaUs = 0; +            mMediaClock->getMediaTime(realTimeUs, &mediaUs);              ALOGV("rendering video at media time %.2f secs",                      (mFlags & FLAG_REAL_TIME ? realTimeUs : -                    (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); +                    mediaUs) / 1E6);          }      } else {          setVideoLateByUs(0);          if (!mVideoSampleReceived && !mHasAudio) {              // This will ensure that the first frame after a flush won't be used as anchor              // when renderer is in paused state, because resume can happen any time after seek. -            setAnchorTime(-1, -1); +            Mutex::Autolock autoLock(mLock); +            clearAnchorTime_l();          }      } @@ -941,7 +1133,8 @@ void NuPlayer::Renderer::onDrainVideoQueue() {              mVideoRenderingStarted = true;              notifyVideoRenderingStart();          } -        notifyIfMediaRenderingStarted(); +        Mutex::Autolock autoLock(mLock); +        notifyIfMediaRenderingStarted_l();      }  } @@ -959,15 +1152,23 @@ void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t del      notify->post(delayUs);  } -void NuPlayer::Renderer::notifyAudioOffloadTearDown() { -    (new AMessage(kWhatAudioOffloadTearDown, id()))->post(); +void NuPlayer::Renderer::notifyAudioTearDown() { +    (new AMessage(kWhatAudioTearDown, this))->post();  }  void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {      int32_t audio;      CHECK(msg->findInt32("audio", &audio)); -    setHasMedia(audio); +    if (dropBufferIfStale(audio, msg)) { +        return; +    } + +    if (audio) { +        mHasAudio = true; +    } else { +        mHasVideo = true; +    }      if (mHasVideo) {          if (mVideoScheduler == NULL) { @@ -976,10 +1177,6 @@ void 
NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {          }      } -    if (dropBufferWhileFlushing(audio, msg)) { -        return; -    } -      sp<ABuffer> buffer;      CHECK(msg->findBuffer("buffer", &buffer)); @@ -993,15 +1190,16 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {      entry.mFinalResult = OK;      entry.mBufferOrdinal = ++mTotalBuffersQueued; -    Mutex::Autolock autoLock(mLock);      if (audio) { +        Mutex::Autolock autoLock(mLock);          mAudioQueue.push_back(entry);          postDrainAudioQueue_l();      } else {          mVideoQueue.push_back(entry); -        postDrainVideoQueue_l(); +        postDrainVideoQueue();      } +    Mutex::Autolock autoLock(mLock);      if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {          return;      } @@ -1050,7 +1248,9 @@ void NuPlayer::Renderer::syncQueuesDone_l() {      }      if (!mVideoQueue.empty()) { -        postDrainVideoQueue_l(); +        mLock.unlock(); +        postDrainVideoQueue(); +        mLock.lock();      }  } @@ -1058,7 +1258,7 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {      int32_t audio;      CHECK(msg->findInt32("audio", &audio)); -    if (dropBufferWhileFlushing(audio, msg)) { +    if (dropBufferIfStale(audio, msg)) {          return;      } @@ -1069,19 +1269,20 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {      entry.mOffset = 0;      entry.mFinalResult = finalResult; -    Mutex::Autolock autoLock(mLock);      if (audio) { +        Mutex::Autolock autoLock(mLock);          if (mAudioQueue.empty() && mSyncQueues) {              syncQueuesDone_l();          }          mAudioQueue.push_back(entry);          postDrainAudioQueue_l();      } else { -        if (mVideoQueue.empty() && mSyncQueues) { +        if (mVideoQueue.empty() && getSyncQueues()) { +            Mutex::Autolock autoLock(mLock);              syncQueuesDone_l();          }          mVideoQueue.push_back(entry); -        
postDrainVideoQueue_l(); +        postDrainVideoQueue();      }  } @@ -1090,31 +1291,25 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {      CHECK(msg->findInt32("audio", &audio));      { -        Mutex::Autolock autoLock(mFlushLock); +        Mutex::Autolock autoLock(mLock);          if (audio) { -            mFlushingAudio = false;              notifyComplete = mNotifyCompleteAudio;              mNotifyCompleteAudio = false;          } else { -            mFlushingVideo = false;              notifyComplete = mNotifyCompleteVideo;              mNotifyCompleteVideo = false;          } -    } -    // If we're currently syncing the queues, i.e. dropping audio while -    // aligning the first audio/video buffer times and only one of the -    // two queues has data, we may starve that queue by not requesting -    // more buffers from the decoder. If the other source then encounters -    // a discontinuity that leads to flushing, we'll never find the -    // corresponding discontinuity on the other queue. -    // Therefore we'll stop syncing the queues if at least one of them -    // is flushed. -    { -         Mutex::Autolock autoLock(mLock); -         syncQueuesDone_l(); -         setPauseStartedTimeRealUs(-1); -         setAnchorTime(-1, -1); +        // If we're currently syncing the queues, i.e. dropping audio while +        // aligning the first audio/video buffer times and only one of the +        // two queues has data, we may starve that queue by not requesting +        // more buffers from the decoder. If the other source then encounters +        // a discontinuity that leads to flushing, we'll never find the +        // corresponding discontinuity on the other queue. +        // Therefore we'll stop syncing the queues if at least one of them +        // is flushed. +        syncQueuesDone_l(); +        clearAnchorTime_l();      }      ALOGV("flushing %s", audio ? 
"audio" : "video"); @@ -1123,12 +1318,11 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {              Mutex::Autolock autoLock(mLock);              flushQueue(&mAudioQueue); -            ++mAudioQueueGeneration; -            prepareForMediaRenderingStart(); +            ++mAudioDrainGeneration; +            prepareForMediaRenderingStart_l(); -            if (offloadingAudio()) { -                setAudioFirstAnchorTime(-1); -            } +            // the frame count will be reset after flush. +            clearAudioFirstAnchorTime_l();          }          mDrainAudioQueuePending = false; @@ -1136,19 +1330,32 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {          if (offloadingAudio()) {              mAudioSink->pause();              mAudioSink->flush(); -            mAudioSink->start(); +            if (!mPaused) { +                mAudioSink->start(); +            } +        } else { +            mAudioSink->pause(); +            mAudioSink->flush(); +            // Call stop() to signal to the AudioSink to completely fill the +            // internal buffer before resuming playback. 
+            mAudioSink->stop(); +            if (!mPaused) { +                mAudioSink->start(); +            } +            mNumFramesWritten = 0;          }      } else {          flushQueue(&mVideoQueue);          mDrainVideoQueuePending = false; -        ++mVideoQueueGeneration;          if (mVideoScheduler != NULL) {              mVideoScheduler->restart();          } -        prepareForMediaRenderingStart(); +        Mutex::Autolock autoLock(mLock); +        ++mVideoDrainGeneration; +        prepareForMediaRenderingStart_l();      }      mVideoSampleReceived = false; @@ -1178,20 +1385,12 @@ void NuPlayer::Renderer::notifyFlushComplete(bool audio) {      notify->post();  } -bool NuPlayer::Renderer::dropBufferWhileFlushing( +bool NuPlayer::Renderer::dropBufferIfStale(          bool audio, const sp<AMessage> &msg) { -    bool flushing = false; +    int32_t queueGeneration; +    CHECK(msg->findInt32("queueGeneration", &queueGeneration)); -    { -        Mutex::Autolock autoLock(mFlushLock); -        if (audio) { -            flushing = mFlushingAudio; -        } else { -            flushing = mFlushingVideo; -        } -    } - -    if (!flushing) { +    if (queueGeneration == getQueueGeneration(audio)) {          return false;      } @@ -1209,7 +1408,10 @@ void NuPlayer::Renderer::onAudioSinkChanged() {      }      CHECK(!mDrainAudioQueuePending);      mNumFramesWritten = 0; -    mAnchorNumFramesWritten = -1; +    { +        Mutex::Autolock autoLock(mLock); +        mAnchorNumFramesWritten = -1; +    }      uint32_t written;      if (mAudioSink->getFramesWritten(&written) == OK) {          mNumFramesWritten = written; @@ -1219,40 +1421,33 @@ void NuPlayer::Renderer::onAudioSinkChanged() {  void NuPlayer::Renderer::onDisableOffloadAudio() {      Mutex::Autolock autoLock(mLock);      mFlags &= ~FLAG_OFFLOAD_AUDIO; -    ++mAudioQueueGeneration; +    ++mAudioDrainGeneration; +    if (mAudioRenderingStartGeneration != -1) { +        
prepareForMediaRenderingStart_l(); +    }  }  void NuPlayer::Renderer::onEnableOffloadAudio() {      Mutex::Autolock autoLock(mLock);      mFlags |= FLAG_OFFLOAD_AUDIO; -    ++mAudioQueueGeneration; +    ++mAudioDrainGeneration; +    if (mAudioRenderingStartGeneration != -1) { +        prepareForMediaRenderingStart_l(); +    }  }  void NuPlayer::Renderer::onPause() {      if (mPaused) { -        ALOGW("Renderer::onPause() called while already paused!");          return;      } -    int64_t currentPositionUs; -    int64_t pausePositionMediaTimeUs; -    if (getCurrentPositionFromAnchor( -            &currentPositionUs, ALooper::GetNowUs()) == OK) { -        pausePositionMediaTimeUs = currentPositionUs; -    } else { -        // Set paused position to -1 (unavailabe) if we don't have anchor time -        // This could happen if client does a seekTo() immediately followed by -        // pause(). Renderer will be flushed with anchor time cleared. We don't -        // want to leave stale value in mPausePositionMediaTimeUs. -        pausePositionMediaTimeUs = -1; -    } +      {          Mutex::Autolock autoLock(mLock); -        mPausePositionMediaTimeUs = pausePositionMediaTimeUs; -        ++mAudioQueueGeneration; -        ++mVideoQueueGeneration; -        prepareForMediaRenderingStart(); +        // we do not increment audio drain generation so that we fill audio buffer during pause. 
+        ++mVideoDrainGeneration; +        prepareForMediaRenderingStart_l();          mPaused = true; -        setPauseStartedTimeRealUs(ALooper::GetNowUs()); +        mMediaClock->setPlaybackRate(0.0);      }      mDrainAudioQueuePending = false; @@ -1263,7 +1458,7 @@ void NuPlayer::Renderer::onPause() {          startAudioOffloadPauseTimeout();      } -    ALOGV("now paused audio queue has %d entries, video has %d entries", +    ALOGV("now paused audio queue has %zu entries, video has %zu entries",            mAudioQueue.size(), mVideoQueue.size());  } @@ -1274,24 +1469,30 @@ void NuPlayer::Renderer::onResume() {      if (mHasAudio) {          cancelAudioOffloadPauseTimeout(); -        mAudioSink->start(); +        status_t err = mAudioSink->start(); +        if (err != OK) { +            notifyAudioTearDown(); +        }      } -    Mutex::Autolock autoLock(mLock); -    mPaused = false; -    if (mPauseStartedTimeRealUs != -1) { -        int64_t newAnchorRealUs = -            mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs; -        setAnchorTime( -                mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */); -    } +    { +        Mutex::Autolock autoLock(mLock); +        mPaused = false; -    if (!mAudioQueue.empty()) { -        postDrainAudioQueue_l(); +        // configure audiosink as we did not do it when pausing +        if (mAudioSink != NULL && mAudioSink->ready()) { +            mAudioSink->setPlaybackRate(mPlaybackSettings); +        } + +        mMediaClock->setPlaybackRate(mPlaybackRate); + +        if (!mAudioQueue.empty()) { +            postDrainAudioQueue_l(); +        }      }      if (!mVideoQueue.empty()) { -        postDrainVideoQueue_l(); +        postDrainVideoQueue();      }  } @@ -1302,6 +1503,21 @@ void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {      mVideoScheduler->init(fps);  } +int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) { +    Mutex::Autolock 
autoLock(mLock); +    return (audio ? mAudioQueueGeneration : mVideoQueueGeneration); +} + +int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) { +    Mutex::Autolock autoLock(mLock); +    return (audio ? mAudioDrainGeneration : mVideoDrainGeneration); +} + +bool NuPlayer::Renderer::getSyncQueues() { +    Mutex::Autolock autoLock(mLock); +    return mSyncQueues; +} +  // TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()  // as it acquires locks and may query the audio driver.  // @@ -1309,6 +1525,7 @@ void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {  // accessing getTimestamp() or getPosition() every time a data buffer with  // a media time is received.  // +// Calculate duration of played samples if played at normal rate (i.e., 1.0).  int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {      uint32_t numFramesPlayed;      int64_t numFramesPlayedAt; @@ -1343,12 +1560,11 @@ int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {          CHECK_EQ(res, (status_t)OK);          numFramesPlayedAt = nowUs;          numFramesPlayedAt += 1000LL * mAudioSink->latency() / 2; /* XXX */ -        //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); +        //ALOGD("getPosition: %u %lld", numFramesPlayed, (long long)numFramesPlayedAt);      } -    // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.      
//CHECK_EQ(numFramesPlayed & (1 << 31), 0);  // can't be negative until 12.4 hrs, test -    int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()) +    int64_t durationUs = getDurationUsIfPlayedAtSampleRate(numFramesPlayed)              + nowUs - numFramesPlayedAt;      if (durationUs < 0) {          // Occurs when numFramesPlayed position is very small and the following: @@ -1366,23 +1582,22 @@ int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) {      return durationUs;  } -void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reason) { -    if (mAudioOffloadTornDown) { +void NuPlayer::Renderer::onAudioTearDown(AudioTearDownReason reason) { +    if (mAudioTornDown) {          return;      } -    mAudioOffloadTornDown = true; +    mAudioTornDown = true;      int64_t currentPositionUs; -    if (getCurrentPositionOnLooper(&currentPositionUs) != OK) { -        currentPositionUs = 0; +    sp<AMessage> notify = mNotify->dup(); +    if (getCurrentPosition(&currentPositionUs) == OK) { +        notify->setInt64("positionUs", currentPositionUs);      }      mAudioSink->stop();      mAudioSink->flush(); -    sp<AMessage> notify = mNotify->dup(); -    notify->setInt32("what", kWhatAudioOffloadTearDown); -    notify->setInt64("positionUs", currentPositionUs); +    notify->setInt32("what", kWhatAudioTearDown);      notify->setInt32("reason", reason);      notify->post();  } @@ -1390,8 +1605,8 @@ void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReaso  void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {      if (offloadingAudio()) {          mWakeLock->acquire(); -        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id()); -        msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration); +        sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this); +        msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);         
msg->post(kOffloadPauseMaxUs);      }  } @@ -1475,18 +1690,23 @@ status_t NuPlayer::Renderer::onOpenAudioSink(              offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;              audioSinkChanged = true;              mAudioSink->close(); +              err = mAudioSink->open(                      sampleRate,                      numChannels,                      (audio_channel_mask_t)channelMask,                      audioFormat, -                    8 /* bufferCount */, +                    0 /* bufferCount - unused */,                      &NuPlayer::Renderer::AudioSinkCallback,                      this,                      (audio_output_flags_t)offloadFlags,                      &offloadInfo);              if (err == OK) { +                err = mAudioSink->setPlaybackRate(mPlaybackSettings); +            } + +            if (err == OK) {                  // If the playback is offloaded to h/w, we pass                  // the HAL some metadata information.                  // We don't want to do this for PCM because it @@ -1494,7 +1714,9 @@ status_t NuPlayer::Renderer::onOpenAudioSink(                  // before reaching the hardware.                  // TODO                  mCurrentOffloadInfo = offloadInfo; -                err = mAudioSink->start(); +                if (!mPaused) { // for preview mode, don't start if paused +                    err = mAudioSink->start(); +                }                  ALOGV_IF(err == OK, "openAudioSink: offload succeeded");              }              if (err != OK) { @@ -1503,6 +1725,9 @@ status_t NuPlayer::Renderer::onOpenAudioSink(                  onDisableOffloadAudio();                  mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;                  ALOGV("openAudioSink: offload failed"); +            } else { +                mUseAudioCallback = true;  // offload mode transfers data through callback +                ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.              
}          }      } @@ -1527,29 +1752,47 @@ status_t NuPlayer::Renderer::onOpenAudioSink(          audioSinkChanged = true;          mAudioSink->close();          mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER; +        // Note: It is possible to set up the callback, but not use it to send audio data. +        // This requires a fix in AudioSink to explicitly specify the transfer mode. +        mUseAudioCallback = getUseAudioCallbackSetting(); +        if (mUseAudioCallback) { +            ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message. +        } + +        // Compute the desired buffer size. +        // For callback mode, the amount of time before wakeup is about half the buffer size. +        const uint32_t frameCount = +                (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000; +          status_t err = mAudioSink->open(                      sampleRate,                      numChannels,                      (audio_channel_mask_t)channelMask,                      AUDIO_FORMAT_PCM_16_BIT, -                    8 /* bufferCount */, +                    0 /* bufferCount - unused */, +                    mUseAudioCallback ? &NuPlayer::Renderer::AudioSinkCallback : NULL, +                    mUseAudioCallback ? 
this : NULL, +                    (audio_output_flags_t)pcmFlags,                      NULL, -                    NULL, -                    (audio_output_flags_t)pcmFlags); +                    true /* doNotReconnect */, +                    frameCount); +        if (err == OK) { +            err = mAudioSink->setPlaybackRate(mPlaybackSettings); +        }          if (err != OK) {              ALOGW("openAudioSink: non offloaded open failed status: %d", err);              mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;              return err;          }          mCurrentPcmInfo = info; -        mAudioSink->start(); +        if (!mPaused) { // for preview mode, don't start if paused +            mAudioSink->start(); +        }      }      if (audioSinkChanged) {          onAudioSinkChanged();      } -    if (offloadingAudio()) { -        mAudioOffloadTornDown = false; -    } +    mAudioTornDown = false;      return OK;  } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h index 003d1d0..3e65649 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h @@ -18,12 +18,16 @@  #define NUPLAYER_RENDERER_H_ +#include <media/AudioResamplerPublic.h> +#include <media/AVSyncSettings.h> +  #include "NuPlayer.h"  namespace android {  struct ABuffer;  class  AWakeLock; +struct MediaClock;  struct VideoFrameScheduler;  struct NuPlayer::Renderer : public AHandler { @@ -47,6 +51,11 @@ struct NuPlayer::Renderer : public AHandler {      void queueEOS(bool audio, status_t finalResult); +    status_t setPlaybackSettings(const AudioPlaybackRate &rate /* sanitized */); +    status_t getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */); +    status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint); +    status_t getSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */); +      void flush(bool 
audio, bool notifyComplete);      void signalTimeDiscontinuity(); @@ -61,16 +70,8 @@ struct NuPlayer::Renderer : public AHandler {      void setVideoFrameRate(float fps); -    // Following setters and getters are protected by mTimeLock.      status_t getCurrentPosition(int64_t *mediaUs); -    void setHasMedia(bool audio); -    void setAudioFirstAnchorTime(int64_t mediaUs); -    void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs); -    void setAnchorTime( -            int64_t mediaUs, int64_t realUs, int64_t numFramesWritten = -1, bool resume = false); -    void setVideoLateByUs(int64_t lateUs);      int64_t getVideoLateByUs(); -    void setPauseStartedTimeRealUs(int64_t realUs);      status_t openAudioSink(              const sp<AMessage> &format, @@ -81,16 +82,16 @@ struct NuPlayer::Renderer : public AHandler {      void closeAudioSink();      enum { -        kWhatEOS                 = 'eos ', -        kWhatFlushComplete       = 'fluC', -        kWhatPosition            = 'posi', -        kWhatVideoRenderingStart = 'vdrd', -        kWhatMediaRenderingStart = 'mdrd', -        kWhatAudioOffloadTearDown = 'aOTD', +        kWhatEOS                      = 'eos ', +        kWhatFlushComplete            = 'fluC', +        kWhatPosition                 = 'posi', +        kWhatVideoRenderingStart      = 'vdrd', +        kWhatMediaRenderingStart      = 'mdrd', +        kWhatAudioTearDown            = 'adTD',          kWhatAudioOffloadPauseTimeout = 'aOPT',      }; -    enum AudioOffloadTearDownReason { +    enum AudioTearDownReason {          kDueToError = 0,          kDueToTimeout,      }; @@ -107,8 +108,11 @@ private:          kWhatPostDrainVideoQueue = 'pDVQ',          kWhatQueueBuffer         = 'queB',          kWhatQueueEOS            = 'qEOS', +        kWhatConfigPlayback      = 'cfPB', +        kWhatConfigSync          = 'cfSy', +        kWhatGetPlaybackSettings = 'gPbS', +        kWhatGetSyncSettings     = 'gSyS',          kWhatFlush               = 'flus', -      
  kWhatAudioSinkChanged    = 'auSC',          kWhatPause               = 'paus',          kWhatResume              = 'resm',          kWhatOpenAudioSink       = 'opnA', @@ -142,26 +146,23 @@ private:      bool mDrainVideoQueuePending;      int32_t mAudioQueueGeneration;      int32_t mVideoQueueGeneration; +    int32_t mAudioDrainGeneration; +    int32_t mVideoDrainGeneration; + +    sp<MediaClock> mMediaClock; +    float mPlaybackRate; // audio track rate + +    AudioPlaybackRate mPlaybackSettings; +    AVSyncSettings mSyncSettings; +    float mVideoFpsHint; -    Mutex mTimeLock; -    // |mTimeLock| protects the following 7 member vars that are related to time. -    // Note: those members are only written on Renderer thread, so reading on Renderer thread -    // doesn't need to be protected. Otherwise accessing those members must be protected by -    // |mTimeLock|. -    // TODO: move those members to a seperated media clock class.      int64_t mAudioFirstAnchorTimeMediaUs;      int64_t mAnchorTimeMediaUs; -    int64_t mAnchorTimeRealUs;      int64_t mAnchorNumFramesWritten; -    int64_t mAnchorMaxMediaUs;      int64_t mVideoLateByUs;      bool mHasAudio;      bool mHasVideo; -    int64_t mPauseStartedTimeRealUs; -    Mutex mFlushLock;  // protects the following 2 member vars. -    bool mFlushingAudio; -    bool mFlushingVideo;      bool mNotifyCompleteAudio;      bool mNotifyCompleteVideo; @@ -169,7 +170,6 @@ private:      // modified on only renderer's thread.      
bool mPaused; -    int64_t mPausePositionMediaTimeUs;      bool mVideoSampleReceived;      bool mVideoRenderingStarted; @@ -179,7 +179,7 @@ private:      int64_t mLastPositionUpdateUs;      int32_t mAudioOffloadPauseTimeoutGeneration; -    bool mAudioOffloadTornDown; +    bool mAudioTornDown;      audio_offload_info_t mCurrentOffloadInfo;      struct PcmInfo { @@ -194,6 +194,7 @@ private:      int32_t mTotalBuffersQueued;      int32_t mLastAudioBufferDrained; +    bool mUseAudioCallback;      sp<AWakeLock> mWakeLock; @@ -207,18 +208,24 @@ private:      size_t fillAudioBuffer(void *buffer, size_t size);      bool onDrainAudioQueue(); +    void drainAudioQueueUntilLastEOS();      int64_t getPendingAudioPlayoutDurationUs(int64_t nowUs);      int64_t getPlayedOutAudioDurationUs(int64_t nowUs);      void postDrainAudioQueue_l(int64_t delayUs = 0); +    void clearAnchorTime_l(); +    void clearAudioFirstAnchorTime_l(); +    void setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs); +    void setVideoLateByUs(int64_t lateUs); +      void onNewAudioMediaTime(int64_t mediaTimeUs);      int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs);      void onDrainVideoQueue(); -    void postDrainVideoQueue_l(); +    void postDrainVideoQueue(); -    void prepareForMediaRenderingStart(); -    void notifyIfMediaRenderingStarted(); +    void prepareForMediaRenderingStart_l(); +    void notifyIfMediaRenderingStarted_l();      void onQueueBuffer(const sp<AMessage> &msg);      void onQueueEOS(const sp<AMessage> &msg); @@ -226,10 +233,18 @@ private:      void onAudioSinkChanged();      void onDisableOffloadAudio();      void onEnableOffloadAudio(); +    status_t onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */); +    status_t onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */); +    status_t onConfigSync(const AVSyncSettings &sync, float videoFpsHint); +    status_t onGetSyncSettings(AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */); +      void 
onPause();      void onResume();      void onSetVideoFrameRate(float fps); -    void onAudioOffloadTearDown(AudioOffloadTearDownReason reason); +    int32_t getQueueGeneration(bool audio); +    int32_t getDrainGeneration(bool audio); +    bool getSyncQueues(); +    void onAudioTearDown(AudioTearDownReason reason);      status_t onOpenAudioSink(              const sp<AMessage> &format,              bool offloadOnly, @@ -242,10 +257,10 @@ private:      void notifyPosition();      void notifyVideoLateBy(int64_t lateByUs);      void notifyVideoRenderingStart(); -    void notifyAudioOffloadTearDown(); +    void notifyAudioTearDown();      void flushQueue(List<QueueEntry> *queue); -    bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg); +    bool dropBufferIfStale(bool audio, const sp<AMessage> &msg);      void syncQueuesDone_l();      bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; } @@ -253,9 +268,11 @@ private:      void startAudioOffloadPauseTimeout();      void cancelAudioOffloadPauseTimeout(); +    int64_t getDurationUsIfPlayedAtSampleRate(uint32_t numFrames); +      DISALLOW_EVIL_CONSTRUCTORS(Renderer);  }; -}  // namespace android +} // namespace android  #endif  // NUPLAYER_RENDERER_H_ diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h index d9f14a2..11a6a9f 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -28,7 +28,7 @@  namespace android {  struct ABuffer; -struct MediaBuffer; +class MediaBuffer;  struct NuPlayer::Source : public AHandler {      enum Flags { @@ -53,6 +53,7 @@ struct NuPlayer::Source : public AHandler {          kWhatCacheStats,          kWhatSubtitleData,          kWhatTimedTextData, +        kWhatTimedMetaData,          kWhatQueueDecoderShutdown,          kWhatDrmNoLicense,          kWhatInstantiateSecureDecoders, @@ -117,6 +118,10 @@ struct 
NuPlayer::Source : public AHandler {          return false;      } +    virtual bool isStreaming() const { +        return true; +    } +  protected:      virtual ~Source() {} diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp index 885ebe4..f53afbd 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp @@ -29,9 +29,9 @@ namespace android {  NuPlayer::NuPlayerStreamListener::NuPlayerStreamListener(          const sp<IStreamSource> &source, -        ALooper::handler_id id) +        const sp<AHandler> &targetHandler)      : mSource(source), -      mTargetID(id), +      mTargetHandler(targetHandler),        mEOS(false),        mSendDataNotification(true) {      mSource->setListener(this); @@ -65,8 +65,8 @@ void NuPlayer::NuPlayerStreamListener::queueBuffer(size_t index, size_t size) {      if (mSendDataNotification) {          mSendDataNotification = false; -        if (mTargetID != 0) { -            (new AMessage(kWhatMoreDataQueued, mTargetID))->post(); +        if (mTargetHandler != NULL) { +            (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post();          }      }  } @@ -86,8 +86,8 @@ void NuPlayer::NuPlayerStreamListener::issueCommand(      if (mSendDataNotification) {          mSendDataNotification = false; -        if (mTargetID != 0) { -            (new AMessage(kWhatMoreDataQueued, mTargetID))->post(); +        if (mTargetHandler != NULL) { +            (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post();          }      }  } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h index 1874d80..2de829b 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h @@ -29,7 +29,7 @@ struct 
MemoryDealer;  struct NuPlayer::NuPlayerStreamListener : public BnStreamListener {      NuPlayerStreamListener(              const sp<IStreamSource> &source, -            ALooper::handler_id targetID); +            const sp<AHandler> &targetHandler);      virtual void queueBuffer(size_t index, size_t size); @@ -59,7 +59,7 @@ private:      Mutex mLock;      sp<IStreamSource> mSource; -    ALooper::handler_id mTargetID; +    sp<AHandler> mTargetHandler;      sp<MemoryDealer> mMemoryDealer;      Vector<sp<IMemory> > mBuffers;      List<QueueEntry> mQueue; diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index 0282a9f..58ff113 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -87,7 +87,7 @@ void NuPlayer::RTSPSource::prepareAsync() {      CHECK(mHandler == NULL);      CHECK(mSDPLoader == NULL); -    sp<AMessage> notify = new AMessage(kWhatNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatNotify, this);      CHECK_EQ(mState, (int)DISCONNECTED);      mState = CONNECTING; @@ -116,7 +116,7 @@ void NuPlayer::RTSPSource::stop() {      if (mLooper == NULL) {          return;      } -    sp<AMessage> msg = new AMessage(kWhatDisconnect, id()); +    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);      sp<AMessage> dummy;      msg->postAndAwaitResponse(&dummy); @@ -138,7 +138,9 @@ void NuPlayer::RTSPSource::pause() {  }  void NuPlayer::RTSPSource::resume() { -    mHandler->resume(); +    if (mHandler != NULL) { +        mHandler->resume(); +    }  }  status_t NuPlayer::RTSPSource::feedMoreTSData() { @@ -292,16 +294,22 @@ status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) {  }  status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) { -    sp<AMessage> msg = new AMessage(kWhatPerformSeek, id()); +    sp<AMessage> msg = new AMessage(kWhatPerformSeek, this);      msg->setInt32("generation", 
++mSeekGeneration);      msg->setInt64("timeUs", seekTimeUs); -    msg->post(200000ll); -    return OK; +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); +    if (err == OK && response != NULL) { +        CHECK(response->findInt32("err", &err)); +    } + +    return err;  }  void NuPlayer::RTSPSource::performSeek(int64_t seekTimeUs) {      if (mState != CONNECTED) { +        finishSeek(INVALID_OPERATION);          return;      } @@ -311,7 +319,7 @@ void NuPlayer::RTSPSource::performSeek(int64_t seekTimeUs) {  void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {      if (msg->what() == kWhatDisconnect) { -        uint32_t replyID; +        sp<AReplyToken> replyID;          CHECK(msg->senderAwaitsResponse(&replyID));          mDisconnectReplyID = replyID; @@ -320,9 +328,11 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {      } else if (msg->what() == kWhatPerformSeek) {          int32_t generation;          CHECK(msg->findInt32("generation", &generation)); +        CHECK(msg->senderAwaitsResponse(&mSeekReplyID));          if (generation != mSeekGeneration) {              // obsolete. 
+            finishSeek(OK);              return;          } @@ -368,6 +378,37 @@ void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) {          case MyHandler::kWhatSeekDone:          {              mState = CONNECTED; +            if (mSeekReplyID != NULL) { +                // Unblock seekTo here in case we attempted to seek in a live stream +                finishSeek(OK); +            } +            break; +        } + +        case MyHandler::kWhatSeekPaused: +        { +            sp<AnotherPacketSource> source = getSource(true /* audio */); +            if (source != NULL) { +                source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE, +                        /* extra */ NULL, +                        /* discard */ true); +            } +            source = getSource(false /* video */); +            if (source != NULL) { +                source->queueDiscontinuity(ATSParser::DISCONTINUITY_NONE, +                        /* extra */ NULL, +                        /* discard */ true); +            }; + +            status_t err = OK; +            msg->findInt32("err", &err); +            finishSeek(err); + +            if (err == OK) { +                int64_t timeUs; +                CHECK(msg->findInt64("time", &timeUs)); +                mHandler->continueSeekAfterPause(timeUs); +            }              break;          } @@ -600,7 +641,7 @@ void NuPlayer::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) {              ALOGE("Unable to find url in SDP");              err = UNKNOWN_ERROR;          } else { -            sp<AMessage> notify = new AMessage(kWhatNotify, id()); +            sp<AMessage> notify = new AMessage(kWhatNotify, this);              mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID);              mLooper->registerHandler(mHandler); @@ -700,5 +741,12 @@ bool NuPlayer::RTSPSource::stopBufferingIfNecessary() {      return true;  } +void NuPlayer::RTSPSource::finishSeek(status_t err) { +    
CHECK(mSeekReplyID != NULL); +    sp<AMessage> seekReply = new AMessage; +    seekReply->setInt32("err", err); +    seekReply->postReply(mSeekReplyID); +    mSeekReplyID = NULL; +}  }  // namespace android diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index ac3299a..6438a1e 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ -25,6 +25,7 @@  namespace android {  struct ALooper; +struct AReplyToken;  struct AnotherPacketSource;  struct MyHandler;  struct SDPLoader; @@ -96,7 +97,7 @@ private:      bool mIsSDP;      State mState;      status_t mFinalResult; -    uint32_t mDisconnectReplyID; +    sp<AReplyToken> mDisconnectReplyID;      Mutex mBufferingLock;      bool mBuffering; @@ -115,6 +116,8 @@ private:      int64_t mEOSTimeoutAudio;      int64_t mEOSTimeoutVideo; +    sp<AReplyToken> mSeekReplyID; +      sp<AnotherPacketSource> getSource(bool audio);      void onConnected(); @@ -130,6 +133,7 @@ private:      void setError(status_t err);      void startBufferingIfNecessary();      bool stopBufferingIfNecessary(); +    void finishSeek(status_t err);      DISALLOW_EVIL_CONSTRUCTORS(RTSPSource);  }; diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp index b3f224d..0246b59 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp @@ -63,7 +63,7 @@ void NuPlayer::StreamingSource::prepareAsync() {  }  void NuPlayer::StreamingSource::start() { -    mStreamListener = new NuPlayerStreamListener(mSource, 0); +    mStreamListener = new NuPlayerStreamListener(mSource, NULL);      uint32_t sourceFlags = mSource->flags(); @@ -163,7 +163,7 @@ status_t NuPlayer::StreamingSource::postReadBuffer() {          mBuffering = true;      } -    (new AMessage(kWhatReadBuffer, id()))->post(); +    (new 
AMessage(kWhatReadBuffer, this))->post();      return OK;  } diff --git a/media/libmediaplayerservice/tests/Android.mk b/media/libmediaplayerservice/tests/Android.mk new file mode 100644 index 0000000..8cbf782 --- /dev/null +++ b/media/libmediaplayerservice/tests/Android.mk @@ -0,0 +1,27 @@ +# Build the unit tests. +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_MODULE := DrmSessionManager_test + +LOCAL_MODULE_TAGS := tests + +LOCAL_SRC_FILES := \ +	DrmSessionManager_test.cpp \ + +LOCAL_SHARED_LIBRARIES := \ +	liblog \ +	libmediaplayerservice \ +	libutils \ + +LOCAL_C_INCLUDES := \ +	frameworks/av/include \ +	frameworks/av/media/libmediaplayerservice \ + +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true + +LOCAL_32_BIT_ONLY := true + +include $(BUILD_NATIVE_TEST) + diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp new file mode 100644 index 0000000..de350a1 --- /dev/null +++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp @@ -0,0 +1,249 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "DrmSessionManager_test" +#include <utils/Log.h> + +#include <gtest/gtest.h> + +#include "Drm.h" +#include "DrmSessionClientInterface.h" +#include "DrmSessionManager.h" +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/ProcessInfoInterface.h> + +namespace android { + +struct FakeProcessInfo : public ProcessInfoInterface { +    FakeProcessInfo() {} +    virtual ~FakeProcessInfo() {} + +    virtual bool getPriority(int pid, int* priority) { +        // For testing, use pid as priority. +        // Lower the value higher the priority. +        *priority = pid; +        return true; +    } + +private: +    DISALLOW_EVIL_CONSTRUCTORS(FakeProcessInfo); +}; + +struct FakeDrm : public DrmSessionClientInterface { +    FakeDrm() {} +    virtual ~FakeDrm() {} + +    virtual bool reclaimSession(const Vector<uint8_t>& sessionId) { +        mReclaimedSessions.push_back(sessionId); +        return true; +    } + +    const Vector<Vector<uint8_t> >& reclaimedSessions() const { +        return mReclaimedSessions; +    } + +private: +    Vector<Vector<uint8_t> > mReclaimedSessions; + +    DISALLOW_EVIL_CONSTRUCTORS(FakeDrm); +}; + +static const int kTestPid1 = 30; +static const int kTestPid2 = 20; +static const uint8_t kTestSessionId1[] = {1, 2, 3}; +static const uint8_t kTestSessionId2[] = {4, 5, 6, 7, 8}; +static const uint8_t kTestSessionId3[] = {9, 0}; + +class DrmSessionManagerTest : public ::testing::Test { +public: +    DrmSessionManagerTest() +        : mDrmSessionManager(new DrmSessionManager(new FakeProcessInfo())), +          mTestDrm1(new FakeDrm()), +          mTestDrm2(new FakeDrm()) { +        GetSessionId(kTestSessionId1, ARRAY_SIZE(kTestSessionId1), &mSessionId1); +        GetSessionId(kTestSessionId2, ARRAY_SIZE(kTestSessionId2), &mSessionId2); +        GetSessionId(kTestSessionId3, ARRAY_SIZE(kTestSessionId3), &mSessionId3); +    } + +protected: +    static void GetSessionId(const 
uint8_t* ids, size_t num, Vector<uint8_t>* sessionId) { +        for (size_t i = 0; i < num; ++i) { +            sessionId->push_back(ids[i]); +        } +    } + +    static void ExpectEqSessionInfo(const SessionInfo& info, sp<DrmSessionClientInterface> drm, +            const Vector<uint8_t>& sessionId, int64_t timeStamp) { +        EXPECT_EQ(drm, info.drm); +        EXPECT_TRUE(isEqualSessionId(sessionId, info.sessionId)); +        EXPECT_EQ(timeStamp, info.timeStamp); +    } + +    void addSession() { +        mDrmSessionManager->addSession(kTestPid1, mTestDrm1, mSessionId1); +        mDrmSessionManager->addSession(kTestPid2, mTestDrm2, mSessionId2); +        mDrmSessionManager->addSession(kTestPid2, mTestDrm2, mSessionId3); +        const PidSessionInfosMap& map = sessionMap(); +        EXPECT_EQ(2u, map.size()); +        ssize_t index1 = map.indexOfKey(kTestPid1); +        ASSERT_GE(index1, 0); +        const SessionInfos& infos1 = map[index1]; +        EXPECT_EQ(1u, infos1.size()); +        ExpectEqSessionInfo(infos1[0], mTestDrm1, mSessionId1, 0); + +        ssize_t index2 = map.indexOfKey(kTestPid2); +        ASSERT_GE(index2, 0); +        const SessionInfos& infos2 = map[index2]; +        EXPECT_EQ(2u, infos2.size()); +        ExpectEqSessionInfo(infos2[0], mTestDrm2, mSessionId2, 1); +        ExpectEqSessionInfo(infos2[1], mTestDrm2, mSessionId3, 2); +    } + +    const PidSessionInfosMap& sessionMap() { +        return mDrmSessionManager->mSessionMap; +    } + +    void testGetLowestPriority() { +        int pid; +        int priority; +        EXPECT_FALSE(mDrmSessionManager->getLowestPriority_l(&pid, &priority)); + +        addSession(); +        EXPECT_TRUE(mDrmSessionManager->getLowestPriority_l(&pid, &priority)); + +        EXPECT_EQ(kTestPid1, pid); +        FakeProcessInfo processInfo; +        int priority1; +        processInfo.getPriority(kTestPid1, &priority1); +        EXPECT_EQ(priority1, priority); +    } + +    void 
testGetLeastUsedSession() { +        sp<DrmSessionClientInterface> drm; +        Vector<uint8_t> sessionId; +        EXPECT_FALSE(mDrmSessionManager->getLeastUsedSession_l(kTestPid1, &drm, &sessionId)); + +        addSession(); + +        EXPECT_TRUE(mDrmSessionManager->getLeastUsedSession_l(kTestPid1, &drm, &sessionId)); +        EXPECT_EQ(mTestDrm1, drm); +        EXPECT_TRUE(isEqualSessionId(mSessionId1, sessionId)); + +        EXPECT_TRUE(mDrmSessionManager->getLeastUsedSession_l(kTestPid2, &drm, &sessionId)); +        EXPECT_EQ(mTestDrm2, drm); +        EXPECT_TRUE(isEqualSessionId(mSessionId2, sessionId)); + +        // mSessionId2 is no longer the least used session. +        mDrmSessionManager->useSession(mSessionId2); +        EXPECT_TRUE(mDrmSessionManager->getLeastUsedSession_l(kTestPid2, &drm, &sessionId)); +        EXPECT_EQ(mTestDrm2, drm); +        EXPECT_TRUE(isEqualSessionId(mSessionId3, sessionId)); +    } + +    sp<DrmSessionManager> mDrmSessionManager; +    sp<FakeDrm> mTestDrm1; +    sp<FakeDrm> mTestDrm2; +    Vector<uint8_t> mSessionId1; +    Vector<uint8_t> mSessionId2; +    Vector<uint8_t> mSessionId3; +}; + +TEST_F(DrmSessionManagerTest, addSession) { +    addSession(); +} + +TEST_F(DrmSessionManagerTest, useSession) { +    addSession(); + +    mDrmSessionManager->useSession(mSessionId1); +    mDrmSessionManager->useSession(mSessionId3); + +    const PidSessionInfosMap& map = sessionMap(); +    const SessionInfos& infos1 = map.valueFor(kTestPid1); +    const SessionInfos& infos2 = map.valueFor(kTestPid2); +    ExpectEqSessionInfo(infos1[0], mTestDrm1, mSessionId1, 3); +    ExpectEqSessionInfo(infos2[1], mTestDrm2, mSessionId3, 4); +} + +TEST_F(DrmSessionManagerTest, removeSession) { +    addSession(); + +    mDrmSessionManager->removeSession(mSessionId2); + +    const PidSessionInfosMap& map = sessionMap(); +    EXPECT_EQ(2u, map.size()); +    const SessionInfos& infos1 = map.valueFor(kTestPid1); +    const SessionInfos& infos2 = 
map.valueFor(kTestPid2); +    EXPECT_EQ(1u, infos1.size()); +    EXPECT_EQ(1u, infos2.size()); +    // mSessionId2 has been removed. +    ExpectEqSessionInfo(infos2[0], mTestDrm2, mSessionId3, 2); +} + +TEST_F(DrmSessionManagerTest, removeDrm) { +    addSession(); + +    sp<FakeDrm> drm = new FakeDrm; +    const uint8_t ids[] = {123}; +    Vector<uint8_t> sessionId; +    GetSessionId(ids, ARRAY_SIZE(ids), &sessionId); +    mDrmSessionManager->addSession(kTestPid2, drm, sessionId); + +    mDrmSessionManager->removeDrm(mTestDrm2); + +    const PidSessionInfosMap& map = sessionMap(); +    const SessionInfos& infos2 = map.valueFor(kTestPid2); +    EXPECT_EQ(1u, infos2.size()); +    // mTestDrm2 has been removed. +    ExpectEqSessionInfo(infos2[0], drm, sessionId, 3); +} + +TEST_F(DrmSessionManagerTest, reclaimSession) { +    EXPECT_FALSE(mDrmSessionManager->reclaimSession(kTestPid1)); +    addSession(); + +    // calling pid priority is too low +    EXPECT_FALSE(mDrmSessionManager->reclaimSession(50)); + +    EXPECT_TRUE(mDrmSessionManager->reclaimSession(10)); +    EXPECT_EQ(1u, mTestDrm1->reclaimedSessions().size()); +    EXPECT_TRUE(isEqualSessionId(mSessionId1, mTestDrm1->reclaimedSessions()[0])); + +    mDrmSessionManager->removeSession(mSessionId1); + +    // add a session from a higher priority process. +    sp<FakeDrm> drm = new FakeDrm; +    const uint8_t ids[] = {1, 3, 5}; +    Vector<uint8_t> sessionId; +    GetSessionId(ids, ARRAY_SIZE(ids), &sessionId); +    mDrmSessionManager->addSession(15, drm, sessionId); + +    EXPECT_TRUE(mDrmSessionManager->reclaimSession(18)); +    EXPECT_EQ(1u, mTestDrm2->reclaimedSessions().size()); +    // mSessionId2 is reclaimed. 
+    EXPECT_TRUE(isEqualSessionId(mSessionId2, mTestDrm2->reclaimedSessions()[0])); +} + +TEST_F(DrmSessionManagerTest, getLowestPriority) { +    testGetLowestPriority(); +} + +TEST_F(DrmSessionManagerTest, getLeastUsedSession_l) { +    testGetLeastUsedSession(); +} + +} // namespace android diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk index 9707c4a..1353f28 100644 --- a/media/libnbaio/Android.mk +++ b/media/libnbaio/Android.mk @@ -11,7 +11,6 @@ LOCAL_SRC_FILES := \      MonoPipeReader.cpp              \      Pipe.cpp                        \      PipeReader.cpp                  \ -    roundup.c                       \      SourceAudioBufferProvider.cpp  LOCAL_SRC_FILES += NBLog.cpp @@ -27,12 +26,13 @@ LOCAL_SRC_FILES += NBLog.cpp  LOCAL_MODULE := libnbaio  LOCAL_SHARED_LIBRARIES := \ +    libaudioutils \      libbinder \      libcommon_time_client \      libcutils \      libutils \      liblog -LOCAL_STATIC_LIBRARIES += libinstantssq +LOCAL_C_INCLUDES := $(call include-path-for, audio-utils)  include $(BUILD_SHARED_LIBRARY) diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp index 0b65861..129e9ef 100644 --- a/media/libnbaio/MonoPipe.cpp +++ b/media/libnbaio/MonoPipe.cpp @@ -27,7 +27,7 @@  #include <utils/Trace.h>  #include <media/AudioBufferProvider.h>  #include <media/nbaio/MonoPipe.h> -#include <media/nbaio/roundup.h> +#include <audio_utils/roundup.h>  namespace android { diff --git a/media/libnbaio/MonoPipeReader.cpp b/media/libnbaio/MonoPipeReader.cpp index de82229..e4d3ed8 100644 --- a/media/libnbaio/MonoPipeReader.cpp +++ b/media/libnbaio/MonoPipeReader.cpp @@ -39,7 +39,7 @@ ssize_t MonoPipeReader::availableToRead()          return NEGOTIATE;      }      ssize_t ret = android_atomic_acquire_load(&mPipe->mRear) - mPipe->mFront; -    ALOG_ASSERT((0 <= ret) && (ret <= mMaxFrames)); +    ALOG_ASSERT((0 <= ret) && ((size_t) ret <= mPipe->mMaxFrames));      return ret;  } diff --git a/media/libnbaio/Pipe.cpp 
b/media/libnbaio/Pipe.cpp index 6e0ec8c..13f211d 100644 --- a/media/libnbaio/Pipe.cpp +++ b/media/libnbaio/Pipe.cpp @@ -21,7 +21,7 @@  #include <cutils/compiler.h>  #include <utils/Log.h>  #include <media/nbaio/Pipe.h> -#include <media/nbaio/roundup.h> +#include <audio_utils/roundup.h>  namespace android { diff --git a/media/libnbaio/roundup.c b/media/libnbaio/roundup.c deleted file mode 100644 index 1d552d1..0000000 --- a/media/libnbaio/roundup.c +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - *      http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#include <media/nbaio/roundup.h> - -unsigned roundup(unsigned v) -{ -    // __builtin_clz is undefined for zero input -    if (v == 0) { -        v = 1; -    } -    int lz = __builtin_clz((int) v); -    unsigned rounded = ((unsigned) 0x80000000) >> lz; -    // 0x800000001 and higher are actually rounded _down_ to prevent overflow -    if (v > rounded && lz > 0) { -        rounded <<= 1; -    } -    return rounded; -} diff --git a/media/libstagefright/AACExtractor.cpp b/media/libstagefright/AACExtractor.cpp index 196f6ee..45e8a30 100644 --- a/media/libstagefright/AACExtractor.cpp +++ b/media/libstagefright/AACExtractor.cpp @@ -360,7 +360,7 @@ bool SniffAAC(          pos += len;          ALOGV("skipped ID3 tag, new starting offset is %lld (0x%016llx)", -             pos, pos); +                (long long)pos, (long long)pos);      }      uint8_t header[2]; diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp index 2e41d80..9d90dbd 100644 --- a/media/libstagefright/AACWriter.cpp +++ b/media/libstagefright/AACWriter.cpp @@ -36,33 +36,19 @@  namespace android { -AACWriter::AACWriter(const char *filename) -    : mFd(-1), -      mInitCheck(NO_INIT), -      mStarted(false), -      mPaused(false), -      mResumed(false), -      mChannelCount(-1), -      mSampleRate(-1), -      mAACProfile(OMX_AUDIO_AACObjectLC) { - -    ALOGV("AACWriter Constructor"); - -    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); -    if (mFd >= 0) { -        mInitCheck = OK; -    } -} -  AACWriter::AACWriter(int fd)      : mFd(dup(fd)),        mInitCheck(mFd < 0? 
NO_INIT: OK),        mStarted(false),        mPaused(false),        mResumed(false), +      mThread(0), +      mEstimatedSizeBytes(0), +      mEstimatedDurationUs(0),        mChannelCount(-1),        mSampleRate(-1), -      mAACProfile(OMX_AUDIO_AACObjectLC) { +      mAACProfile(OMX_AUDIO_AACObjectLC), +      mFrameDurationUs(0) {  }  AACWriter::~AACWriter() { diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index d49594f..8d9bd21 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -24,6 +24,8 @@  #include <inttypes.h>  #include <utils/Trace.h> +#include <gui/Surface.h> +  #include <media/stagefright/ACodec.h>  #include <binder/MemoryDealer.h> @@ -35,18 +37,20 @@  #include <media/stagefright/foundation/AUtils.h>  #include <media/stagefright/BufferProducerWrapper.h> +#include <media/stagefright/MediaCodec.h>  #include <media/stagefright/MediaCodecList.h>  #include <media/stagefright/MediaDefs.h> -#include <media/stagefright/NativeWindowWrapper.h>  #include <media/stagefright/OMXClient.h>  #include <media/stagefright/OMXCodec.h> - +#include <media/stagefright/PersistentSurface.h> +#include <media/stagefright/SurfaceUtils.h>  #include <media/hardware/HardwareAPI.h>  #include <OMX_AudioExt.h>  #include <OMX_VideoExt.h>  #include <OMX_Component.h>  #include <OMX_IndexExt.h> +#include <OMX_AsString.h>  #include "include/avc_utils.h" @@ -103,6 +107,18 @@ static void InitOMXParams(T *params) {      params->nVersion.s.nStep = 0;  } +struct MessageList : public RefBase { +    MessageList() { +    } +    virtual ~MessageList() { +    } +    std::list<sp<AMessage> > &getList() { return mList; } +private: +    std::list<sp<AMessage> > mList; + +    DISALLOW_EVIL_CONSTRUCTORS(MessageList); +}; +  struct CodecObserver : public BnOMXObserver {      CodecObserver() {} @@ -111,52 +127,78 @@ struct CodecObserver : public BnOMXObserver {      }      // from IOMXObserver -    virtual void onMessage(const omx_message 
&omx_msg) { -        sp<AMessage> msg = mNotify->dup(); - -        msg->setInt32("type", omx_msg.type); -        msg->setInt32("node", omx_msg.node); - -        switch (omx_msg.type) { -            case omx_message::EVENT: -            { -                msg->setInt32("event", omx_msg.u.event_data.event); -                msg->setInt32("data1", omx_msg.u.event_data.data1); -                msg->setInt32("data2", omx_msg.u.event_data.data2); -                break; -            } +    virtual void onMessages(const std::list<omx_message> &messages) { +        if (messages.empty()) { +            return; +        } -            case omx_message::EMPTY_BUFFER_DONE: -            { -                msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); -                break; +        sp<AMessage> notify = mNotify->dup(); +        bool first = true; +        sp<MessageList> msgList = new MessageList(); +        for (std::list<omx_message>::const_iterator it = messages.cbegin(); +              it != messages.cend(); ++it) { +            const omx_message &omx_msg = *it; +            if (first) { +                notify->setInt32("node", omx_msg.node); +                first = false;              } -            case omx_message::FILL_BUFFER_DONE: -            { -                msg->setInt32( -                        "buffer", omx_msg.u.extended_buffer_data.buffer); -                msg->setInt32( -                        "range_offset", -                        omx_msg.u.extended_buffer_data.range_offset); -                msg->setInt32( -                        "range_length", -                        omx_msg.u.extended_buffer_data.range_length); -                msg->setInt32( -                        "flags", -                        omx_msg.u.extended_buffer_data.flags); -                msg->setInt64( -                        "timestamp", -                        omx_msg.u.extended_buffer_data.timestamp); -                break; -            } +            
sp<AMessage> msg = new AMessage; +            msg->setInt32("type", omx_msg.type); +            switch (omx_msg.type) { +                case omx_message::EVENT: +                { +                    msg->setInt32("event", omx_msg.u.event_data.event); +                    msg->setInt32("data1", omx_msg.u.event_data.data1); +                    msg->setInt32("data2", omx_msg.u.event_data.data2); +                    break; +                } -            default: -                TRESPASS(); -                break; -        } +                case omx_message::EMPTY_BUFFER_DONE: +                { +                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); +                    msg->setInt32("fence_fd", omx_msg.fenceFd); +                    break; +                } -        msg->post(); +                case omx_message::FILL_BUFFER_DONE: +                { +                    msg->setInt32( +                            "buffer", omx_msg.u.extended_buffer_data.buffer); +                    msg->setInt32( +                            "range_offset", +                            omx_msg.u.extended_buffer_data.range_offset); +                    msg->setInt32( +                            "range_length", +                            omx_msg.u.extended_buffer_data.range_length); +                    msg->setInt32( +                            "flags", +                            omx_msg.u.extended_buffer_data.flags); +                    msg->setInt64( +                            "timestamp", +                            omx_msg.u.extended_buffer_data.timestamp); +                    msg->setInt32( +                            "fence_fd", omx_msg.fenceFd); +                    break; +                } + +                case omx_message::FRAME_RENDERED: +                { +                    msg->setInt64( +                            "media_time_us", omx_msg.u.render_data.timestamp); +                    msg->setInt64( +                          
  "system_nano", omx_msg.u.render_data.nanoTime); +                    break; +                } + +                default: +                    ALOGE("Unrecognized message type: %d", omx_msg.type); +                    break; +            } +            msgList->getList().push_back(msg); +        } +        notify->setObject("messages", msgList); +        notify->post();      }  protected: @@ -194,15 +236,25 @@ protected:      void postFillThisBuffer(BufferInfo *info);  private: +    // Handles an OMX message. Returns true iff message was handled.      bool onOMXMessage(const sp<AMessage> &msg); -    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID); +    // Handles a list of messages. Returns true iff messages were handled. +    bool onOMXMessageList(const sp<AMessage> &msg); + +    // returns true iff this message is for this component and the component is alive +    bool checkOMXMessage(const sp<AMessage> &msg); + +    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);      bool onOMXFillBufferDone(              IOMX::buffer_id bufferID,              size_t rangeOffset, size_t rangeLength,              OMX_U32 flags, -            int64_t timeUs); +            int64_t timeUs, +            int fenceFd); + +    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);      void getMoreInputDataIfPossible(); @@ -259,9 +311,12 @@ private:      bool onConfigureComponent(const sp<AMessage> &msg);      void onCreateInputSurface(const sp<AMessage> &msg); +    void onSetInputSurface(const sp<AMessage> &msg);      void onStart();      void onShutdown(bool keepComponentAllocated); +    status_t setupInputSurface(); +      DISALLOW_EVIL_CONSTRUCTORS(LoadedState);  }; @@ -317,6 +372,7 @@ protected:      virtual void stateEntered();      virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); +    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);  private:      bool mActive; @@ -335,6 +391,7 @@ 
protected:      virtual void stateEntered();      virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); +    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);  private:      DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); @@ -401,12 +458,45 @@ private:  //////////////////////////////////////////////////////////////////////////////// +void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { +    if (mFenceFd >= 0) { +        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", +                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); +    } +    mFenceFd = fenceFd; +    mIsReadFence = false; +} + +void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { +    if (mFenceFd >= 0) { +        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", +                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); +    } +    mFenceFd = fenceFd; +    mIsReadFence = true; +} + +void ACodec::BufferInfo::checkWriteFence(const char *dbg) { +    if (mFenceFd >= 0 && mIsReadFence) { +        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); +    } +} + +void ACodec::BufferInfo::checkReadFence(const char *dbg) { +    if (mFenceFd >= 0 && !mIsReadFence) { +        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); +    } +} + +//////////////////////////////////////////////////////////////////////////////// +  ACodec::ACodec()      : mQuirks(0),        mNode(0), +      mNativeWindowUsageBits(0),        mSentFormat(false), +      mIsVideo(false),        mIsEncoder(false), -      mUseMetadataOnEncoderOutput(false),        mFatalError(false),        mShutdownInProgress(false),        mExplicitShutdown(false), @@ -416,10 +506,13 @@ ACodec::ACodec()        mChannelMaskPresent(false),        mChannelMask(0),        mDequeueCounter(0), -      mStoreMetaDataInOutputBuffers(false), -      mMetaDataBuffersToSubmit(0), +      
mInputMetadataType(kMetadataBufferTypeInvalid), +      mOutputMetadataType(kMetadataBufferTypeInvalid), +      mLegacyAdaptiveExperiment(false), +      mMetadataBuffersToSubmit(0),        mRepeatFrameDelayUs(-1ll),        mMaxPtsGapUs(-1ll), +      mMaxFps(-1),        mTimePerFrameUs(-1ll),        mTimePerCaptureUs(-1ll),        mCreateInputBuffersSuspended(false), @@ -452,61 +545,81 @@ void ACodec::setNotificationMessage(const sp<AMessage> &msg) {  void ACodec::initiateSetup(const sp<AMessage> &msg) {      msg->setWhat(kWhatSetup); -    msg->setTarget(id()); +    msg->setTarget(this);      msg->post();  }  void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { -    sp<AMessage> msg = new AMessage(kWhatSetParameters, id()); +    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);      msg->setMessage("params", params);      msg->post();  }  void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {      msg->setWhat(kWhatAllocateComponent); -    msg->setTarget(id()); +    msg->setTarget(this);      msg->post();  }  void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {      msg->setWhat(kWhatConfigureComponent); -    msg->setTarget(id()); +    msg->setTarget(this);      msg->post();  } +status_t ACodec::setSurface(const sp<Surface> &surface) { +    sp<AMessage> msg = new AMessage(kWhatSetSurface, this); +    msg->setObject("surface", surface); + +    sp<AMessage> response; +    status_t err = msg->postAndAwaitResponse(&response); + +    if (err == OK) { +        (void)response->findInt32("err", &err); +    } +    return err; +} +  void ACodec::initiateCreateInputSurface() { -    (new AMessage(kWhatCreateInputSurface, id()))->post(); +    (new AMessage(kWhatCreateInputSurface, this))->post(); +} + +void ACodec::initiateSetInputSurface( +        const sp<PersistentSurface> &surface) { +    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); +    msg->setObject("input-surface", surface); +    msg->post();  }  void 
ACodec::signalEndOfInputStream() { -    (new AMessage(kWhatSignalEndOfInputStream, id()))->post(); +    (new AMessage(kWhatSignalEndOfInputStream, this))->post();  }  void ACodec::initiateStart() { -    (new AMessage(kWhatStart, id()))->post(); +    (new AMessage(kWhatStart, this))->post();  }  void ACodec::signalFlush() {      ALOGV("[%s] signalFlush", mComponentName.c_str()); -    (new AMessage(kWhatFlush, id()))->post(); +    (new AMessage(kWhatFlush, this))->post();  }  void ACodec::signalResume() { -    (new AMessage(kWhatResume, id()))->post(); +    (new AMessage(kWhatResume, this))->post();  }  void ACodec::initiateShutdown(bool keepComponentAllocated) { -    sp<AMessage> msg = new AMessage(kWhatShutdown, id()); +    sp<AMessage> msg = new AMessage(kWhatShutdown, this);      msg->setInt32("keepComponentAllocated", keepComponentAllocated);      msg->post();      if (!keepComponentAllocated) {          // ensure shutdown completes in 3 seconds -        (new AMessage(kWhatReleaseCodecInstance, id()))->post(3000000); +        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);      }  }  void ACodec::signalRequestIDRFrame() { -    (new AMessage(kWhatRequestIDRFrame, id()))->post(); +    (new AMessage(kWhatRequestIDRFrame, this))->post();  }  // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** @@ -514,11 +627,138 @@ void ACodec::signalRequestIDRFrame() {  // This causes a halt if we already signaled an EOS on the input  // port.  For now keep submitting an output buffer if there was an  // EOS on the input port, but not yet on the output port. 
-void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() { +void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {      if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && -            mMetaDataBuffersToSubmit > 0) { -        (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, id()))->post(); +            mMetadataBuffersToSubmit > 0) { +        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); +    } +} + +status_t ACodec::handleSetSurface(const sp<Surface> &surface) { +    // allow keeping unset surface +    if (surface == NULL) { +        if (mNativeWindow != NULL) { +            ALOGW("cannot unset a surface"); +            return INVALID_OPERATION; +        } +        return OK; +    } + +    // cannot switch from bytebuffers to surface +    if (mNativeWindow == NULL) { +        ALOGW("component was not configured with a surface"); +        return INVALID_OPERATION; +    } + +    ANativeWindow *nativeWindow = surface.get(); +    // if we have not yet started the codec, we can simply set the native window +    if (mBuffers[kPortIndexInput].size() == 0) { +        mNativeWindow = surface; +        return OK; +    } + +    // we do not support changing a tunneled surface after start +    if (mTunneled) { +        ALOGW("cannot change tunneled surface"); +        return INVALID_OPERATION; +    } + +    int usageBits = 0; +    status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits); +    if (err != OK) { +        return err; +    } + +    int ignoredFlags = kVideoGrallocUsage; +    // New output surface is not allowed to add new usage flag except ignored ones. +    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { +        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); +        return BAD_VALUE; +    } + +    // get min undequeued count. We cannot switch to a surface that has a higher +    // undequeued count than we allocated. 
+    int minUndequeuedBuffers = 0; +    err = nativeWindow->query( +            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, +            &minUndequeuedBuffers); +    if (err != 0) { +        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", +                strerror(-err), -err); +        return err; +    } +    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { +        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", +                minUndequeuedBuffers, mNumUndequeuedBuffers); +        return BAD_VALUE;      } + +    // we cannot change the number of output buffers while OMX is running +    // set up surface to the same count +    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; +    ALOGV("setting up surface for %zu buffers", buffers.size()); + +    err = native_window_set_buffer_count(nativeWindow, buffers.size()); +    if (err != 0) { +        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), +                -err); +        return err; +    } + +    // need to enable allocation when attaching +    surface->getIGraphicBufferProducer()->allowAllocation(true); + +    // for meta data mode, we move dequeud buffers to the new surface. 
+    // for non-meta mode, we must move all registered buffers +    for (size_t i = 0; i < buffers.size(); ++i) { +        const BufferInfo &info = buffers[i]; +        // skip undequeued buffers for meta data mode +        if (storingMetadataInDecodedBuffers() +                && !mLegacyAdaptiveExperiment +                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { +            ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer()); +            continue; +        } +        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); + +        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); +        if (err != OK) { +            ALOGE("failed to attach buffer %p to the new surface: %s (%d)", +                    info.mGraphicBuffer->getNativeBuffer(), +                    strerror(-err), -err); +            return err; +        } +    } + +    // cancel undequeued buffers to new surface +    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { +        for (size_t i = 0; i < buffers.size(); ++i) { +            BufferInfo &info = buffers.editItemAt(i); +            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { +                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); +                err = nativeWindow->cancelBuffer( +                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); +                info.mFenceFd = -1; +                if (err != OK) { +                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", +                            info.mGraphicBuffer->getNativeBuffer(), +                            strerror(-err), -err); +                    return err; +                } +            } +        } +        // disallow further allocation +        (void)surface->getIGraphicBufferProducer()->allowAllocation(false); +    } + +    // push blank buffers to previous window if requested +    if 
(mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { +        pushBlankBuffersToNativeWindow(mNativeWindow.get()); +    } + +    mNativeWindow = nativeWindow; +    mNativeWindowUsageBits = usageBits; +    return OK;  }  status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { @@ -529,8 +769,8 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {      status_t err;      if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { -        if (mStoreMetaDataInOutputBuffers) { -            err = allocateOutputMetaDataBuffers(); +        if (storingMetadataInDecodedBuffers()) { +            err = allocateOutputMetadataBuffers();          } else {              err = allocateOutputBuffersFromNativeWindow();          } @@ -543,22 +783,44 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {                  mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));          if (err == OK) { -            ALOGV("[%s] Allocating %u buffers of size %u on %s port", +            MetadataBufferType type = +                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; +            int32_t bufSize = def.nBufferSize; +            if (type == kMetadataBufferTypeGrallocSource) { +                bufSize = sizeof(VideoGrallocMetadata); +            } else if (type == kMetadataBufferTypeANWBuffer) { +                bufSize = sizeof(VideoNativeMetadata); +            } + +            // If using gralloc or native source input metadata buffers, allocate largest +            // metadata size as we prefer to generate native source metadata, but component +            // may require gralloc source. For camera source, allocate at least enough +            // size for native metadata buffers. 
+            int32_t allottedSize = bufSize; +            if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) { +                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); +            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { +                bufSize = max(bufSize, (int32_t)sizeof(VideoNativeMetadata)); +            } + +            ALOGV("[%s] Allocating %u buffers of size %d/%d (from %u using %s) on %s port",                      mComponentName.c_str(), -                    def.nBufferCountActual, def.nBufferSize, +                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),                      portIndex == kPortIndexInput ? "input" : "output"); -            size_t totalSize = def.nBufferCountActual * def.nBufferSize; +            size_t totalSize = def.nBufferCountActual * bufSize;              mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); -            for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) { -                sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize); +            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { +                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);                  if (mem == NULL || mem->pointer() == NULL) {                      return NO_MEMORY;                  }                  BufferInfo info;                  info.mStatus = BufferInfo::OWNED_BY_US; +                info.mFenceFd = -1; +                info.mRenderInfo = NULL;                  uint32_t requiresAllocateBufferBit =                      (portIndex == kPortIndexInput) @@ -566,27 +828,27 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {                          : OMXCodec::kRequiresAllocateBufferOnOutputPorts;                  if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) -                        || 
mUseMetadataOnEncoderOutput) { +                        || (portIndex == kPortIndexOutput && usingMetadataOnEncoderOutput())) {                      mem.clear();                      void *ptr;                      err = mOMX->allocateBuffer( -                            mNode, portIndex, def.nBufferSize, &info.mBufferID, +                            mNode, portIndex, bufSize, &info.mBufferID,                              &ptr); -                    int32_t bufSize = mUseMetadataOnEncoderOutput ? -                            (4 + sizeof(buffer_handle_t)) : def.nBufferSize; -                      info.mData = new ABuffer(ptr, bufSize);                  } else if (mQuirks & requiresAllocateBufferBit) {                      err = mOMX->allocateBufferWithBackup( -                            mNode, portIndex, mem, &info.mBufferID); +                            mNode, portIndex, mem, &info.mBufferID, allottedSize);                  } else { -                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID); +                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);                  }                  if (mem != NULL) { -                    info.mData = new ABuffer(mem->pointer(), def.nBufferSize); +                    info.mData = new ABuffer(mem->pointer(), bufSize); +                    if (type == kMetadataBufferTypeANWBuffer) { +                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; +                    }                  }                  mBuffers[portIndex].push(info); @@ -617,9 +879,8 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {      return OK;  } -status_t ACodec::configureOutputBuffersFromNativeWindow( -        OMX_U32 *bufferCount, OMX_U32 *bufferSize, -        OMX_U32 *minUndequeuedBuffers) { +status_t ACodec::setupNativeWindowSizeFormatAndUsage( +        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {      OMX_PARAM_PORTDEFINITIONTYPE 
def;      InitOMXParams(&def);      def.nPortIndex = kPortIndexOutput; @@ -631,40 +892,6 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(          return err;      } -    err = native_window_set_buffers_geometry( -            mNativeWindow.get(), -            def.format.video.nFrameWidth, -            def.format.video.nFrameHeight, -            def.format.video.eColorFormat); - -    if (err != 0) { -        ALOGE("native_window_set_buffers_geometry failed: %s (%d)", -                strerror(-err), -err); -        return err; -    } - -    if (mRotationDegrees != 0) { -        uint32_t transform = 0; -        switch (mRotationDegrees) { -            case 0: transform = 0; break; -            case 90: transform = HAL_TRANSFORM_ROT_90; break; -            case 180: transform = HAL_TRANSFORM_ROT_180; break; -            case 270: transform = HAL_TRANSFORM_ROT_270; break; -            default: transform = 0; break; -        } - -        if (transform > 0) { -            err = native_window_set_buffers_transform( -                    mNativeWindow.get(), transform); -            if (err != 0) { -                ALOGE("native_window_set_buffers_transform failed: %s (%d)", -                        strerror(-err), -err); -                return err; -            } -        } -    } - -    // Set up the native window.      OMX_U32 usage = 0;      err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);      if (err != 0) { @@ -678,43 +905,34 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(          usage |= GRALLOC_USAGE_PROTECTED;      } -    // Make sure to check whether either Stagefright or the video decoder -    // requested protected buffers. -    if (usage & GRALLOC_USAGE_PROTECTED) { -        // Verify that the ANativeWindow sends images directly to -        // SurfaceFlinger. 
-        int queuesToNativeWindow = 0; -        err = mNativeWindow->query( -                mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, -                &queuesToNativeWindow); -        if (err != 0) { -            ALOGE("error authenticating native window: %d", err); -            return err; -        } -        if (queuesToNativeWindow != 1) { -            ALOGE("native window could not be authenticated"); -            return PERMISSION_DENIED; -        } -    } +    usage |= kVideoGrallocUsage; +    *finalUsage = usage; -    int consumerUsage = 0; -    err = mNativeWindow->query( -            mNativeWindow.get(), NATIVE_WINDOW_CONSUMER_USAGE_BITS, -            &consumerUsage); -    if (err != 0) { -        ALOGW("failed to get consumer usage bits. ignoring"); -        err = 0; -    } +    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); +    return setNativeWindowSizeFormatAndUsage( +            nativeWindow, +            def.format.video.nFrameWidth, +            def.format.video.nFrameHeight, +            def.format.video.eColorFormat, +            mRotationDegrees, +            usage); +} -    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec) + %#x(Consumer) = %#x", -            omxUsage, usage, consumerUsage, usage | consumerUsage); -    usage |= consumerUsage; -    err = native_window_set_usage( -            mNativeWindow.get(), -            usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP); +status_t ACodec::configureOutputBuffersFromNativeWindow( +        OMX_U32 *bufferCount, OMX_U32 *bufferSize, +        OMX_U32 *minUndequeuedBuffers) { +    OMX_PARAM_PORTDEFINITIONTYPE def; +    InitOMXParams(&def); +    def.nPortIndex = kPortIndexOutput; -    if (err != 0) { -        ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err); +    status_t err = mOMX->getParameter( +            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + +    if (err == OK) { +        err = 
setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits); +    } +    if (err != OK) { +        mNativeWindowUsageBits = 0;          return err;      } @@ -798,6 +1016,11 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {          return err;      mNumUndequeuedBuffers = minUndequeuedBuffers; +    if (!storingMetadataInDecodedBuffers()) { +        static_cast<Surface*>(mNativeWindow.get()) +                ->getIGraphicBufferProducer()->allowAllocation(true); +    } +      ALOGV("[%s] Allocating %u buffers from a native window of size %u on "           "output port",           mComponentName.c_str(), bufferCount, bufferSize); @@ -805,7 +1028,8 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {      // Dequeue buffers and send them to OMX      for (OMX_U32 i = 0; i < bufferCount; i++) {          ANativeWindowBuffer *buf; -        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf); +        int fenceFd; +        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);          if (err != 0) {              ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);              break; @@ -814,6 +1038,9 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {          sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));          BufferInfo info;          info.mStatus = BufferInfo::OWNED_BY_US; +        info.mFenceFd = fenceFd; +        info.mIsReadFence = false; +        info.mRenderInfo = NULL;          info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);          info.mGraphicBuffer = graphicBuffer;          mBuffers[kPortIndexOutput].push(info); @@ -850,16 +1077,23 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {      for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {          BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); -        status_t error = cancelBufferToNativeWindow(info); -        if (err == 0) { -            
err = error; +        if (info->mStatus == BufferInfo::OWNED_BY_US) { +            status_t error = cancelBufferToNativeWindow(info); +            if (err == 0) { +                err = error; +            }          }      } +    if (!storingMetadataInDecodedBuffers()) { +        static_cast<Surface*>(mNativeWindow.get()) +                ->getIGraphicBufferProducer()->allowAllocation(false); +    } +      return err;  } -status_t ACodec::allocateOutputMetaDataBuffers() { +status_t ACodec::allocateOutputMetadataBuffers() {      OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;      status_t err = configureOutputBuffersFromNativeWindow(              &bufferCount, &bufferSize, &minUndequeuedBuffers); @@ -870,26 +1104,32 @@ status_t ACodec::allocateOutputMetaDataBuffers() {      ALOGV("[%s] Allocating %u meta buffers on output port",           mComponentName.c_str(), bufferCount); -    size_t totalSize = bufferCount * 8; +    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
+            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); +    size_t totalSize = bufferCount * bufSize;      mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");      // Dequeue buffers and send them to OMX      for (OMX_U32 i = 0; i < bufferCount; i++) {          BufferInfo info;          info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; +        info.mFenceFd = -1; +        info.mRenderInfo = NULL;          info.mGraphicBuffer = NULL;          info.mDequeuedAt = mDequeueCounter; -        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate( -                sizeof(struct VideoDecoderOutputMetaData)); +        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);          if (mem == NULL || mem->pointer() == NULL) {              return NO_MEMORY;          } +        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { +            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; +        }          info.mData = new ABuffer(mem->pointer(), mem->size());          // we use useBuffer for metadata regardless of quirks          err = mOMX->useBuffer( -                mNode, kPortIndexOutput, mem, &info.mBufferID); +                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());          mBuffers[kPortIndexOutput].push(info); @@ -897,28 +1137,111 @@ status_t ACodec::allocateOutputMetaDataBuffers() {               mComponentName.c_str(), info.mBufferID, mem->pointer());      } -    mMetaDataBuffersToSubmit = bufferCount - minUndequeuedBuffers; +    if (mLegacyAdaptiveExperiment) { +        // preallocate and preregister buffers +        static_cast<Surface *>(mNativeWindow.get()) +                ->getIGraphicBufferProducer()->allowAllocation(true); + +        ALOGV("[%s] Allocating %u buffers from a native window of size %u on " +             "output port", +             mComponentName.c_str(), bufferCount, bufferSize); + +        // Dequeue buffers then cancel them all +        for 
(OMX_U32 i = 0; i < bufferCount; i++) { +            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); + +            ANativeWindowBuffer *buf; +            int fenceFd; +            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); +            if (err != 0) { +                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); +                break; +            } + +            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); +            mOMX->updateGraphicBufferInMeta( +                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); +            info->mStatus = BufferInfo::OWNED_BY_US; +            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); +            info->mGraphicBuffer = graphicBuffer; +        } + +        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { +            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); +            if (info->mStatus == BufferInfo::OWNED_BY_US) { +                status_t error = cancelBufferToNativeWindow(info); +                if (err == OK) { +                    err = error; +                } +            } +        } + +        static_cast<Surface*>(mNativeWindow.get()) +                ->getIGraphicBufferProducer()->allowAllocation(false); +    } + +    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;      return err;  } -status_t ACodec::submitOutputMetaDataBuffer() { -    CHECK(mStoreMetaDataInOutputBuffers); -    if (mMetaDataBuffersToSubmit == 0) +status_t ACodec::submitOutputMetadataBuffer() { +    CHECK(storingMetadataInDecodedBuffers()); +    if (mMetadataBuffersToSubmit == 0)          return OK;      BufferInfo *info = dequeueBufferFromNativeWindow(); -    if (info == NULL) +    if (info == NULL) {          return ERROR_IO; +    }      ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",            mComponentName.c_str(), info->mBufferID, 
info->mGraphicBuffer.get()); -    --mMetaDataBuffersToSubmit; -    CHECK_EQ(mOMX->fillBuffer(mNode, info->mBufferID), -             (status_t)OK); +    --mMetadataBuffersToSubmit; +    info->checkWriteFence("submitOutputMetadataBuffer"); +    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); +    info->mFenceFd = -1; +    if (err == OK) { +        info->mStatus = BufferInfo::OWNED_BY_COMPONENT; +    } -    info->mStatus = BufferInfo::OWNED_BY_COMPONENT; -    return OK; +    return err; +} + +status_t ACodec::waitForFence(int fd, const char *dbg ) { +    status_t res = OK; +    if (fd >= 0) { +        sp<Fence> fence = new Fence(fd); +        res = fence->wait(IOMX::kFenceTimeoutMs); +        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); +    } +    return res; +} + +// static +const char *ACodec::_asString(BufferInfo::Status s) { +    switch (s) { +        case BufferInfo::OWNED_BY_US:            return "OUR"; +        case BufferInfo::OWNED_BY_COMPONENT:     return "COMPONENT"; +        case BufferInfo::OWNED_BY_UPSTREAM:      return "UPSTREAM"; +        case BufferInfo::OWNED_BY_DOWNSTREAM:    return "DOWNSTREAM"; +        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; +        case BufferInfo::UNRECOGNIZED:           return "UNRECOGNIZED"; +        default:                                 return "?"; +    } +} + +void ACodec::dumpBuffers(OMX_U32 portIndex) { +    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); +    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), +            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); +    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { +        const BufferInfo &info = mBuffers[portIndex][i]; +        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", +                i, info.mBufferID, info.mGraphicBuffer.get(), +                info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), +                _asString(info.mStatus), info.mStatus, info.mDequeuedAt); +    }  }  status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { @@ -927,17 +1250,59 @@ status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {      ALOGV("[%s] Calling cancelBuffer on buffer %u",           mComponentName.c_str(), info->mBufferID); +    info->checkWriteFence("cancelBufferToNativeWindow");      int err = mNativeWindow->cancelBuffer( -        mNativeWindow.get(), info->mGraphicBuffer.get(), -1); +        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); +    info->mFenceFd = -1;      ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",              mComponentName.c_str(), info->mBufferID); - +    // change ownership even if cancelBuffer fails      info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;      return err;  } +void ACodec::updateRenderInfoForDequeuedBuffer( +        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { + +    info->mRenderInfo = +        mRenderTracker.updateInfoForDequeuedBuffer( +                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); + +    // check for any fences already signaled +    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); +} + +void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { +    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { +        mRenderTracker.dumpRenderQueue(); +    } +} + +void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { +    sp<AMessage> msg = mNotify->dup(); +    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); +    std::list<FrameRenderTracker::Info> done = +        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); + +    // unlink untracked frames +    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); +            it != done.cend(); ++it) { +  
      ssize_t index = it->getIndex(); +        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) { +            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; +        } else if (index >= 0) { +            // THIS SHOULD NEVER HAPPEN +            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); +        } +    } + +    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { +        msg->post(); +    } +} +  ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {      ANativeWindowBuffer *buf;      CHECK(mNativeWindow.get() != NULL); @@ -953,26 +1318,61 @@ ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {          return NULL;      } -    if (native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf) != 0) { -        ALOGE("dequeueBuffer failed."); -        return NULL; -    } +    int fenceFd = -1; +    do { +        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); +        if (err != 0) { +            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); +            return NULL; +        } + +        bool stale = false; +        for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) { +            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); + +            if (info->mGraphicBuffer != NULL && +                    info->mGraphicBuffer->handle == buf->handle) { +                // Since consumers can attach buffers to BufferQueues, it is possible +                // that a known yet stale buffer can return from a surface that we +                // once used.  We can simply ignore this as we have already dequeued +                // this buffer properly.  NOTE: this does not eliminate all cases, +                // e.g. 
it is possible that we have queued the valid buffer to the +                // NW, and a stale copy of the same buffer gets dequeued - which will +                // be treated as the valid buffer by ACodec. +                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { +                    ALOGI("dequeued stale buffer %p. discarding", buf); +                    stale = true; +                    break; +                } + +                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); +                info->mStatus = BufferInfo::OWNED_BY_US; +                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); +                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); +                return info; +            } +        } +        // It is also possible to receive a previously unregistered buffer +        // in non-meta mode. These should be treated as stale buffers. The +        // same is possible in meta mode, in which case, it will be treated +        // as a normal buffer, which is not desirable. +        // TODO: fix this. +        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { +            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); +            stale = true; +        } +        if (stale) { +            // TODO: detach stale buffer, but there is no API yet to do it. 
+            buf = NULL; +        } +    } while (buf == NULL); + +    // get oldest undequeued buffer      BufferInfo *oldest = NULL;      for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) {          BufferInfo *info =              &mBuffers[kPortIndexOutput].editItemAt(i); - -        if (info->mGraphicBuffer != NULL && -            info->mGraphicBuffer->handle == buf->handle) { -            CHECK_EQ((int)info->mStatus, -                     (int)BufferInfo::OWNED_BY_NATIVE_WINDOW); - -            info->mStatus = BufferInfo::OWNED_BY_US; - -            return info; -        } -          if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&              (oldest == NULL ||               // avoid potential issues from counter rolling over @@ -982,48 +1382,64 @@ ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {          }      } -    if (oldest) { -        CHECK(mStoreMetaDataInOutputBuffers); +    // it is impossible dequeue a buffer when there are no buffers with ANW +    CHECK(oldest != NULL); +    // it is impossible to dequeue an unknown buffer in non-meta mode, as the +    // while loop above does not complete +    CHECK(storingMetadataInDecodedBuffers()); -        // discard buffer in LRU info and replace with new buffer -        oldest->mGraphicBuffer = new GraphicBuffer(buf, false); -        oldest->mStatus = BufferInfo::OWNED_BY_US; +    // discard buffer in LRU info and replace with new buffer +    oldest->mGraphicBuffer = new GraphicBuffer(buf, false); +    oldest->mStatus = BufferInfo::OWNED_BY_US; +    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); +    mRenderTracker.untrackFrame(oldest->mRenderInfo); +    oldest->mRenderInfo = NULL; -        mOMX->updateGraphicBufferInMeta( -                mNode, kPortIndexOutput, oldest->mGraphicBuffer, -                oldest->mBufferID); - -        VideoDecoderOutputMetaData *metaData = -            reinterpret_cast<VideoDecoderOutputMetaData *>( -            
        oldest->mData->base()); -        CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource); +    mOMX->updateGraphicBufferInMeta( +            mNode, kPortIndexOutput, oldest->mGraphicBuffer, +            oldest->mBufferID); +    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { +        VideoGrallocMetadata *grallocMeta = +            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());          ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", -                oldest - &mBuffers[kPortIndexOutput][0], +                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),                  mDequeueCounter - oldest->mDequeuedAt, -                metaData->pHandle, +                (void *)(uintptr_t)grallocMeta->pHandle,                  oldest->mGraphicBuffer->handle, oldest->mData->base()); - -        return oldest; +    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { +        VideoNativeMetadata *nativeMeta = +            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); +        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", +                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), +                mDequeueCounter - oldest->mDequeuedAt, +                (void *)(uintptr_t)nativeMeta->pBuffer, +                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());      } -    TRESPASS(); - -    return NULL; +    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); +    return oldest;  }  status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { -    for (size_t i = mBuffers[portIndex].size(); i-- > 0;) { -        CHECK_EQ((status_t)OK, freeBuffer(portIndex, i)); +    status_t err = OK; +    for (size_t i = mBuffers[portIndex].size(); i > 0;) { +        i--; +        status_t err2 = freeBuffer(portIndex, i); +        if (err == OK) { +            err = err2; +        }      } +    // clear mDealer even on an error      
mDealer[portIndex].clear(); - -    return OK; +    return err;  }  status_t ACodec::freeOutputBuffersNotOwnedByComponent() { -    for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) { +    status_t err = OK; +    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { +        i--;          BufferInfo *info =              &mBuffers[kPortIndexOutput].editItemAt(i); @@ -1031,36 +1447,65 @@ status_t ACodec::freeOutputBuffersNotOwnedByComponent() {          // or being drained.          if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&              info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { -            CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i)); +            status_t err2 = freeBuffer(kPortIndexOutput, i); +            if (err == OK) { +                err = err2; +            }          }      } -    return OK; +    return err;  }  status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {      BufferInfo *info = &mBuffers[portIndex].editItemAt(i); +    status_t err = OK; + +    // there should not be any fences in the metadata +    MetadataBufferType type = +        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; +    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL +            && info->mData->size() >= sizeof(VideoNativeMetadata)) { +        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; +        if (fenceFd >= 0) { +            ALOGW("unreleased fence (%d) in %s metadata buffer %zu", +                    fenceFd, portIndex == kPortIndexInput ? 
"input" : "output", i); +        } +    } + +    switch (info->mStatus) { +        case BufferInfo::OWNED_BY_US: +            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { +                (void)cancelBufferToNativeWindow(info); +            } +            // fall through -    CHECK(info->mStatus == BufferInfo::OWNED_BY_US -            || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW); +        case BufferInfo::OWNED_BY_NATIVE_WINDOW: +            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); +            break; -    if (portIndex == kPortIndexOutput && mNativeWindow != NULL -            && info->mStatus == BufferInfo::OWNED_BY_US) { -        cancelBufferToNativeWindow(info); +        default: +            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); +            err = FAILED_TRANSACTION; +            break;      } -    CHECK_EQ(mOMX->freeBuffer( -                mNode, portIndex, info->mBufferID), -             (status_t)OK); +    if (info->mFenceFd >= 0) { +        ::close(info->mFenceFd); +    } -    mBuffers[portIndex].removeAt(i); +    if (portIndex == kPortIndexOutput) { +        mRenderTracker.untrackFrame(info->mRenderInfo, i); +        info->mRenderInfo = NULL; +    } -    return OK; +    // remove buffer even if mOMX->freeBuffer fails +    mBuffers[portIndex].removeAt(i); +    return err;  }  ACodec::BufferInfo *ACodec::findBufferByID( -        uint32_t portIndex, IOMX::buffer_id bufferID, -        ssize_t *index) { +        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {      for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {          BufferInfo *info = &mBuffers[portIndex].editItemAt(i); @@ -1072,8 +1517,7 @@ ACodec::BufferInfo *ACodec::findBufferByID(          }      } -    TRESPASS(); - +    ALOGE("Could not find buffer with ID %u", bufferID);      return NULL;  } @@ -1186,6 +1630,9 @@ status_t ACodec::configureCodec(      mIsEncoder = encoder; +    
mInputMetadataType = kMetadataBufferTypeInvalid; +    mOutputMetadataType = kMetadataBufferTypeInvalid; +      status_t err = setComponentRole(encoder /* isEncoder */, mime);      if (err != OK) { @@ -1203,15 +1650,27 @@ status_t ACodec::configureCodec(      if (encoder              && msg->findInt32("store-metadata-in-buffers", &storeMeta)              && storeMeta != 0) { -        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE); - +        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);          if (err != OK) { -              ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", +            ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",                      mComponentName.c_str(), err); -              return err; -          } -      } +            return err; +        } +        // For this specific case we could be using camera source even if storeMetaDataInBuffers +        // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize. +        if (mInputMetadataType == kMetadataBufferTypeGrallocSource) { +            mInputMetadataType = kMetadataBufferTypeCameraSource; +        } + +        uint32_t usageBits; +        if (mOMX->getParameter( +                mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, +                &usageBits, sizeof(usageBits)) == OK) { +            inputFormat->setInt32( +                    "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); +        } +    }      int32_t prependSPSPPS = 0;      if (encoder @@ -1244,19 +1703,16 @@ status_t ACodec::configureCodec(      // sps/pps to idr frames, since in metadata mode the bitstream is in an      // opaque handle, to which we don't have access.      
int32_t video = !strncasecmp(mime, "video/", 6); +    mIsVideo = video;      if (encoder && video) {          OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS              && msg->findInt32("store-metadata-in-buffers-output", &storeMeta)              && storeMeta != 0); -        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable); - +        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);          if (err != OK) {              ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",                  mComponentName.c_str(), err); -            mUseMetadataOnEncoderOutput = 0; -        } else { -            mUseMetadataOnEncoderOutput = enable;          }          if (!msg->findInt64( @@ -1269,6 +1725,10 @@ status_t ACodec::configureCodec(              mMaxPtsGapUs = -1ll;          } +        if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { +            mMaxFps = -1; +        } +          if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {              mTimePerCaptureUs = -1ll;          } @@ -1284,7 +1744,7 @@ status_t ACodec::configureCodec(      sp<RefBase> obj;      bool haveNativeWindow = msg->findObject("native-window", &obj)              && obj != NULL && video && !encoder; -    mStoreMetaDataInOutputBuffers = false; +    mLegacyAdaptiveExperiment = false;      if (video && !encoder) {          inputFormat->setInt32("adaptive-playback", false); @@ -1299,9 +1759,8 @@ status_t ACodec::configureCodec(          }      }      if (haveNativeWindow) { -        sp<NativeWindowWrapper> windowWrapper( -                static_cast<NativeWindowWrapper *>(obj.get())); -        sp<ANativeWindow> nativeWindow = windowWrapper->getNativeWindow(); +        sp<ANativeWindow> nativeWindow = +            static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get()));          // START of temporary support for automatic FRC - THIS WILL BE REMOVED          int32_t autoFrc; @@ -1370,7 +1829,7 @@ status_t 
ACodec::configureCodec(              // Always try to enable dynamic output buffers on native surface              err = mOMX->storeMetaDataInBuffers( -                    mNode, kPortIndexOutput, OMX_TRUE); +                    mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);              if (err != OK) {                  ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",                          mComponentName.c_str(), err); @@ -1422,7 +1881,10 @@ status_t ACodec::configureCodec(              } else {                  ALOGV("[%s] storeMetaDataInBuffers succeeded",                          mComponentName.c_str()); -                mStoreMetaDataInOutputBuffers = true; +                CHECK(storingMetadataInDecodedBuffers()); +                mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( +                        "legacy-adaptive", !msg->contains("no-experiments")); +                  inputFormat->setInt32("adaptive-playback", true);              } @@ -1460,28 +1922,31 @@ status_t ACodec::configureCodec(          }          if (haveNativeWindow) { -            sp<NativeWindowWrapper> nativeWindow( -                    static_cast<NativeWindowWrapper *>(obj.get())); -            CHECK(nativeWindow != NULL); -            mNativeWindow = nativeWindow->getNativeWindow(); - -            native_window_set_scaling_mode( -                    mNativeWindow.get(), NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); +            mNativeWindow = static_cast<Surface *>(obj.get());          }          // initialize native window now to get actual output format          // TODO: this is needed for some encoders even though they don't use native window -        CHECK_EQ((status_t)OK, initNativeWindow()); +        err = initNativeWindow(); +        if (err != OK) { +            return err; +        }          // fallback for devices that do not handle flex-YUV for native buffers          if (haveNativeWindow) {              int32_t requestedColorFormat = 
OMX_COLOR_FormatUnused;              if (msg->findInt32("color-format", &requestedColorFormat) &&                      requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { -                CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK); +                status_t err = getPortFormat(kPortIndexOutput, outputFormat); +                if (err != OK) { +                    return err; +                }                  int32_t colorFormat = OMX_COLOR_FormatUnused;                  OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; -                CHECK(outputFormat->findInt32("color-format", &colorFormat)); +                if (!outputFormat->findInt32("color-format", &colorFormat)) { +                    ALOGE("ouptut port did not have a color format (wrong domain?)"); +                    return BAD_VALUE; +                }                  ALOGD("[%s] Requested output format %#x and got %#x.",                          mComponentName.c_str(), requestedColorFormat, colorFormat);                  if (!isFlexibleColorFormat( @@ -1491,11 +1956,13 @@ status_t ACodec::configureCodec(                      // to SW renderer                      ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());                      mNativeWindow.clear(); +                    mNativeWindowUsageBits = 0;                      haveNativeWindow = false;                      usingSwRenderer = true; -                    if (mStoreMetaDataInOutputBuffers) { -                        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_FALSE); -                        mStoreMetaDataInOutputBuffers = false; +                    if (storingMetadataInDecodedBuffers()) { +                        err = mOMX->storeMetaDataInBuffers( +                                mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); +                        mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case                          // 
TODO: implement adaptive-playback support for bytebuffer mode.                          // This is done by SW codecs, but most HW codecs don't support it.                          inputFormat->setInt32("adaptive-playback", false); @@ -1603,7 +2070,7 @@ status_t ACodec::configureCodec(              err = setupG711Codec(encoder, sampleRate, numChannels);          }      } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { -        int32_t numChannels, sampleRate, compressionLevel = -1; +        int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1;          if (encoder &&                  (!msg->findInt32("channel-count", &numChannels)                          || !msg->findInt32("sample-rate", &sampleRate))) { @@ -1685,16 +2152,78 @@ status_t ACodec::configureCodec(          err = setMinBufferSize(kPortIndexInput, 8192);  // XXX      } -    mBaseOutputFormat = outputFormat; +    int32_t priority; +    if (msg->findInt32("priority", &priority)) { +        err = setPriority(priority); +    } -    CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK); -    CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK); -    mInputFormat = inputFormat; -    mOutputFormat = outputFormat; +    int32_t rateInt = -1; +    float rateFloat = -1; +    if (!msg->findFloat("operating-rate", &rateFloat)) { +        msg->findInt32("operating-rate", &rateInt); +        rateFloat = (float)rateInt;  // 16MHz (FLINTMAX) is OK for upper bound. 
+    } +    if (rateFloat > 0) { +        err = setOperatingRate(rateFloat, video); +    } +    mBaseOutputFormat = outputFormat; + +    err = getPortFormat(kPortIndexInput, inputFormat); +    if (err == OK) { +        err = getPortFormat(kPortIndexOutput, outputFormat); +        if (err == OK) { +            mInputFormat = inputFormat; +            mOutputFormat = outputFormat; +        } +    }      return err;  } +status_t ACodec::setPriority(int32_t priority) { +    if (priority < 0) { +        return BAD_VALUE; +    } +    OMX_PARAM_U32TYPE config; +    InitOMXParams(&config); +    config.nU32 = (OMX_U32)priority; +    status_t temp = mOMX->setConfig( +            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, +            &config, sizeof(config)); +    if (temp != OK) { +        ALOGI("codec does not support config priority (err %d)", temp); +    } +    return OK; +} + +status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { +    if (rateFloat < 0) { +        return BAD_VALUE; +    } +    OMX_U32 rate; +    if (isVideo) { +        if (rateFloat > 65535) { +            return BAD_VALUE; +        } +        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); +    } else { +        if (rateFloat > UINT_MAX) { +            return BAD_VALUE; +        } +        rate = (OMX_U32)(rateFloat); +    } +    OMX_PARAM_U32TYPE config; +    InitOMXParams(&config); +    config.nU32 = rate; +    status_t err = mOMX->setConfig( +            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, +            &config, sizeof(config)); +    if (err != OK) { +        ALOGI("codec does not support config operating rate (err %d)", err); +    } +    return OK; +} +  status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {      OMX_PARAM_PORTDEFINITIONTYPE def;      InitOMXParams(&def); @@ -1727,7 +2256,10 @@ status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {          return err;      } -    CHECK(def.nBufferSize >= size); +    if (def.nBufferSize < 
size) { +        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize); +        return FAILED_TRANSACTION; +    }      return OK;  } @@ -2055,7 +2587,9 @@ status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {  }  status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { -    CHECK(!encoder);  // XXX TODO +    if (encoder) { +        return INVALID_OPERATION; +    }      return setupRawAudioFormat(              kPortIndexInput, sampleRate, numChannels); @@ -2566,7 +3100,9 @@ status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {              break;      } -    ALOGI("setupVideoEncoder succeeded"); +    if (err == OK) { +        ALOGI("setupVideoEncoder succeeded"); +    }      return err;  } @@ -3164,8 +3700,9 @@ status_t ACodec::setVideoFormatOnPort(      status_t err = mOMX->getParameter(              mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - -    CHECK_EQ(err, (status_t)OK); +    if (err != OK) { +        return err; +    }      if (portIndex == kPortIndexInput) {          // XXX Need a (much) better heuristic to compute input buffer sizes. 
@@ -3175,7 +3712,10 @@ status_t ACodec::setVideoFormatOnPort(          }      } -    CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); +    if (def.eDomain != OMX_PortDomainVideo) { +        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); +        return FAILED_TRANSACTION; +    }      video_def->nFrameWidth = width;      video_def->nFrameHeight = height; @@ -3239,8 +3779,8 @@ void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {      while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers              && dequeueBufferFromNativeWindow() != NULL) {          // these buffers will be submitted as regular buffers; account for this -        if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) { -            --mMetaDataBuffersToSubmit; +        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { +            --mMetadataBuffersToSubmit;          }      }  } @@ -3452,17 +3992,20 @@ bool ACodec::isFlexibleColorFormat(  }  status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { -    // TODO: catch errors an return them instead of using CHECK +    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";      OMX_PARAM_PORTDEFINITIONTYPE def;      InitOMXParams(&def);      def.nPortIndex = portIndex; -    CHECK_EQ(mOMX->getParameter( -                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)), -             (status_t)OK); +    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); +    if (err != OK) { +        return err; +    } -    CHECK_EQ((int)def.eDir, -            (int)(portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)); +    if (def.eDir != (portIndex == kPortIndexOutput ? 
OMX_DirOutput : OMX_DirInput)) { +        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); +        return BAD_VALUE; +    }      switch (def.eDomain) {          case OMX_PortDomainVideo: @@ -3496,7 +4039,7 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                                              sizeof(describeParams.sMediaImage)));                              MediaImage *img = &describeParams.sMediaImage; -                            ALOGV("[%s] MediaImage { F(%zux%zu) @%zu+%zu+%zu @%zu+%zu+%zu @%zu+%zu+%zu }", +                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%u+%u @%u+%u+%u @%u+%u+%u }",                                      mComponentName.c_str(), img->mWidth, img->mHeight,                                      img->mPlane[0].mOffset, img->mPlane[0].mColInc, img->mPlane[0].mRowInc,                                      img->mPlane[1].mOffset, img->mPlane[1].mColInc, img->mPlane[1].mRowInc, @@ -3525,12 +4068,16 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                          rect.nHeight = videoDef->nFrameHeight;                      } -                    CHECK_GE(rect.nLeft, 0); -                    CHECK_GE(rect.nTop, 0); -                    CHECK_GE(rect.nWidth, 0u); -                    CHECK_GE(rect.nHeight, 0u); -                    CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); -                    CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); +                    if (rect.nLeft < 0 || +                        rect.nTop < 0 || +                        rect.nLeft + rect.nWidth > videoDef->nFrameWidth || +                        rect.nTop + rect.nHeight > videoDef->nFrameHeight) { +                        ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. 
frame (%u, %u)", +                                rect.nLeft, rect.nTop, +                                rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, +                                videoDef->nFrameWidth, videoDef->nFrameHeight); +                        return BAD_VALUE; +                    }                      notify->setRect(                              "crop", @@ -3587,7 +4134,13 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                  default:                  { -                    CHECK(mIsEncoder ^ (portIndex == kPortIndexInput)); +                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { +                        // should be CodingUnused +                        ALOGE("Raw port video compression format is %s(%d)", +                                asString(videoDef->eCompressionFormat), +                                videoDef->eCompressionFormat); +                        return BAD_VALUE; +                    }                      AString mime;                      if (GetMimeTypeForVideoCoding(                          videoDef->eCompressionFormat, &mime) != OK) { @@ -3618,20 +4171,25 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ(mOMX->getParameter( -                                mNode, OMX_IndexParamAudioPcm, -                                ¶ms, sizeof(params)), -                             (status_t)OK); - -                    CHECK_GT(params.nChannels, 0); -                    CHECK(params.nChannels == 1 || params.bInterleaved); -                    CHECK_EQ(params.nBitPerSample, 16u); - -                    CHECK_EQ((int)params.eNumData, -                             (int)OMX_NumericalDataSigned); +                    err = mOMX->getParameter( +                            mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); +            
        if (err != OK) { +                        return err; +                    } -                    CHECK_EQ((int)params.ePCMMode, -                             (int)OMX_AUDIO_PCMModeLinear); +                    if (params.nChannels <= 0 +                            || (params.nChannels != 1 && !params.bInterleaved) +                            || params.nBitPerSample != 16u +                            || params.eNumData != OMX_NumericalDataSigned +                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { +                        ALOGE("unsupported PCM port: %u channels%s, %u-bit, %s(%d), %s(%d) mode ", +                                params.nChannels, +                                params.bInterleaved ? " interleaved" : "", +                                params.nBitPerSample, +                                asString(params.eNumData), params.eNumData, +                                asString(params.ePCMMode), params.ePCMMode); +                        return FAILED_TRANSACTION; +                    }                      notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);                      notify->setInt32("channel-count", params.nChannels); @@ -3649,10 +4207,11 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ(mOMX->getParameter( -                                mNode, OMX_IndexParamAudioAac, -                                ¶ms, sizeof(params)), -                             (status_t)OK); +                    err = mOMX->getParameter( +                            mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);                      notify->setInt32("channel-count", params.nChannels); @@ -3666,21 +4225,18 @@ 
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ(mOMX->getParameter( -                                mNode, OMX_IndexParamAudioAmr, -                                ¶ms, sizeof(params)), -                             (status_t)OK); +                    err = mOMX->getParameter( +                            mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setInt32("channel-count", 1);                      if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { -                        notify->setString( -                                "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); - +                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);                          notify->setInt32("sample-rate", 16000);                      } else { -                        notify->setString( -                                "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); - +                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);                          notify->setInt32("sample-rate", 8000);                      }                      break; @@ -3692,10 +4248,11 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ(mOMX->getParameter( -                                mNode, OMX_IndexParamAudioFlac, -                                ¶ms, sizeof(params)), -                             (status_t)OK); +                    err = mOMX->getParameter( +                            mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);                      notify->setInt32("channel-count", params.nChannels); @@ -3709,10 +4266,11 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ(mOMX->getParameter( -                                mNode, OMX_IndexParamAudioMp3, -                                ¶ms, sizeof(params)), -                             (status_t)OK); +                    err = mOMX->getParameter( +                            mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);                      notify->setInt32("channel-count", params.nChannels); @@ -3726,10 +4284,11 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ(mOMX->getParameter( -                                mNode, OMX_IndexParamAudioVorbis, -                                ¶ms, sizeof(params)), -                             (status_t)OK); +                    err = mOMX->getParameter( +                            mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);                      notify->setInt32("channel-count", params.nChannels); @@ -3743,11 +4302,12 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ((status_t)OK, mOMX->getParameter( -                            mNode, -                   
         (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, -                            ¶ms, -                            sizeof(params))); +                    err = mOMX->getParameter( +                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, +                            ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);                      notify->setInt32("channel-count", params.nChannels); @@ -3761,11 +4321,12 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ((status_t)OK, mOMX->getParameter( -                            mNode, -                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, -                            ¶ms, -                            sizeof(params))); +                    err = mOMX->getParameter( +                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, +                            ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);                      notify->setInt32("channel-count", params.nChannels); @@ -3779,11 +4340,12 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ((status_t)OK, mOMX->getParameter( -                            mNode, -                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, -                            ¶ms, -                            sizeof(params))); +                    err = mOMX->getParameter( +                            mNode, 
(OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, +                            ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);                      notify->setInt32("channel-count", params.nChannels); @@ -3797,11 +4359,11 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ((status_t)OK, mOMX->getParameter( -                            mNode, -                            (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, -                            ¶ms, -                            sizeof(params))); +                    err = mOMX->getParameter( +                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      const char *mime = NULL;                      if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { @@ -3819,30 +4381,33 @@ status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) {                  case OMX_AUDIO_CodingGSMFR:                  { -                    OMX_AUDIO_PARAM_MP3TYPE params; +                    OMX_AUDIO_PARAM_PCMMODETYPE params;                      InitOMXParams(¶ms);                      params.nPortIndex = portIndex; -                    CHECK_EQ(mOMX->getParameter( -                                mNode, OMX_IndexParamAudioPcm, -                                ¶ms, sizeof(params)), -                             (status_t)OK); +                    err = mOMX->getParameter( +                                mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); +                    if (err != OK) { +                        return err; +                    }                      notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_MSGSM);                      notify->setInt32("channel-count", params.nChannels); -                    notify->setInt32("sample-rate", params.nSampleRate); +                    notify->setInt32("sample-rate", params.nSamplingRate);                      break;                  }                  default: -                    ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding); -                    TRESPASS(); +                    ALOGE("Unsupported audio coding: %s(%d)\n", +                            asString(audioDef->eEncoding), audioDef->eEncoding); +                    return BAD_TYPE;              }              break;          }          default: -            TRESPASS(); +            ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); +            return BAD_TYPE;      }      return OK; @@ -3852,7 +4417,10 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {      sp<AMessage> notify = mBaseOutputFormat->dup();      notify->setInt32("what", kWhatOutputFormatChanged); -    CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK); +    if (getPortFormat(kPortIndexOutput, notify) != OK) { +        ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); +        return; +    }      AString mime;      CHECK(notify->findString("mime", &mime)); @@ -3872,9 +4440,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {          if (mSkipCutBuffer != NULL) {              size_t prevbufsize = mSkipCutBuffer->size();              if (prevbufsize != 0) { -                ALOGW("Replacing SkipCutBuffer holding %d " -                      "bytes", -                      prevbufsize); +                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);              }          }          mSkipCutBuffer = new SkipCutBuffer( @@ -3908,150 +4474,6 @@ void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {      notify->post();  } -status_t 
ACodec::pushBlankBuffersToNativeWindow() { -    status_t err = NO_ERROR; -    ANativeWindowBuffer* anb = NULL; -    int numBufs = 0; -    int minUndequeuedBufs = 0; - -    // We need to reconnect to the ANativeWindow as a CPU client to ensure that -    // no frames get dropped by SurfaceFlinger assuming that these are video -    // frames. -    err = native_window_api_disconnect(mNativeWindow.get(), -            NATIVE_WINDOW_API_MEDIA); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", -                strerror(-err), -err); -        return err; -    } - -    err = native_window_api_connect(mNativeWindow.get(), -            NATIVE_WINDOW_API_CPU); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: api_connect failed: %s (%d)", -                strerror(-err), -err); -        return err; -    } - -    err = native_window_set_buffers_geometry(mNativeWindow.get(), 1, 1, -            HAL_PIXEL_FORMAT_RGBX_8888); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: set_buffers_geometry failed: %s (%d)", -                strerror(-err), -err); -        goto error; -    } - -    err = native_window_set_scaling_mode(mNativeWindow.get(), -                NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank_frames: set_scaling_mode failed: %s (%d)", -              strerror(-err), -err); -        goto error; -    } - -    err = native_window_set_usage(mNativeWindow.get(), -            GRALLOC_USAGE_SW_WRITE_OFTEN); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: set_usage failed: %s (%d)", -                strerror(-err), -err); -        goto error; -    } - -    err = mNativeWindow->query(mNativeWindow.get(), -            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query " -                
"failed: %s (%d)", strerror(-err), -err); -        goto error; -    } - -    numBufs = minUndequeuedBufs + 1; -    err = native_window_set_buffer_count(mNativeWindow.get(), numBufs); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)", -                strerror(-err), -err); -        goto error; -    } - -    // We  push numBufs + 1 buffers to ensure that we've drawn into the same -    // buffer twice.  This should guarantee that the buffer has been displayed -    // on the screen and then been replaced, so an previous video frames are -    // guaranteed NOT to be currently displayed. -    for (int i = 0; i < numBufs + 1; i++) { -        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        sp<GraphicBuffer> buf(new GraphicBuffer(anb, false)); - -        // Fill the buffer with the a 1x1 checkerboard pattern ;) -        uint32_t* img = NULL; -        err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img)); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: lock failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        *img = 0; - -        err = buf->unlock(); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: unlock failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        err = mNativeWindow->queueBuffer(mNativeWindow.get(), -                buf->getNativeBuffer(), -1); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        anb = NULL; -    } - -error: - -    if (err != NO_ERROR) { -      
  // Clean up after an error. -        if (anb != NULL) { -            mNativeWindow->cancelBuffer(mNativeWindow.get(), anb, -1); -        } - -        native_window_api_disconnect(mNativeWindow.get(), -                NATIVE_WINDOW_API_CPU); -        native_window_api_connect(mNativeWindow.get(), -                NATIVE_WINDOW_API_MEDIA); - -        return err; -    } else { -        // Clean up after success. -        err = native_window_api_disconnect(mNativeWindow.get(), -                NATIVE_WINDOW_API_CPU); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", -                    strerror(-err), -err); -            return err; -        } - -        err = native_window_api_connect(mNativeWindow.get(), -                NATIVE_WINDOW_API_MEDIA); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: api_connect failed: %s (%d)", -                    strerror(-err), -err); -            return err; -        } - -        return NO_ERROR; -    } -} -  ////////////////////////////////////////////////////////////////////////////////  ACodec::PortDescription::PortDescription() { @@ -4119,12 +4541,40 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {              break;          } -        case ACodec::kWhatOMXMessage: +        case ACodec::kWhatOMXMessageList:          { +            return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; +        } + +        case ACodec::kWhatOMXMessageItem: +        { +            // no need to check as we already did it for kWhatOMXMessageList              return onOMXMessage(msg);          } +        case ACodec::kWhatOMXMessage: +        { +            return checkOMXMessage(msg) ? 
onOMXMessage(msg) : true; +        } + +        case ACodec::kWhatSetSurface: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); + +            sp<RefBase> obj; +            CHECK(msg->findObject("surface", &obj)); + +            status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); + +            sp<AMessage> response = new AMessage; +            response->setInt32("err", err); +            response->postReply(replyID); +            break; +        } +          case ACodec::kWhatCreateInputSurface: +        case ACodec::kWhatSetInputSurface:          case ACodec::kWhatSignalEndOfInputStream:          {              // This may result in an app illegal state exception. @@ -4161,21 +4611,51 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {      return true;  } -bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { -    int32_t type; -    CHECK(msg->findInt32("type", &type)); - +bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) {      // there is a possibility that this is an outstanding message for a      // codec that we have already destroyed -    if (mCodec->mNode == NULL) { +    if (mCodec->mNode == 0) {          ALOGI("ignoring message as already freed component: %s",                  msg->debugString().c_str()); -        return true; +        return false;      }      IOMX::node_id nodeID;      CHECK(msg->findInt32("node", (int32_t*)&nodeID)); -    CHECK_EQ(nodeID, mCodec->mNode); +    if (nodeID != mCodec->mNode) { +        ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); +        return false; +    } +    return true; +} + +bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { +    sp<RefBase> obj; +    CHECK(msg->findObject("messages", &obj)); +    sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); + +    bool receivedRenderedEvents = false; +    for 
(std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); +          it != msgList->getList().cend(); ++it) { +        (*it)->setWhat(ACodec::kWhatOMXMessageItem); +        mCodec->handleMessage(*it); +        int32_t type; +        CHECK((*it)->findInt32("type", &type)); +        if (type == omx_message::FRAME_RENDERED) { +            receivedRenderedEvents = true; +        } +    } + +    if (receivedRenderedEvents) { +        // NOTE: all buffers are rendered in this case +        mCodec->notifyOfRenderedFrames(); +    } +    return true; +} + +bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { +    int32_t type; +    CHECK(msg->findInt32("type", &type));      switch (type) {          case omx_message::EVENT: @@ -4205,9 +4685,12 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {          case omx_message::EMPTY_BUFFER_DONE:          {              IOMX::buffer_id bufferID; +            int32_t fenceFd; +              CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); +            CHECK(msg->findInt32("fence_fd", &fenceFd)); -            return onOMXEmptyBufferDone(bufferID); +            return onOMXEmptyBufferDone(bufferID, fenceFd);          }          case omx_message::FILL_BUFFER_DONE: @@ -4215,37 +4698,56 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {              IOMX::buffer_id bufferID;              CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); -            int32_t rangeOffset, rangeLength, flags; +            int32_t rangeOffset, rangeLength, flags, fenceFd;              int64_t timeUs;              CHECK(msg->findInt32("range_offset", &rangeOffset));              CHECK(msg->findInt32("range_length", &rangeLength));              CHECK(msg->findInt32("flags", &flags));              CHECK(msg->findInt64("timestamp", &timeUs)); +            CHECK(msg->findInt32("fence_fd", &fenceFd));              return onOMXFillBufferDone(                      bufferID,                      
(size_t)rangeOffset, (size_t)rangeLength,                      (OMX_U32)flags, -                    timeUs); +                    timeUs, +                    fenceFd); +        } + +        case omx_message::FRAME_RENDERED: +        { +            int64_t mediaTimeUs, systemNano; + +            CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); +            CHECK(msg->findInt64("system_nano", &systemNano)); + +            return onOMXFrameRendered( +                    mediaTimeUs, systemNano);          }          default: -            TRESPASS(); -            break; +            ALOGE("Unexpected message type: %d", type); +            return false;      }  } +bool ACodec::BaseState::onOMXFrameRendered( +        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { +    // ignore outside of Executing and PortSettingsChanged states +    return true; +} +  bool ACodec::BaseState::onOMXEvent(          OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {      if (event != OMX_EventError) { -        ALOGV("[%s] EVENT(%d, 0x%08lx, 0x%08lx)", +        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",               mCodec->mComponentName.c_str(), event, data1, data2);          return false;      } -    ALOGE("[%s] ERROR(0x%08lx)", mCodec->mComponentName.c_str(), data1); +    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);      // verify OMX component sends back an error we expect.      
OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; @@ -4258,16 +4760,29 @@ bool ACodec::BaseState::onOMXEvent(      return true;  } -bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) { -    ALOGV("[%s] onOMXEmptyBufferDone %p", +bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { +    ALOGV("[%s] onOMXEmptyBufferDone %u",           mCodec->mComponentName.c_str(), bufferID); -    BufferInfo *info = -        mCodec->findBufferByID(kPortIndexInput, bufferID); - -    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); +    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); +    BufferInfo::Status status = BufferInfo::getSafeStatus(info); +    if (status != BufferInfo::OWNED_BY_COMPONENT) { +        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); +        mCodec->dumpBuffers(kPortIndexInput); +        if (fenceFd >= 0) { +            ::close(fenceFd); +        } +        return false; +    }      info->mStatus = BufferInfo::OWNED_BY_US; +    // input buffers cannot take fences, so wait for any fence now +    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); +    fenceFd = -1; + +    // still save fence for completeness +    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); +      // We're in "store-metadata-in-buffers" mode, the underlying      // OMX component had access to data that's implicitly refcounted      // by this "MediaBuffer" object. 
Now that the OMX component has @@ -4285,12 +4800,10 @@ bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) {              postFillThisBuffer(info);              break; +        case FREE_BUFFERS:          default: -        { -            CHECK_EQ((int)mode, (int)FREE_BUFFERS); -            TRESPASS();  // Not currently used -            break; -        } +            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); +            return false;      }      return true; @@ -4310,7 +4823,7 @@ void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {      info->mData->meta()->clear();      notify->setBuffer("buffer", info->mData); -    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id()); +    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);      reply->setInt32("buffer-id", info->mBufferID);      notify->setMessage("reply", reply); @@ -4351,7 +4864,13 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {      }      BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); -    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM); +    BufferInfo::Status status = BufferInfo::getSafeStatus(info); +    if (status != BufferInfo::OWNED_BY_UPSTREAM) { +        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); +        mCodec->dumpBuffers(kPortIndexInput); +        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +        return; +    }      info->mStatus = BufferInfo::OWNED_BY_US; @@ -4370,6 +4889,12 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {          case RESUBMIT_BUFFERS:          {              if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { +                // Do not send empty input buffer w/o EOS to the component. 
+                if (buffer->size() == 0 && !eos) { +                    postFillThisBuffer(info); +                    break; +                } +                  int64_t timeUs;                  CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); @@ -4385,28 +4910,34 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {                  }                  if (buffer != info->mData) { -                    ALOGV("[%s] Needs to copy input data for buffer %p. (%p != %p)", +                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",                           mCodec->mComponentName.c_str(),                           bufferID,                           buffer.get(), info->mData.get()); -                    CHECK_LE(buffer->size(), info->mData->capacity()); +                    if (buffer->size() > info->mData->capacity()) { +                        ALOGE("data size (%zu) is greated than buffer capacity (%zu)", +                                buffer->size(),           // this is the data received +                                info->mData->capacity()); // this is out buffer size +                        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +                        return; +                    }                      memcpy(info->mData->data(), buffer->data(), buffer->size());                  }                  if (flags & OMX_BUFFERFLAG_CODECCONFIG) { -                    ALOGV("[%s] calling emptyBuffer %p w/ codec specific data", +                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",                           mCodec->mComponentName.c_str(), bufferID);                  } else if (flags & OMX_BUFFERFLAG_EOS) { -                    ALOGV("[%s] calling emptyBuffer %p w/ EOS", +                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",                           mCodec->mComponentName.c_str(), bufferID);                  } else {  #if TRACK_BUFFER_TIMING -                  
  ALOGI("[%s] calling emptyBuffer %p w/ time %lld us", -                         mCodec->mComponentName.c_str(), bufferID, timeUs); +                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", +                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);  #else -                    ALOGV("[%s] calling emptyBuffer %p w/ time %lld us", -                         mCodec->mComponentName.c_str(), bufferID, timeUs); +                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", +                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);  #endif                  } @@ -4417,61 +4948,69 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {                  mCodec->mBufferStats.add(timeUs, stats);  #endif -                if (mCodec->mStoreMetaDataInOutputBuffers) { +                if (mCodec->storingMetadataInDecodedBuffers()) {                      // try to submit an output buffer for each input buffer                      PortMode outputMode = getPortMode(kPortIndexOutput); -                    ALOGV("MetaDataBuffersToSubmit=%u portMode=%s", -                            mCodec->mMetaDataBuffersToSubmit, +                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s", +                            mCodec->mMetadataBuffersToSubmit,                              (outputMode == FREE_BUFFERS ? "FREE" :                               outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT"));                      if (outputMode == RESUBMIT_BUFFERS) { -                        mCodec->submitOutputMetaDataBuffer(); +                        mCodec->submitOutputMetadataBuffer();                      }                  } - -                CHECK_EQ(mCodec->mOMX->emptyBuffer( -                            mCodec->mNode, -                            bufferID, -                            0, -                            buffer->size(), -                            flags, -                            timeUs), -                         (status_t)OK); - +                info->checkReadFence("onInputBufferFilled"); +                status_t err2 = mCodec->mOMX->emptyBuffer( +                    mCodec->mNode, +                    bufferID, +                    0, +                    buffer->size(), +                    flags, +                    timeUs, +                    info->mFenceFd); +                info->mFenceFd = -1; +                if (err2 != OK) { +                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); +                    return; +                }                  info->mStatus = BufferInfo::OWNED_BY_COMPONENT; -                if (!eos) { +                if (!eos && err == OK) {                      getMoreInputDataIfPossible();                  } else { -                    ALOGV("[%s] Signalled EOS on the input port", -                         mCodec->mComponentName.c_str()); +                    ALOGV("[%s] Signalled EOS (%d) on the input port", +                         mCodec->mComponentName.c_str(), err);                      mCodec->mPortEOS[kPortIndexInput] = true;                      mCodec->mInputEOSResult = err;                  }              } else if (!mCodec->mPortEOS[kPortIndexInput]) { -                if (err != ERROR_END_OF_STREAM) { -                    ALOGV("[%s] Signalling EOS on the input port " -                         "due to error %d", +                if (err 
!= OK && err != ERROR_END_OF_STREAM) { +                    ALOGV("[%s] Signalling EOS on the input port due to error %d",                           mCodec->mComponentName.c_str(), err);                  } else {                      ALOGV("[%s] Signalling EOS on the input port",                           mCodec->mComponentName.c_str());                  } -                ALOGV("[%s] calling emptyBuffer %p signalling EOS", +                ALOGV("[%s] calling emptyBuffer %u signalling EOS",                       mCodec->mComponentName.c_str(), bufferID); -                CHECK_EQ(mCodec->mOMX->emptyBuffer( -                            mCodec->mNode, -                            bufferID, -                            0, -                            0, -                            OMX_BUFFERFLAG_EOS, -                            0), -                         (status_t)OK); - +                info->checkReadFence("onInputBufferFilled"); +                status_t err2 = mCodec->mOMX->emptyBuffer( +                        mCodec->mNode, +                        bufferID, +                        0, +                        0, +                        OMX_BUFFERFLAG_EOS, +                        0, +                        info->mFenceFd); +                info->mFenceFd = -1; +                if (err2 != OK) { +                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); +                    return; +                }                  info->mStatus = BufferInfo::OWNED_BY_COMPONENT;                  mCodec->mPortEOS[kPortIndexInput] = true; @@ -4480,8 +5019,11 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {              break;          } +        case FREE_BUFFERS: +            break; +          default: -            CHECK_EQ((int)mode, (int)FREE_BUFFERS); +            ALOGE("invalid port mode: %d", mode);              break;      }  } @@ -4519,11 +5061,13 @@ bool ACodec::BaseState::onOMXFillBufferDone(          
IOMX::buffer_id bufferID,          size_t rangeOffset, size_t rangeLength,          OMX_U32 flags, -        int64_t timeUs) { +        int64_t timeUs, +        int fenceFd) {      ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",           mCodec->mComponentName.c_str(), bufferID, timeUs, flags);      ssize_t index; +    status_t err= OK;  #if TRACK_BUFFER_TIMING      index = mCodec->mBufferStats.indexOfKey(timeUs); @@ -4542,12 +5086,36 @@ bool ACodec::BaseState::onOMXFillBufferDone(      BufferInfo *info =          mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); - -    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); +    BufferInfo::Status status = BufferInfo::getSafeStatus(info); +    if (status != BufferInfo::OWNED_BY_COMPONENT) { +        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); +        mCodec->dumpBuffers(kPortIndexOutput); +        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +        if (fenceFd >= 0) { +            ::close(fenceFd); +        } +        return true; +    }      info->mDequeuedAt = ++mCodec->mDequeueCounter;      info->mStatus = BufferInfo::OWNED_BY_US; +    if (info->mRenderInfo != NULL) { +        // The fence for an emptied buffer must have signaled, but there still could be queued +        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, +        // as we will soon requeue this buffer to the surface. While in theory we could still keep +        // track of buffers that are requeued to the surface, it is better to add support to the +        // buffer-queue to notify us of released buffers and their fences (in the future). 
+        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); +    } + +    // byte buffers cannot take fences, so wait for any fence now +    if (mCodec->mNativeWindow == NULL) { +        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); +        fenceFd = -1; +    } +    info->setReadFence(fenceFd, "onOMXFillBufferDone"); +      PortMode mode = getPortMode(kPortIndexOutput);      switch (mode) { @@ -4561,24 +5129,39 @@ bool ACodec::BaseState::onOMXFillBufferDone(                  ALOGV("[%s] calling fillBuffer %u",                       mCodec->mComponentName.c_str(), info->mBufferID); -                CHECK_EQ(mCodec->mOMX->fillBuffer( -                            mCodec->mNode, info->mBufferID), -                         (status_t)OK); +                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); +                info->mFenceFd = -1; +                if (err != OK) { +                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); +                    return true; +                }                  info->mStatus = BufferInfo::OWNED_BY_COMPONENT;                  break;              }              sp<AMessage> reply = -                new AMessage(kWhatOutputBufferDrained, mCodec->id()); +                new AMessage(kWhatOutputBufferDrained, mCodec);              if (!mCodec->mSentFormat && rangeLength > 0) {                  mCodec->sendFormatChange(reply);              } - -            if (mCodec->mUseMetadataOnEncoderOutput) { -                native_handle_t* handle = -                        *(native_handle_t**)(info->mData->data() + 4); +            if (mCodec->usingMetadataOnEncoderOutput()) { +                native_handle_t *handle = NULL; +                VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data(); +                VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data(); +                if (info->mData->size() >= 
sizeof(grallocMeta) +                        && grallocMeta.eType == kMetadataBufferTypeGrallocSource) { +                    handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle; +                } else if (info->mData->size() >= sizeof(nativeMeta) +                        && nativeMeta.eType == kMetadataBufferTypeANWBuffer) { +#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS +                    // ANativeWindowBuffer is only valid on 32-bit/mediaserver process +                    handle = NULL; +#else +                    handle = (native_handle_t *)nativeMeta.pBuffer->handle; +#endif +                }                  info->mData->meta()->setPointer("handle", handle);                  info->mData->meta()->setInt32("rangeOffset", rangeOffset);                  info->mData->meta()->setInt32("rangeLength", rangeLength); @@ -4625,14 +5208,17 @@ bool ACodec::BaseState::onOMXFillBufferDone(              break;          } -        default: -        { -            CHECK_EQ((int)mode, (int)FREE_BUFFERS); - -            CHECK_EQ((status_t)OK, -                     mCodec->freeBuffer(kPortIndexOutput, index)); +        case FREE_BUFFERS: +            err = mCodec->freeBuffer(kPortIndexOutput, index); +            if (err != OK) { +                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); +                return true; +            }              break; -        } + +        default: +            ALOGE("Invalid port mode: %d", mode); +            return false;      }      return true; @@ -4642,15 +5228,19 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {      IOMX::buffer_id bufferID;      CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));      ssize_t index; -    BufferInfo *info = -        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); -    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM); +    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, 
&index); +    BufferInfo::Status status = BufferInfo::getSafeStatus(info); +    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { +        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); +        mCodec->dumpBuffers(kPortIndexOutput); +        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +        return; +    }      android_native_rect_t crop; -    if (msg->findRect("crop", -            &crop.left, &crop.top, &crop.right, &crop.bottom)) { -        CHECK_EQ(0, native_window_set_crop( -                mCodec->mNativeWindow.get(), &crop)); +    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) { +        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); +        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);      }      int32_t render; @@ -4660,37 +5250,45 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {          ATRACE_NAME("render");          // The client wants this buffer to be rendered. +        // save buffers sent to the surface so we can get render time when they return +        int64_t mediaTimeUs = -1; +        info->mData->meta()->findInt64("timeUs", &mediaTimeUs); +        if (mediaTimeUs >= 0) { +            mCodec->mRenderTracker.onFrameQueued( +                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); +        } +          int64_t timestampNs = 0;          if (!msg->findInt64("timestampNs", &timestampNs)) { -            // TODO: it seems like we should use the timestamp -            // in the (media)buffer as it potentially came from -            // an input surface, but we did not propagate it prior to -            // API 20.  Perhaps check for target SDK version. 
-#if 0 +            // use media timestamp if client did not request a specific render timestamp              if (info->mData->meta()->findInt64("timeUs", &timestampNs)) { -                ALOGV("using buffer PTS of %" PRId64, timestampNs); +                ALOGV("using buffer PTS of %lld", (long long)timestampNs);                  timestampNs *= 1000;              } -#endif          }          status_t err;          err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); -        if (err != OK) { -            ALOGW("failed to set buffer timestamp: %d", err); -        } +        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); -        if ((err = mCodec->mNativeWindow->queueBuffer( -                    mCodec->mNativeWindow.get(), -                    info->mGraphicBuffer.get(), -1)) == OK) { +        info->checkReadFence("onOutputBufferDrained before queueBuffer"); +        err = mCodec->mNativeWindow->queueBuffer( +                    mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); +        info->mFenceFd = -1; +        if (err == OK) {              info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;          } else { +            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);              mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));              info->mStatus = BufferInfo::OWNED_BY_US; +            // keeping read fence as write fence to avoid clobbering +            info->mIsReadFence = false;          }      } else {          if (mCodec->mNativeWindow != NULL &&              (info->mData == NULL || info->mData->size() != 0)) { +            // move read fence into write fence to avoid clobbering +            info->mIsReadFence = false;              ATRACE_NAME("frame-drop");          }          info->mStatus = BufferInfo::OWNED_BY_US; @@ -4725,24 +5323,32 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {                  if 
(info != NULL) {                      ALOGV("[%s] calling fillBuffer %u",                           mCodec->mComponentName.c_str(), info->mBufferID); - -                    CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), -                             (status_t)OK); - -                    info->mStatus = BufferInfo::OWNED_BY_COMPONENT; +                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); +                    status_t err = mCodec->mOMX->fillBuffer( +                            mCodec->mNode, info->mBufferID, info->mFenceFd); +                    info->mFenceFd = -1; +                    if (err == OK) { +                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT; +                    } else { +                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); +                    }                  }              }              break;          } -        default: +        case FREE_BUFFERS:          { -            CHECK_EQ((int)mode, (int)FREE_BUFFERS); - -            CHECK_EQ((status_t)OK, -                     mCodec->freeBuffer(kPortIndexOutput, index)); +            status_t err = mCodec->freeBuffer(kPortIndexOutput, index); +            if (err != OK) { +                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); +            }              break;          } + +        default: +            ALOGE("Invalid port mode: %d", mode); +            return;      }  } @@ -4761,11 +5367,13 @@ void ACodec::UninitializedState::stateEntered() {      }      mCodec->mNativeWindow.clear(); -    mCodec->mNode = NULL; +    mCodec->mNativeWindowUsageBits = 0; +    mCodec->mNode = 0;      mCodec->mOMX.clear();      mCodec->mQuirks = 0;      mCodec->mFlags = 0; -    mCodec->mUseMetadataOnEncoderOutput = 0; +    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; +    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;      mCodec->mComponentName.clear();  
} @@ -4839,14 +5447,17 @@ void ACodec::UninitializedState::onSetup(  bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {      ALOGV("onAllocateComponent"); -    CHECK(mCodec->mNode == NULL); +    CHECK(mCodec->mNode == 0);      OMXClient client; -    CHECK_EQ(client.connect(), (status_t)OK); +    if (client.connect() != OK) { +        mCodec->signalError(OMX_ErrorUndefined, NO_INIT); +        return false; +    }      sp<IOMX> omx = client.interface(); -    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id()); +    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);      mDeathNotifier = new DeathNotifier(notify);      if (IInterface::asBinder(omx)->linkToDeath(mDeathNotifier) != OK) { @@ -4887,8 +5498,9 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {      }      sp<CodecObserver> observer = new CodecObserver; -    IOMX::node_id node = NULL; +    IOMX::node_id node = 0; +    status_t err = NAME_NOT_FOUND;      for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();              ++matchIndex) {          componentName = matchingCodecs.itemAt(matchIndex).mName.string(); @@ -4897,7 +5509,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {          pid_t tid = gettid();          int prevPriority = androidGetThreadPriority(tid);          androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); -        status_t err = omx->allocateNode(componentName.c_str(), observer, &node); +        err = omx->allocateNode(componentName.c_str(), observer, &node);          androidSetThreadPriority(tid, prevPriority);          if (err == OK) { @@ -4906,25 +5518,26 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {              ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());          } -        node = NULL; +        node = 0;      } -    if (node == NULL) { +    if (node == 0) {          if 
(!mime.empty()) { -            ALOGE("Unable to instantiate a %scoder for type '%s'.", -                    encoder ? "en" : "de", mime.c_str()); +            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", +                    encoder ? "en" : "de", mime.c_str(), err);          } else { -            ALOGE("Unable to instantiate codec '%s'.", componentName.c_str()); +            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);          } -        mCodec->signalError(OMX_ErrorComponentNotFound); +        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));          return false;      } -    notify = new AMessage(kWhatOMXMessage, mCodec->id()); +    notify = new AMessage(kWhatOMXMessageList, mCodec);      observer->setNotificationMessage(notify);      mCodec->mComponentName = componentName; +    mCodec->mRenderTracker.setComponentName(componentName);      mCodec->mFlags = 0;      if (componentName.endsWith(".secure")) { @@ -4964,7 +5577,7 @@ void ACodec::LoadedState::stateEntered() {      mCodec->mInputEOSResult = OK;      mCodec->mDequeueCounter = 0; -    mCodec->mMetaDataBuffersToSubmit = 0; +    mCodec->mMetadataBuffersToSubmit = 0;      mCodec->mRepeatFrameDelayUs = -1ll;      mCodec->mInputFormat.clear();      mCodec->mOutputFormat.clear(); @@ -4985,7 +5598,7 @@ void ACodec::LoadedState::stateEntered() {  void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {      if (!keepComponentAllocated) { -        CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK); +        (void)mCodec->mOMX->freeNode(mCodec->mNode);          mCodec->changeState(mCodec->mUninitializedState);      } @@ -5016,6 +5629,13 @@ bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {              break;          } +        case ACodec::kWhatSetInputSurface: +        { +            onSetInputSurface(msg); +            handled = true; +            break; +        } +          case 
ACodec::kWhatStart:          {              onStart(); @@ -5057,13 +5677,15 @@ bool ACodec::LoadedState::onConfigureComponent(          const sp<AMessage> &msg) {      ALOGV("onConfigureComponent"); -    CHECK(mCodec->mNode != NULL); +    CHECK(mCodec->mNode != 0); +    status_t err = OK;      AString mime; -    CHECK(msg->findString("mime", &mime)); - -    status_t err = mCodec->configureCodec(mime.c_str(), msg); - +    if (!msg->findString("mime", &mime)) { +        err = BAD_VALUE; +    } else { +        err = mCodec->configureCodec(mime.c_str(), msg); +    }      if (err != OK) {          ALOGE("[%s] configureCodec returning error %d",                mCodec->mComponentName.c_str(), err); @@ -5083,20 +5705,10 @@ bool ACodec::LoadedState::onConfigureComponent(      return true;  } -void ACodec::LoadedState::onCreateInputSurface( -        const sp<AMessage> & /* msg */) { -    ALOGV("onCreateInputSurface"); - -    sp<AMessage> notify = mCodec->mNotify->dup(); -    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); - -    sp<IGraphicBufferProducer> bufferProducer; -    status_t err; - -    err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput, -            &bufferProducer); +status_t ACodec::LoadedState::setupInputSurface() { +    status_t err = OK; -    if (err == OK && mCodec->mRepeatFrameDelayUs > 0ll) { +    if (mCodec->mRepeatFrameDelayUs > 0ll) {          err = mCodec->mOMX->setInternalOption(                  mCodec->mNode,                  kPortIndexInput, @@ -5109,10 +5721,11 @@ void ACodec::LoadedState::onCreateInputSurface(                    "frames (err %d)",                    mCodec->mComponentName.c_str(),                    err); +            return err;          }      } -    if (err == OK && mCodec->mMaxPtsGapUs > 0ll) { +    if (mCodec->mMaxPtsGapUs > 0ll) {          err = mCodec->mOMX->setInternalOption(                  mCodec->mNode,                  kPortIndexInput, @@ -5124,10 +5737,27 @@ void 
ACodec::LoadedState::onCreateInputSurface(              ALOGE("[%s] Unable to configure max timestamp gap (err %d)",                      mCodec->mComponentName.c_str(),                      err); +            return err; +        } +    } + +    if (mCodec->mMaxFps > 0) { +        err = mCodec->mOMX->setInternalOption( +                mCodec->mNode, +                kPortIndexInput, +                IOMX::INTERNAL_OPTION_MAX_FPS, +                &mCodec->mMaxFps, +                sizeof(mCodec->mMaxFps)); + +        if (err != OK) { +            ALOGE("[%s] Unable to configure max fps (err %d)", +                    mCodec->mComponentName.c_str(), +                    err); +            return err;          }      } -    if (err == OK && mCodec->mTimePerCaptureUs > 0ll +    if (mCodec->mTimePerCaptureUs > 0ll              && mCodec->mTimePerFrameUs > 0ll) {          int64_t timeLapse[2];          timeLapse[0] = mCodec->mTimePerFrameUs; @@ -5143,10 +5773,11 @@ void ACodec::LoadedState::onCreateInputSurface(              ALOGE("[%s] Unable to configure time lapse (err %d)",                      mCodec->mComponentName.c_str(),                      err); +            return err;          }      } -    if (err == OK && mCodec->mCreateInputBuffersSuspended) { +    if (mCodec->mCreateInputBuffersSuspended) {          bool suspend = true;          err = mCodec->mOMX->setInternalOption(                  mCodec->mNode, @@ -5159,9 +5790,36 @@ void ACodec::LoadedState::onCreateInputSurface(              ALOGE("[%s] Unable to configure option to suspend (err %d)",                    mCodec->mComponentName.c_str(),                    err); +            return err;          }      } +    uint32_t usageBits; +    if (mCodec->mOMX->getParameter( +            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, +            &usageBits, sizeof(usageBits)) == OK) { +        mCodec->mInputFormat->setInt32( +                "using-sw-read-often", !!(usageBits & 
GRALLOC_USAGE_SW_READ_OFTEN)); +    } + +    return OK; +} + +void ACodec::LoadedState::onCreateInputSurface( +        const sp<AMessage> & /* msg */) { +    ALOGV("onCreateInputSurface"); + +    sp<AMessage> notify = mCodec->mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); + +    sp<IGraphicBufferProducer> bufferProducer; +    status_t err = mCodec->mOMX->createInputSurface( +            mCodec->mNode, kPortIndexInput, &bufferProducer, &mCodec->mInputMetadataType); + +    if (err == OK) { +        err = setupInputSurface(); +    } +      if (err == OK) {          notify->setObject("input-surface",                  new BufferProducerWrapper(bufferProducer)); @@ -5176,14 +5834,45 @@ void ACodec::LoadedState::onCreateInputSurface(      notify->post();  } +void ACodec::LoadedState::onSetInputSurface( +        const sp<AMessage> &msg) { +    ALOGV("onSetInputSurface"); + +    sp<AMessage> notify = mCodec->mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); + +    sp<RefBase> obj; +    CHECK(msg->findObject("input-surface", &obj)); +    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); + +    status_t err = mCodec->mOMX->setInputSurface( +            mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), +            &mCodec->mInputMetadataType); + +    if (err == OK) { +        err = setupInputSurface(); +    } + +    if (err != OK) { +        // Can't use mCodec->signalError() here -- MediaCodec won't forward +        // the error through because it's in the "configured" state.  We +        // send a kWhatInputSurfaceAccepted with an error value instead. 
+        ALOGE("[%s] onSetInputSurface returning error %d", +                mCodec->mComponentName.c_str(), err); +        notify->setInt32("err", err); +    } +    notify->post(); +} +  void ACodec::LoadedState::onStart() {      ALOGV("onStart"); -    CHECK_EQ(mCodec->mOMX->sendCommand( -                mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle), -             (status_t)OK); - -    mCodec->changeState(mCodec->mLoadedToIdleState); +    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); +    if (err != OK) { +        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); +    } else { +        mCodec->changeState(mCodec->mLoadedToIdleState); +    }  }  //////////////////////////////////////////////////////////////////////////////// @@ -5257,14 +5946,25 @@ bool ACodec::LoadedToIdleState::onOMXEvent(      switch (event) {          case OMX_EventCmdComplete:          { -            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); -            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); +            status_t err = OK; +            if (data1 != (OMX_U32)OMX_CommandStateSet +                    || data2 != (OMX_U32)OMX_StateIdle) { +                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", +                        asString((OMX_COMMANDTYPE)data1), data1, +                        asString((OMX_STATETYPE)data2), data2); +                err = FAILED_TRANSACTION; +            } -            CHECK_EQ(mCodec->mOMX->sendCommand( -                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting), -                     (status_t)OK); +            if (err == OK) { +                err = mCodec->mOMX->sendCommand( +                    mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); +            } -            mCodec->changeState(mCodec->mIdleToExecutingState); +            if (err != OK) { +                mCodec->signalError(OMX_ErrorUndefined, 
makeNoSideEffectStatus(err)); +            } else { +                mCodec->changeState(mCodec->mIdleToExecutingState); +            }              return true;          } @@ -5325,8 +6025,14 @@ bool ACodec::IdleToExecutingState::onOMXEvent(      switch (event) {          case OMX_EventCmdComplete:          { -            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); -            CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting); +            if (data1 != (OMX_U32)OMX_CommandStateSet +                    || data2 != (OMX_U32)OMX_StateExecuting) { +                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", +                        asString((OMX_COMMANDTYPE)data1), data1, +                        asString((OMX_STATETYPE)data2), data2); +                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +                return true; +            }              mCodec->mExecutingState->resume();              mCodec->changeState(mCodec->mExecutingState); @@ -5358,59 +6064,77 @@ void ACodec::ExecutingState::submitOutputMetaBuffers() {          BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);          if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { -            if (mCodec->submitOutputMetaDataBuffer() != OK) +            if (mCodec->submitOutputMetadataBuffer() != OK)                  break;          }      }      // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** -    mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround(); +    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();  }  void ACodec::ExecutingState::submitRegularOutputBuffers() { +    bool failed = false;      for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {          BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);          if (mCodec->mNativeWindow != NULL) { -            CHECK(info->mStatus == BufferInfo::OWNED_BY_US -                    || info->mStatus == 
BufferInfo::OWNED_BY_NATIVE_WINDOW); +            if (info->mStatus != BufferInfo::OWNED_BY_US +                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { +                ALOGE("buffers should be owned by us or the surface"); +                failed = true; +                break; +            }              if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {                  continue;              }          } else { -            CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); +            if (info->mStatus != BufferInfo::OWNED_BY_US) { +                ALOGE("buffers should be owned by us"); +                failed = true; +                break; +            }          } -        ALOGV("[%s] calling fillBuffer %p", -             mCodec->mComponentName.c_str(), info->mBufferID); +        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); -        CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), -                 (status_t)OK); +        info->checkWriteFence("submitRegularOutputBuffers"); +        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); +        info->mFenceFd = -1; +        if (err != OK) { +            failed = true; +            break; +        }          info->mStatus = BufferInfo::OWNED_BY_COMPONENT;      } + +    if (failed) { +        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +    }  }  void ACodec::ExecutingState::submitOutputBuffers() {      submitRegularOutputBuffers(); -    if (mCodec->mStoreMetaDataInOutputBuffers) { +    if (mCodec->storingMetadataInDecodedBuffers()) {          submitOutputMetaBuffers();      }  }  void ACodec::ExecutingState::resume() {      if (mActive) { -        ALOGV("[%s] We're already active, no need to resume.", -             mCodec->mComponentName.c_str()); - +        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());          
return;      }      submitOutputBuffers();      // Post all available input buffers -    CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u); +    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { +        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); +    } +      for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {          BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);          if (info->mStatus == BufferInfo::OWNED_BY_US) { @@ -5424,6 +6148,7 @@ void ACodec::ExecutingState::resume() {  void ACodec::ExecutingState::stateEntered() {      ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); +    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));      mCodec->processDeferredMessages();  } @@ -5443,11 +6168,16 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {              mActive = false; -            CHECK_EQ(mCodec->mOMX->sendCommand( -                        mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle), -                     (status_t)OK); - -            mCodec->changeState(mCodec->mExecutingToIdleState); +            status_t err = mCodec->mOMX->sendCommand( +                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); +            if (err != OK) { +                if (keepComponentAllocated) { +                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +                } +                // TODO: do some recovery here. 
+            } else { +                mCodec->changeState(mCodec->mExecutingToIdleState); +            }              handled = true;              break; @@ -5456,7 +6186,7 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {          case kWhatFlush:          {              ALOGV("[%s] ExecutingState flushing now " -                 "(codec owns %d/%d input, %d/%d output).", +                 "(codec owns %zu/%zu input, %zu/%zu output).",                      mCodec->mComponentName.c_str(),                      mCodec->countBuffersOwnedByComponent(kPortIndexInput),                      mCodec->mBuffers[kPortIndexInput].size(), @@ -5465,11 +6195,13 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {              mActive = false; -            CHECK_EQ(mCodec->mOMX->sendCommand( -                        mCodec->mNode, OMX_CommandFlush, OMX_ALL), -                     (status_t)OK); +            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); +            if (err != OK) { +                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +            } else { +                mCodec->changeState(mCodec->mFlushingState); +            } -            mCodec->changeState(mCodec->mFlushingState);              handled = true;              break;          } @@ -5518,13 +6250,13 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {          }          // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** -        case kWhatSubmitOutputMetaDataBufferIfEOS: +        case kWhatSubmitOutputMetadataBufferIfEOS:          {              if (mCodec->mPortEOS[kPortIndexInput] &&                      !mCodec->mPortEOS[kPortIndexOutput]) { -                status_t err = mCodec->submitOutputMetaDataBuffer(); +                status_t err = mCodec->submitOutputMetadataBuffer();                  if (err == OK) { -                    
mCodec->signalSubmitOutputMetaDataBufferIfEOS_workaround(); +                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();                  }              }              return true; @@ -5604,6 +6336,15 @@ status_t ACodec::setParameters(const sp<AMessage> &params) {          }      } +    float rate; +    if (params->findFloat("operating-rate", &rate) && rate > 0) { +        status_t err = setOperatingRate(rate, mIsVideo); +        if (err != OK) { +            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); +            return err; +        } +    } +      return OK;  } @@ -5618,6 +6359,11 @@ void ACodec::onSignalEndOfInputStream() {      notify->post();  } +bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { +    mCodec->onFrameRendered(mediaTimeUs, systemNano); +    return true; +} +  bool ACodec::ExecutingState::onOMXEvent(          OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {      switch (event) { @@ -5626,7 +6372,7 @@ bool ACodec::ExecutingState::onOMXEvent(              CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);              if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { -                mCodec->mMetaDataBuffersToSubmit = 0; +                mCodec->mMetadataBuffersToSubmit = 0;                  CHECK_EQ(mCodec->mOMX->sendCommand(                              mCodec->mNode,                              OMX_CommandPortDisable, kPortIndexOutput), @@ -5637,8 +6383,13 @@ bool ACodec::ExecutingState::onOMXEvent(                  mCodec->changeState(mCodec->mOutputPortSettingsChangedState);              } else if (data2 == OMX_IndexConfigCommonOutputCrop) {                  mCodec->mSentFormat = false; + +                if (mCodec->mTunneled) { +                    sp<AMessage> dummy = new AMessage(kWhatOutputBufferDrained, mCodec); +                    mCodec->sendFormatChange(dummy); +                }              } else { -                ALOGV("[%s] 
OMX_EventPortSettingsChanged 0x%08lx", +                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",                       mCodec->mComponentName.c_str(), data2);              } @@ -5705,31 +6456,46 @@ void ACodec::OutputPortSettingsChangedState::stateEntered() {           mCodec->mComponentName.c_str());  } +bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( +        int64_t mediaTimeUs, nsecs_t systemNano) { +    mCodec->onFrameRendered(mediaTimeUs, systemNano); +    return true; +} +  bool ACodec::OutputPortSettingsChangedState::onOMXEvent(          OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {      switch (event) {          case OMX_EventCmdComplete:          {              if (data1 == (OMX_U32)OMX_CommandPortDisable) { -                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); +                if (data2 != (OMX_U32)kPortIndexOutput) { +                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); +                    return false; +                } -                ALOGV("[%s] Output port now disabled.", -                        mCodec->mComponentName.c_str()); +                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str()); -                CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty()); -                mCodec->mDealer[kPortIndexOutput].clear(); +                status_t err = OK; +                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) { +                    ALOGE("disabled port should be empty, but has %zu buffers", +                            mCodec->mBuffers[kPortIndexOutput].size()); +                    err = FAILED_TRANSACTION; +                } else { +                    mCodec->mDealer[kPortIndexOutput].clear(); +                } -                CHECK_EQ(mCodec->mOMX->sendCommand( -                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput), -                         (status_t)OK); +                if (err == OK) { +    
                err = mCodec->mOMX->sendCommand( +                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput); +                } -                status_t err; -                if ((err = mCodec->allocateBuffersOnPort( -                                kPortIndexOutput)) != OK) { -                    ALOGE("Failed to allocate output port buffers after " -                         "port reconfiguration (error 0x%08x)", -                         err); +                if (err == OK) { +                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput); +                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port " +                            "reconfiguration: (%d)", err); +                } +                if (err != OK) {                      mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));                      // This is technically not correct, but appears to be @@ -5744,12 +6510,19 @@ bool ACodec::OutputPortSettingsChangedState::onOMXEvent(                  return true;              } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { -                CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); +                if (data2 != (OMX_U32)kPortIndexOutput) { +                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); +                    return false; +                }                  mCodec->mSentFormat = false; -                ALOGV("[%s] Output port now reenabled.", -                        mCodec->mComponentName.c_str()); +                if (mCodec->mTunneled) { +                    sp<AMessage> dummy = new AMessage(kWhatOutputBufferDrained, mCodec); +                    mCodec->sendFormatChange(dummy); +                } + +                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());                  if (mCodec->mExecutingState->active()) {                      mCodec->mExecutingState->submitOutputBuffers(); @@ 
-5783,7 +6556,7 @@ bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {          {              // Don't send me a flush request if you previously wanted me              // to shutdown. -            TRESPASS(); +            ALOGW("Ignoring flush request in ExecutingToIdleState");              break;          } @@ -5815,8 +6588,14 @@ bool ACodec::ExecutingToIdleState::onOMXEvent(      switch (event) {          case OMX_EventCmdComplete:          { -            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); -            CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); +            if (data1 != (OMX_U32)OMX_CommandStateSet +                    || data2 != (OMX_U32)OMX_StateIdle) { +                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", +                        asString((OMX_COMMANDTYPE)data1), data1, +                        asString((OMX_STATETYPE)data2), data2); +                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +                return true; +            }              mComponentNowIdle = true; @@ -5839,12 +6618,15 @@ bool ACodec::ExecutingToIdleState::onOMXEvent(  void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {      if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { -        CHECK_EQ(mCodec->mOMX->sendCommand( -                    mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded), -                 (status_t)OK); - -        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK); -        CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK); +        status_t err = mCodec->mOMX->sendCommand( +                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); +        if (err == OK) { +            err = mCodec->freeBuffersOnPort(kPortIndexInput); +            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); +            if (err == OK) { +                err = err2; +            } +        }          if 
((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)                  && mCodec->mNativeWindow != NULL) { @@ -5852,7 +6634,12 @@ void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {              // them has made it to the display.  This allows the OMX              // component teardown to zero out any protected buffers              // without the risk of scanning out one of those buffers. -            mCodec->pushBlankBuffersToNativeWindow(); +            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); +        } + +        if (err != OK) { +            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +            return;          }          mCodec->changeState(mCodec->mIdleToLoadedState); @@ -5895,7 +6682,7 @@ bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {          {              // Don't send me a flush request if you previously wanted me              // to shutdown. -            TRESPASS(); +            ALOGE("Got flush request in IdleToLoadedState");              break;          } @@ -5916,8 +6703,14 @@ bool ACodec::IdleToLoadedState::onOMXEvent(      switch (event) {          case OMX_EventCmdComplete:          { -            CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); -            CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded); +            if (data1 != (OMX_U32)OMX_CommandStateSet +                    || data2 != (OMX_U32)OMX_StateLoaded) { +                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", +                        asString((OMX_COMMANDTYPE)data1), data1, +                        asString((OMX_STATETYPE)data2), data2); +                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +                return true; +            }              mCodec->changeState(mCodec->mLoadedState); @@ -5968,28 +6761,41 @@ bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {  bool ACodec::FlushingState::onOMXEvent(          
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { -    ALOGV("[%s] FlushingState onOMXEvent(%d,%ld)", -            mCodec->mComponentName.c_str(), event, data1); +    ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", +            mCodec->mComponentName.c_str(), event, (OMX_S32)data1);      switch (event) {          case OMX_EventCmdComplete:          { -            CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush); +            if (data1 != (OMX_U32)OMX_CommandFlush) { +                ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", +                        asString((OMX_COMMANDTYPE)data1), data1, data2); +                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); +                return true; +            }              if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { -                CHECK(!mFlushComplete[data2]); +                if (mFlushComplete[data2]) { +                    ALOGW("Flush already completed for %s port", +                            data2 == kPortIndexInput ? 
"input" : "output"); +                    return true; +                }                  mFlushComplete[data2] = true; -                if (mFlushComplete[kPortIndexInput] -                        && mFlushComplete[kPortIndexOutput]) { +                if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) {                      changeStateIfWeOwnAllBuffers();                  } -            } else { -                CHECK_EQ(data2, OMX_ALL); -                CHECK(mFlushComplete[kPortIndexInput]); -                CHECK(mFlushComplete[kPortIndexOutput]); +            } else if (data2 == OMX_ALL) { +                if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { +                    ALOGW("received flush complete event for OMX_ALL before ports have been" +                            "flushed (%d/%d)", +                            mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); +                    return false; +                }                  changeStateIfWeOwnAllBuffers(); +            } else { +                ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2);              }              return true; @@ -5997,7 +6803,7 @@ bool ACodec::FlushingState::onOMXEvent(          case OMX_EventPortSettingsChanged:          { -            sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec->id()); +            sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);              msg->setInt32("type", omx_message::EVENT);              msg->setInt32("node", mCodec->mNode);              msg->setInt32("event", event); @@ -6039,6 +6845,8 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {          // the native window for rendering. Let's get those back as well.          
mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); +        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); +          sp<AMessage> notify = mCodec->mNotify->dup();          notify->setInt32("what", CodecBase::kWhatFlushCompleted);          notify->post(); diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp index 9aa7d95..f53d7f0 100644 --- a/media/libstagefright/AMRWriter.cpp +++ b/media/libstagefright/AMRWriter.cpp @@ -31,19 +31,6 @@  namespace android { -AMRWriter::AMRWriter(const char *filename) -    : mFd(-1), -      mInitCheck(NO_INIT), -      mStarted(false), -      mPaused(false), -      mResumed(false) { - -    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); -    if (mFd >= 0) { -        mInitCheck = OK; -    } -} -  AMRWriter::AMRWriter(int fd)      : mFd(dup(fd)),        mInitCheck(mFd < 0? NO_INIT: OK), diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 2629afc..2529aa7 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -12,6 +12,7 @@ LOCAL_SRC_FILES:=                         \          AudioPlayer.cpp                   \          AudioSource.cpp                   \          AwesomePlayer.cpp                 \ +        CallbackDataSource.cpp            \          CameraSource.cpp                  \          CameraSourceTimeLapse.cpp         \          ClockEstimator.cpp                \ @@ -22,6 +23,7 @@ LOCAL_SRC_FILES:=                         \          ESDS.cpp                          \          FileSource.cpp                    \          FLACExtractor.cpp                 \ +        FrameRenderTracker.cpp            \          HTTPBase.cpp                      \          JPEGSource.cpp                    \          MP3Extractor.cpp                  \ @@ -31,11 +33,14 @@ LOCAL_SRC_FILES:=                         \          MediaAdapter.cpp                  \          MediaBuffer.cpp       
            \          MediaBufferGroup.cpp              \ +        MediaClock.cpp                    \          MediaCodec.cpp                    \          MediaCodecList.cpp                \ +        MediaCodecListOverrides.cpp       \          MediaCodecSource.cpp              \          MediaDefs.cpp                     \          MediaExtractor.cpp                \ +        MediaSync.cpp                     \          MidiExtractor.cpp                 \          http/MediaHTTP.cpp                \          MediaMuxer.cpp                    \ @@ -46,17 +51,20 @@ LOCAL_SRC_FILES:=                         \          OMXClient.cpp                     \          OMXCodec.cpp                      \          OggExtractor.cpp                  \ +        ProcessInfo.cpp                   \          SampleIterator.cpp                \          SampleTable.cpp                   \          SkipCutBuffer.cpp                 \          StagefrightMediaScanner.cpp       \          StagefrightMetadataRetriever.cpp  \          SurfaceMediaSource.cpp            \ +        SurfaceUtils.cpp                  \          ThrottledSource.cpp               \          TimeSource.cpp                    \          TimedEventQueue.cpp               \          Utils.cpp                         \          VBRISeeker.cpp                    \ +        VideoFrameScheduler.cpp           \          WAVExtractor.cpp                  \          WVMExtractor.cpp                  \          XINGSeeker.cpp                    \ @@ -84,6 +92,7 @@ LOCAL_SHARED_LIBRARIES := \          libicuuc \          liblog \          libmedia \ +        libmediautils \          libnetd_client \          libopus \          libsonivox \ @@ -101,6 +110,7 @@ LOCAL_STATIC_LIBRARIES := \          libstagefright_color_conversion \          libstagefright_aacenc \          libstagefright_matroska \ +        libstagefright_mediafilter \          libstagefright_webm \          libstagefright_timedtext \          libvpx \ @@ 
-108,15 +118,23 @@ LOCAL_STATIC_LIBRARIES := \          libstagefright_mpeg2ts \          libstagefright_id3 \          libFLAC \ -        libmedia_helper +        libmedia_helper \  LOCAL_SHARED_LIBRARIES += \          libstagefright_enc_common \          libstagefright_avc_common \          libstagefright_foundation \ -        libdl +        libdl \ +        libRScpp \ + +LOCAL_CFLAGS += -Wno-multichar -Werror -Wno-error=deprecated-declarations -Wall + +# enable experiments only in userdebug and eng builds +ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT))) +LOCAL_CFLAGS += -DENABLE_STAGEFRIGHT_EXPERIMENTS +endif -LOCAL_CFLAGS += -Wno-multichar +LOCAL_CLANG := true  LOCAL_MODULE:= libstagefright diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp index e24824b..dd9d393 100644 --- a/media/libstagefright/AudioPlayer.cpp +++ b/media/libstagefright/AudioPlayer.cpp @@ -408,11 +408,22 @@ void AudioPlayer::notifyAudioEOS() {      }  } -status_t AudioPlayer::setPlaybackRatePermille(int32_t ratePermille) { +status_t AudioPlayer::setPlaybackRate(const AudioPlaybackRate &rate) {      if (mAudioSink.get() != NULL) { -        return mAudioSink->setPlaybackRatePermille(ratePermille); +        return mAudioSink->setPlaybackRate(rate);      } else if (mAudioTrack != 0){ -        return mAudioTrack->setSampleRate(ratePermille * mSampleRate / 1000); +        return mAudioTrack->setPlaybackRate(rate); +    } else { +        return NO_INIT; +    } +} + +status_t AudioPlayer::getPlaybackRate(AudioPlaybackRate *rate /* nonnull */) { +    if (mAudioSink.get() != NULL) { +        return mAudioSink->getPlaybackRate(rate); +    } else if (mAudioTrack != 0) { +        *rate = mAudioTrack->getPlaybackRate(); +        return OK;      } else {          return NO_INIT;      } diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp index 804f131..3505844 100644 --- a/media/libstagefright/AudioSource.cpp +++ 
b/media/libstagefright/AudioSource.cpp @@ -50,14 +50,19 @@ static void AudioRecordCallbackFunction(int event, void *user, void *info) {  }  AudioSource::AudioSource( -        audio_source_t inputSource, uint32_t sampleRate, uint32_t channelCount) +        audio_source_t inputSource, const String16 &opPackageName, +        uint32_t sampleRate, uint32_t channelCount, uint32_t outSampleRate)      : mStarted(false),        mSampleRate(sampleRate), +      mOutSampleRate(outSampleRate > 0 ? outSampleRate : sampleRate),        mPrevSampleTimeUs(0), +      mFirstSampleTimeUs(-1ll),        mNumFramesReceived(0),        mNumClientOwnedBuffers(0) { -    ALOGV("sampleRate: %d, channelCount: %d", sampleRate, channelCount); +    ALOGV("sampleRate: %u, outSampleRate: %u, channelCount: %u", +            sampleRate, outSampleRate, channelCount);      CHECK(channelCount == 1 || channelCount == 2); +    CHECK(sampleRate > 0);      size_t minFrameCount;      status_t status = AudioRecord::getMinFrameCount(&minFrameCount, @@ -78,11 +83,15 @@ AudioSource::AudioSource(          mRecord = new AudioRecord(                      inputSource, sampleRate, AUDIO_FORMAT_PCM_16_BIT,                      audio_channel_in_mask_from_count(channelCount), +                    opPackageName,                      (size_t) (bufCount * frameCount),                      AudioRecordCallbackFunction,                      this,                      frameCount /*notificationFrames*/);          mInitCheck = mRecord->initCheck(); +        if (mInitCheck != OK) { +            mRecord.clear(); +        }      } else {          mInitCheck = status;      } @@ -256,6 +265,15 @@ status_t AudioSource::read(              (int16_t *) buffer->data(), buffer->range_length() >> 1);      } +    if (mSampleRate != mOutSampleRate) { +        if (mFirstSampleTimeUs < 0) { +            mFirstSampleTimeUs = timeUs; +        } +        timeUs = mFirstSampleTimeUs + (timeUs - mFirstSampleTimeUs) +                * 
(int64_t)mSampleRate / (int64_t)mOutSampleRate; +        buffer->meta_data()->setInt64(kKeyTime, timeUs); +    } +      *out = buffer;      return OK;  } diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 87eef1e..4e6c2a6 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -113,11 +113,11 @@ struct AwesomeLocalRenderer : public AwesomeRenderer {          CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));          render((const uint8_t *)buffer->data() + buffer->range_offset(), -               buffer->range_length(), timeUs * 1000); +               buffer->range_length(), timeUs, timeUs * 1000);      } -    void render(const void *data, size_t size, int64_t timestampNs) { -        mTarget->render(data, size, timestampNs, NULL, mFormat); +    void render(const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs) { +        (void)mTarget->render(data, size, mediaTimeUs, renderTimeNs, NULL, mFormat);      }  protected: @@ -241,6 +241,8 @@ AwesomePlayer::AwesomePlayer()      mClockEstimator = new WindowedLinearFitEstimator(); +    mPlaybackSettings = AUDIO_PLAYBACK_RATE_DEFAULT; +      reset();  } @@ -360,7 +362,7 @@ status_t AwesomePlayer::setDataSource(      return setDataSource_l(dataSource);  } -status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source) { +status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source __unused) {      return INVALID_OPERATION;  } @@ -422,7 +424,7 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {      mBitrate = totalBitRate; -    ALOGV("mBitrate = %lld bits/sec", mBitrate); +    ALOGV("mBitrate = %lld bits/sec", (long long)mBitrate);      {          Mutex::Autolock autoLock(mStatsLock); @@ -1009,6 +1011,10 @@ status_t AwesomePlayer::play_l() {                  return err;              }          } + +        if (mAudioPlayer != NULL) { +            
mAudioPlayer->setPlaybackRate(mPlaybackSettings); +        }      }      if (mTimeSource == NULL && mAudioPlayer == NULL) { @@ -1131,6 +1137,10 @@ status_t AwesomePlayer::startAudioPlayer_l(bool sendErrorNotification) {      }      if (err == OK) { +        err = mAudioPlayer->setPlaybackRate(mPlaybackSettings); +    } + +    if (err == OK) {          modifyFlags(AUDIO_RUNNING, SET);          mWatchForAudioEOS = true; @@ -1722,7 +1732,7 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {      // we are now resuming.  Signal new position to media time provider.      // Cannot signal another SEEK_COMPLETE, as existing clients may not expect      // multiple SEEK_COMPLETE responses to a single seek() request. -    if (mSeekNotificationSent && abs(mSeekTimeUs - videoTimeUs) > 10000) { +    if (mSeekNotificationSent && llabs((long long)(mSeekTimeUs - videoTimeUs)) > 10000) {          // notify if we are resuming more than 10ms away from desired seek time          notifyListener_l(MEDIA_SKIPPED);      } @@ -2358,7 +2368,7 @@ status_t AwesomePlayer::finishSetDataSource_l() {                          }                          CHECK_GE(metaDataSize, 0ll); -                        ALOGV("metaDataSize = %lld bytes", metaDataSize); +                        ALOGV("metaDataSize = %lld bytes", (long long)metaDataSize);                      }                      usleep(200000); @@ -2553,14 +2563,6 @@ status_t AwesomePlayer::setParameter(int key, const Parcel &request) {          {              return setCacheStatCollectFreq(request);          } -        case KEY_PARAMETER_PLAYBACK_RATE_PERMILLE: -        { -            if (mAudioPlayer != NULL) { -                return mAudioPlayer->setPlaybackRatePermille(request.readInt32()); -            } else { -                return NO_INIT; -            } -        }          default:          {              return ERROR_UNSUPPORTED; @@ -2597,6 +2599,58 @@ status_t AwesomePlayer::getParameter(int key, Parcel *reply) {     
 }  } +status_t AwesomePlayer::setPlaybackSettings(const AudioPlaybackRate &rate) { +    Mutex::Autolock autoLock(mLock); +    // cursory sanity check for non-audio and paused cases +    if ((rate.mSpeed != 0.f && rate.mSpeed < AUDIO_TIMESTRETCH_SPEED_MIN) +        || rate.mSpeed > AUDIO_TIMESTRETCH_SPEED_MAX +        || rate.mPitch < AUDIO_TIMESTRETCH_SPEED_MIN +        || rate.mPitch > AUDIO_TIMESTRETCH_SPEED_MAX) { +        return BAD_VALUE; +    } + +    status_t err = OK; +    if (rate.mSpeed == 0.f) { +        if (mFlags & PLAYING) { +            modifyFlags(CACHE_UNDERRUN, CLEAR); // same as pause +            err = pause_l(); +        } +        if (err == OK) { +            // save settings (using old speed) in case player is resumed +            AudioPlaybackRate newRate = rate; +            newRate.mSpeed = mPlaybackSettings.mSpeed; +            mPlaybackSettings = newRate; +        } +        return err; +    } +    if (mAudioPlayer != NULL) { +        err = mAudioPlayer->setPlaybackRate(rate); +    } +    if (err == OK) { +        mPlaybackSettings = rate; +        if (!(mFlags & PLAYING)) { +            play_l(); +        } +    } +    return err; +} + +status_t AwesomePlayer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) { +    if (mAudioPlayer != NULL) { +        status_t err = mAudioPlayer->getPlaybackRate(rate); +        if (err == OK) { +            mPlaybackSettings = *rate; +            Mutex::Autolock autoLock(mLock); +            if (!(mFlags & PLAYING)) { +                rate->mSpeed = 0.f; +            } +        } +        return err; +    } +    *rate = mPlaybackSettings; +    return OK; +} +  status_t AwesomePlayer::getTrackInfo(Parcel *reply) const {      Mutex::Autolock autoLock(mLock);      size_t trackCount = mExtractor->countTracks(); diff --git a/media/libstagefright/CallbackDataSource.cpp b/media/libstagefright/CallbackDataSource.cpp new file mode 100644 index 0000000..e17fdf8 --- /dev/null +++ 
b/media/libstagefright/CallbackDataSource.cpp @@ -0,0 +1,160 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "CallbackDataSource" +#include <utils/Log.h> + +#include "include/CallbackDataSource.h" + +#include <binder/IMemory.h> +#include <media/IDataSource.h> +#include <media/stagefright/foundation/ADebug.h> + +#include <algorithm> + +namespace android { + +CallbackDataSource::CallbackDataSource( +    const sp<IDataSource>& binderDataSource) +    : mIDataSource(binderDataSource) { +    // Set up the buffer to read into. +    mMemory = mIDataSource->getIMemory(); +} + +CallbackDataSource::~CallbackDataSource() { +    ALOGV("~CallbackDataSource"); +    mIDataSource->close(); +} + +status_t CallbackDataSource::initCheck() const { +    if (mMemory == NULL) { +        return UNKNOWN_ERROR; +    } +    return OK; +} + +ssize_t CallbackDataSource::readAt(off64_t offset, void* data, size_t size) { +    if (mMemory == NULL) { +        return -1; +    } + +    // IDataSource can only read up to mMemory->size() bytes at a time, but this +    // method should be able to read any number of bytes, so read in a loop. 
+    size_t totalNumRead = 0; +    size_t numLeft = size; +    const size_t bufferSize = mMemory->size(); + +    while (numLeft > 0) { +        size_t numToRead = std::min(numLeft, bufferSize); +        ssize_t numRead = +            mIDataSource->readAt(offset + totalNumRead, numToRead); +        // A negative return value represents an error. Pass it on. +        if (numRead < 0) { +            return numRead; +        } +        // A zero return value signals EOS. Return the bytes read so far. +        if (numRead == 0) { +            return totalNumRead; +        } +        if ((size_t)numRead > numToRead) { +            return ERROR_OUT_OF_RANGE; +        } +        CHECK(numRead >= 0 && (size_t)numRead <= bufferSize); +        memcpy(((uint8_t*)data) + totalNumRead, mMemory->pointer(), numRead); +        numLeft -= numRead; +        totalNumRead += numRead; +    } + +    return totalNumRead; +} + +status_t CallbackDataSource::getSize(off64_t *size) { +    status_t err = mIDataSource->getSize(size); +    if (err != OK) { +        return err; +    } +    if (*size < 0) { +        // IDataSource will set size to -1 to indicate unknown size, but +        // DataSource returns ERROR_UNSUPPORTED for that. +        return ERROR_UNSUPPORTED; +    } +    return OK; +} + +TinyCacheSource::TinyCacheSource(const sp<DataSource>& source) +    : mSource(source), mCachedOffset(0), mCachedSize(0) { +} + +status_t TinyCacheSource::initCheck() const { +    return mSource->initCheck(); +} + +ssize_t TinyCacheSource::readAt(off64_t offset, void* data, size_t size) { +    if (size >= kCacheSize) { +        return mSource->readAt(offset, data, size); +    } + +    // Check if the cache satisfies the read. 
+    if (mCachedOffset <= offset +            && offset < (off64_t) (mCachedOffset + mCachedSize)) { +        if (offset + size <= mCachedOffset + mCachedSize) { +            memcpy(data, &mCache[offset - mCachedOffset], size); +            return size; +        } else { +            // If the cache hits only partially, flush the cache and read the +            // remainder. + +            // This value is guaranteed to be greater than 0 because of the +            // enclosing if statement. +            const ssize_t remaining = mCachedOffset + mCachedSize - offset; +            memcpy(data, &mCache[offset - mCachedOffset], remaining); +            const ssize_t readMore = readAt(offset + remaining, +                    (uint8_t*)data + remaining, size - remaining); +            if (readMore < 0) { +                return readMore; +            } +            return remaining + readMore; +        } +    } + +    // Fill the cache and copy to the caller. +    const ssize_t numRead = mSource->readAt(offset, mCache, kCacheSize); +    if (numRead <= 0) { +        return numRead; +    } +    if ((size_t)numRead > kCacheSize) { +        return ERROR_OUT_OF_RANGE; +    } + +    mCachedSize = numRead; +    mCachedOffset = offset; +    CHECK(mCachedSize <= kCacheSize && mCachedOffset >= 0); +    const size_t numToReturn = std::min(size, (size_t)numRead); +    memcpy(data, mCache, numToReturn); + +    return numToReturn; +} + +status_t TinyCacheSource::getSize(off64_t *size) { +    return mSource->getSize(size); +} + +uint32_t TinyCacheSource::flags() { +    return mSource->flags(); +} + +} // namespace android diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index ad12bdd..66280da 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -608,6 +608,16 @@ status_t CameraSource::startCameraRecording() {          }      } +    err = mCamera->sendCommand( +        CAMERA_CMD_SET_VIDEO_FORMAT, 
mEncoderFormat, mEncoderDataSpace); + +    // This could happen for CameraHAL1 clients; thus the failure is +    // not a fatal error +    if (err != OK) { +        ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d", +                mEncoderFormat, mEncoderDataSpace, err); +    } +      err = OK;      if (mCameraFlags & FLAGS_HOT_CAMERA) {          mCamera->unlock(); @@ -645,6 +655,9 @@ status_t CameraSource::start(MetaData *meta) {      mStartTimeUs = 0;      mNumInputBuffers = 0; +    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; +    mEncoderDataSpace = HAL_DATASPACE_BT709; +      if (meta) {          int64_t startTimeUs;          if (meta->findInt64(kKeyTime, &startTimeUs)) { @@ -656,6 +669,14 @@ status_t CameraSource::start(MetaData *meta) {              CHECK_GT(nBuffers, 0);              mNumInputBuffers = nBuffers;          } + +        // apply encoder color format if specified +        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) { +            ALOGV("Using encoder format: %#x", mEncoderFormat); +        } +        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) { +            ALOGV("Using encoder data space: %#x", mEncoderDataSpace); +        }      }      status_t err; @@ -851,22 +872,15 @@ status_t CameraSource::read(  }  void CameraSource::dataCallbackTimestamp(int64_t timestampUs, -        int32_t msgType, const sp<IMemory> &data) { -    ALOGV("dataCallbackTimestamp: timestamp %" PRId64 " us", timestampUs); +        int32_t msgType __unused, const sp<IMemory> &data) { +    ALOGV("dataCallbackTimestamp: timestamp %lld us", (long long)timestampUs);      Mutex::Autolock autoLock(mLock);      if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) { -        ALOGV("Drop frame at %" PRId64 "/%" PRId64 " us", timestampUs, mStartTimeUs); +        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);          releaseOneRecordingFrame(data);          
return;      } -    if (mNumFramesReceived > 0) { -        CHECK(timestampUs > mLastFrameTimestampUs); -        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) { -            ++mNumGlitches; -        } -    } -      // May need to skip frame or modify timestamp. Currently implemented      // by the subclass CameraSourceTimeLapse.      if (skipCurrentFrame(timestampUs)) { @@ -874,6 +888,18 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,          return;      } +    if (mNumFramesReceived > 0) { +        if (timestampUs <= mLastFrameTimestampUs) { +            ALOGW("Dropping frame with backward timestamp %lld (last %lld)", +                    (long long)timestampUs, (long long)mLastFrameTimestampUs); +            releaseOneRecordingFrame(data); +            return; +        } +        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) { +            ++mNumGlitches; +        } +    } +      mLastFrameTimestampUs = timestampUs;      if (mNumFramesReceived == 0) {          mFirstFrameTimeUs = timestampUs; @@ -913,7 +939,7 @@ void CameraSource::ProxyListener::dataCallbackTimestamp(      mSource->dataCallbackTimestamp(timestamp / 1000, msgType, dataPtr);  } -void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who) { +void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {      ALOGI("Camera recording proxy died");  } diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp index f7dcf35..75ef288 100644 --- a/media/libstagefright/DataSource.cpp +++ b/media/libstagefright/DataSource.cpp @@ -19,6 +19,7 @@  #include "include/AMRExtractor.h"  #include "include/AACExtractor.h" +#include "include/CallbackDataSource.h"  #include "include/DRMExtractor.h"  #include "include/FLACExtractor.h"  #include "include/HTTPBase.h" @@ -281,6 +282,10 @@ sp<DataSource> DataSource::CreateMediaHTTP(const sp<IMediaHTTPService> &httpServ      }  } +sp<DataSource> 
DataSource::CreateFromIDataSource(const sp<IDataSource> &source) { +    return new TinyCacheSource(new CallbackDataSource(source)); +} +  String8 DataSource::getMIMEType() const {      return String8("application/octet-stream");  } diff --git a/media/libstagefright/FLACExtractor.cpp b/media/libstagefright/FLACExtractor.cpp index fa7251c..89a91f7 100644 --- a/media/libstagefright/FLACExtractor.cpp +++ b/media/libstagefright/FLACExtractor.cpp @@ -651,10 +651,10 @@ MediaBuffer *FLACParser::readBuffer(bool doSeek, FLAC__uint64 sample)      if (doSeek) {          // We implement the seek callback, so this works without explicit flush          if (!FLAC__stream_decoder_seek_absolute(mDecoder, sample)) { -            ALOGE("FLACParser::readBuffer seek to sample %llu failed", sample); +            ALOGE("FLACParser::readBuffer seek to sample %lld failed", (long long)sample);              return NULL;          } -        ALOGV("FLACParser::readBuffer seek to sample %llu succeeded", sample); +        ALOGV("FLACParser::readBuffer seek to sample %lld succeeded", (long long)sample);      } else {          if (!FLAC__stream_decoder_process_single(mDecoder)) {              ALOGE("FLACParser::readBuffer process_single failed"); @@ -674,7 +674,10 @@ MediaBuffer *FLACParser::readBuffer(bool doSeek, FLAC__uint64 sample)      if (mWriteHeader.sample_rate != getSampleRate() ||          mWriteHeader.channels != getChannels() ||          mWriteHeader.bits_per_sample != getBitsPerSample()) { -        ALOGE("FLACParser::readBuffer write changed parameters mid-stream"); +        ALOGE("FLACParser::readBuffer write changed parameters mid-stream: %d/%d/%d -> %d/%d/%d", +                getSampleRate(), getChannels(), getBitsPerSample(), +                mWriteHeader.sample_rate, mWriteHeader.channels, mWriteHeader.bits_per_sample); +        return NULL;      }      // acquire a media buffer      CHECK(mGroup != NULL); diff --git a/media/libstagefright/FileSource.cpp 
b/media/libstagefright/FileSource.cpp index a7ca3da..565f156 100644 --- a/media/libstagefright/FileSource.cpp +++ b/media/libstagefright/FileSource.cpp @@ -14,6 +14,10 @@   * limitations under the License.   */ +//#define LOG_NDEBUG 0 +#define LOG_TAG "FileSource" +#include <utils/Log.h> +  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/FileSource.h>  #include <sys/types.h> @@ -107,7 +111,7 @@ ssize_t FileSource::readAt(off64_t offset, void *data, size_t size) {     } else {          off64_t result = lseek64(mFd, offset + mOffset, SEEK_SET);          if (result == -1) { -            ALOGE("seek to %lld failed", offset + mOffset); +            ALOGE("seek to %lld failed", (long long)(offset + mOffset));              return UNKNOWN_ERROR;          } diff --git a/media/libstagefright/FrameRenderTracker.cpp b/media/libstagefright/FrameRenderTracker.cpp new file mode 100644 index 0000000..917870f --- /dev/null +++ b/media/libstagefright/FrameRenderTracker.cpp @@ -0,0 +1,191 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "FrameRenderTracker" + +#include <inttypes.h> +#include <gui/Surface.h> + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/FrameRenderTracker.h> + +namespace android { + +FrameRenderTracker::FrameRenderTracker() +    : mLastRenderTimeNs(-1), +      mComponentName("unknown component") { +} + +FrameRenderTracker::~FrameRenderTracker() { +} + +void FrameRenderTracker::setComponentName(const AString &componentName) { +    mComponentName = componentName; +} + +void FrameRenderTracker::clear(nsecs_t lastRenderTimeNs) { +    mRenderQueue.clear(); +    mLastRenderTimeNs = lastRenderTimeNs; +} + +void FrameRenderTracker::onFrameQueued( +        int64_t mediaTimeUs, const sp<GraphicBuffer> &graphicBuffer, const sp<Fence> &fence) { +    mRenderQueue.emplace_back(mediaTimeUs, graphicBuffer, fence); +} + +FrameRenderTracker::Info *FrameRenderTracker::updateInfoForDequeuedBuffer( +        ANativeWindowBuffer *buf, int fenceFd, int index) { +    if (index < 0) { +        return NULL; +    } + +    // see if this is a buffer that was to be rendered +    std::list<Info>::iterator renderInfo = mRenderQueue.end(); +    for (std::list<Info>::iterator it = mRenderQueue.begin(); +            it != mRenderQueue.end(); ++it) { +        if (it->mGraphicBuffer->handle == buf->handle) { +            renderInfo = it; +            break; +        } +    } +    if (renderInfo == mRenderQueue.end()) { +        // could have been canceled after fence has signaled +        return NULL; +    } + +    if (renderInfo->mIndex >= 0) { +        // buffer has been dequeued before, so there is nothing to do +        return NULL; +    } + +    // was this frame dropped (we could also infer this if the fence is invalid or a dup of +    // the queued fence; however, there is no way to figure that out.) 
+    if (fenceFd < 0) { +        // frame is new or was dropped +        mRenderQueue.erase(renderInfo); +        return NULL; +    } + +    // store dequeue fence and buffer index +    renderInfo->mFence = new Fence(::dup(fenceFd)); +    renderInfo->mIndex = index; +    return &*renderInfo; +} + +status_t FrameRenderTracker::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { +    // ensure monotonic timestamps +    if (mLastRenderTimeNs >= systemNano) { +        ALOGW("[%s] Ignoring out of order/stale system nano %lld for media time %lld from codec.", +                mComponentName.c_str(), (long long)systemNano, (long long)mediaTimeUs); +        return BAD_VALUE; +    } + +    nsecs_t now = systemTime(SYSTEM_TIME_MONOTONIC); +    if (systemNano > now) { +        ALOGW("[%s] Ignoring system nano %lld in the future for media time %lld from codec.", +                mComponentName.c_str(), (long long)systemNano, (long long)mediaTimeUs); +        return OK; +    } + +    mRenderQueue.emplace_back(mediaTimeUs, systemNano); +    mLastRenderTimeNs = systemNano; +    return OK; +} + +std::list<FrameRenderTracker::Info> FrameRenderTracker::checkFencesAndGetRenderedFrames( +        const FrameRenderTracker::Info *until, bool dropIncomplete) { +    std::list<Info> done; + +    // complete any frames queued prior to this and drop any incomplete ones if requested +    for (std::list<Info>::iterator it = mRenderQueue.begin(); +            it != mRenderQueue.end(); ) { +        bool drop = false; // whether to drop each frame +        if (it->mIndex < 0) { +            // frame not yet dequeued (or already rendered on a tunneled surface) +            drop = dropIncomplete; +        } else if (it->mFence != NULL) { +            // check if fence signaled +            nsecs_t signalTime = it->mFence->getSignalTime(); +            if (signalTime < 0) { // invalid fence +                drop = true; +            } else if (signalTime == INT64_MAX) { // unsignaled fence +   
             drop = dropIncomplete; +            } else { // signaled +                // save render time +                it->mFence.clear(); +                it->mRenderTimeNs = signalTime; +            } +        } +        bool foundFrame = (Info *)&*it == until; + +        // Return frames with signaled fences at the start of the queue, as they are +        // in submit order, and we don't have to wait for any in-between frames. +        // Also return any dropped frames. +        if (drop || (it->mFence == NULL && it == mRenderQueue.begin())) { +            // (unrendered) dropped frames have their mRenderTimeNs still set to -1 +            done.splice(done.end(), mRenderQueue, it++); +        } else { +            ++it; +        } +        if (foundFrame) { +            break; +        } +    } + +    return done; +} + +void FrameRenderTracker::untrackFrame(const FrameRenderTracker::Info *info, ssize_t index) { +    if (info == NULL && index == SSIZE_MAX) { +        // nothing to do +        return; +    } + +    for (std::list<Info>::iterator it = mRenderQueue.begin(); +            it != mRenderQueue.end(); ) { +        if (&*it == info) { +            mRenderQueue.erase(it++); +        } else { +            if (it->mIndex > index) { +                --(it->mIndex); +            } +            ++it; +        } +    } +} + +void FrameRenderTracker::dumpRenderQueue() const { +    ALOGI("[%s] Render Queue: (last render time: %lldns)", +            mComponentName.c_str(), (long long)mLastRenderTimeNs); +    for (std::list<Info>::const_iterator it = mRenderQueue.cbegin(); +            it != mRenderQueue.cend(); ++it) { +        if (it->mFence == NULL) { +            ALOGI("  RENDERED: handle: %p, media time: %lldus, index: %zd, render time: %lldns", +                    it->mGraphicBuffer == NULL ? 
NULL : it->mGraphicBuffer->handle, +                    (long long)it->mMediaTimeUs, it->mIndex, (long long)it->mRenderTimeNs); +        } else if (it->mIndex < 0) { +            ALOGI("    QUEUED: handle: %p, media time: %lldus, fence: %s", +                    it->mGraphicBuffer->handle, (long long)it->mMediaTimeUs, +                    it->mFence->isValid() ? "YES" : "NO"); +        } else { +            ALOGI("  DEQUEUED: handle: %p, media time: %lldus, index: %zd", +                    it->mGraphicBuffer->handle, (long long)it->mMediaTimeUs, it->mIndex); +        } +    } +} + +}  // namespace android diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp index 0c2ff15..068a77f 100644 --- a/media/libstagefright/HTTPBase.cpp +++ b/media/libstagefright/HTTPBase.cpp @@ -34,10 +34,10 @@ HTTPBase::HTTPBase()      : mNumBandwidthHistoryItems(0),        mTotalTransferTimeUs(0),        mTotalTransferBytes(0), +      mMaxBandwidthHistoryItems(100),        mPrevBandwidthMeasureTimeUs(0),        mPrevEstimatedBandWidthKbps(0), -      mBandWidthCollectFreqMs(5000), -      mMaxBandwidthHistoryItems(100) { +      mBandWidthCollectFreqMs(5000) {  }  void HTTPBase::addBandwidthMeasurement( @@ -75,7 +75,11 @@ void HTTPBase::addBandwidthMeasurement(  bool HTTPBase::estimateBandwidth(int32_t *bandwidth_bps) {      Mutex::Autolock autoLock(mLock); -    if (mNumBandwidthHistoryItems < 2) { +    // Do not do bandwidth estimation if we don't have enough samples, or +    // total bytes download are too small (<64K). +    // Bandwidth estimation from these samples can often shoot up and cause +    // unwanted bw adaption behaviors. 
+    if (mNumBandwidthHistoryItems < 2 || mTotalTransferBytes < 65536) {          return false;      } diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp index 4a63152..2e54e8c 100644 --- a/media/libstagefright/MP3Extractor.cpp +++ b/media/libstagefright/MP3Extractor.cpp @@ -82,7 +82,7 @@ static bool Resync(              *inout_pos += len;              ALOGV("skipped ID3 tag, new starting offset is %lld (0x%016llx)", -                 *inout_pos, *inout_pos); +                    (long long)*inout_pos, (long long)*inout_pos);          }          if (post_id3_pos != NULL) { @@ -103,9 +103,9 @@ static bool Resync(      uint8_t *tmp = buf;      do { -        if (pos >= *inout_pos + kMaxBytesChecked) { +        if (pos >= (off64_t)(*inout_pos + kMaxBytesChecked)) {              // Don't scan forever. -            ALOGV("giving up at offset %lld", pos); +            ALOGV("giving up at offset %lld", (long long)pos);              break;          } @@ -155,7 +155,7 @@ static bool Resync(              continue;          } -        ALOGV("found possible 1st frame at %lld (header = 0x%08x)", pos, header); +        ALOGV("found possible 1st frame at %lld (header = 0x%08x)", (long long)pos, header);          // We found what looks like a valid frame,          // now find its successors. 
@@ -186,7 +186,7 @@ static bool Resync(                  break;              } -            ALOGV("found subsequent frame #%d at %lld", j + 2, test_pos); +            ALOGV("found subsequent frame #%d at %lld", j + 2, (long long)test_pos);              test_pos += test_frame_size;          } @@ -282,6 +282,41 @@ MP3Extractor::MP3Extractor(      mFirstFramePos = pos;      mFixedHeader = header; +    mMeta = new MetaData; +    sp<XINGSeeker> seeker = XINGSeeker::CreateFromSource(mDataSource, mFirstFramePos); + +    if (seeker == NULL) { +        mSeeker = VBRISeeker::CreateFromSource(mDataSource, post_id3_pos); +    } else { +        mSeeker = seeker; +        int encd = seeker->getEncoderDelay(); +        int encp = seeker->getEncoderPadding(); +        if (encd != 0 || encp != 0) { +            mMeta->setInt32(kKeyEncoderDelay, encd); +            mMeta->setInt32(kKeyEncoderPadding, encp); +        } +    } + +    if (mSeeker != NULL) { +        // While it is safe to send the XING/VBRI frame to the decoder, this will +        // result in an extra 1152 samples being output. In addition, the bitrate +        // of the Xing header might not match the rest of the file, which could +        // lead to problems when seeking. The real first frame to decode is after +        // the XING/VBRI frame, so skip there. 
+        size_t frame_size; +        int sample_rate; +        int num_channels; +        int bitrate; +        GetMPEGAudioFrameSize( +                header, &frame_size, &sample_rate, &num_channels, &bitrate); +        pos += frame_size; +        if (!Resync(mDataSource, 0, &pos, &post_id3_pos, &header)) { +            // mInitCheck will remain NO_INIT +            return; +        } +        mFirstFramePos = pos; +        mFixedHeader = header; +    }      size_t frame_size;      int sample_rate; @@ -292,8 +327,6 @@ MP3Extractor::MP3Extractor(      unsigned layer = 4 - ((header >> 17) & 3); -    mMeta = new MetaData; -      switch (layer) {          case 1:              mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I); @@ -312,27 +345,6 @@ MP3Extractor::MP3Extractor(      mMeta->setInt32(kKeyBitRate, bitrate * 1000);      mMeta->setInt32(kKeyChannelCount, num_channels); -    sp<XINGSeeker> seeker = XINGSeeker::CreateFromSource(mDataSource, mFirstFramePos); - -    if (seeker == NULL) { -        mSeeker = VBRISeeker::CreateFromSource(mDataSource, post_id3_pos); -    } else { -        mSeeker = seeker; -        int encd = seeker->getEncoderDelay(); -        int encp = seeker->getEncoderPadding(); -        if (encd != 0 || encp != 0) { -            mMeta->setInt32(kKeyEncoderDelay, encd); -            mMeta->setInt32(kKeyEncoderPadding, encp); -        } -    } - -    if (mSeeker != NULL) { -        // While it is safe to send the XING/VBRI frame to the decoder, this will -        // result in an extra 1152 samples being output. The real first frame to -        // decode is after the XING/VBRI frame, so skip there. 
-        mFirstFramePos += frame_size; -    } -      int64_t durationUs;      if (mSeeker == NULL || !mSeeker->getDuration(&durationUs)) { diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp index 9856f92..ef07aa0 100644 --- a/media/libstagefright/MPEG2TSWriter.cpp +++ b/media/libstagefright/MPEG2TSWriter.cpp @@ -135,7 +135,7 @@ void MPEG2TSWriter::SourceInfo::start(const sp<AMessage> ¬ify) {      mNotify = notify; -    (new AMessage(kWhatStart, id()))->post(); +    (new AMessage(kWhatStart, this))->post();  }  void MPEG2TSWriter::SourceInfo::stop() { @@ -361,7 +361,7 @@ bool MPEG2TSWriter::SourceInfo::flushAACFrames() {  }  void MPEG2TSWriter::SourceInfo::readMore() { -    (new AMessage(kWhatRead, id()))->post(); +    (new AMessage(kWhatRead, this))->post();  }  void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) { @@ -480,19 +480,6 @@ MPEG2TSWriter::MPEG2TSWriter(int fd)      init();  } -MPEG2TSWriter::MPEG2TSWriter(const char *filename) -    : mFile(fopen(filename, "wb")), -      mWriteCookie(NULL), -      mWriteFunc(NULL), -      mStarted(false), -      mNumSourcesDone(0), -      mNumTSPacketsWritten(0), -      mNumTSPacketsBeforeMeta(0), -      mPATContinuityCounter(0), -      mPMTContinuityCounter(0) { -    init(); -} -  MPEG2TSWriter::MPEG2TSWriter(          void *cookie,          ssize_t (*write)(void *cookie, const void *data, size_t size)) @@ -565,7 +552,7 @@ status_t MPEG2TSWriter::start(MetaData * /* param */) {      for (size_t i = 0; i < mSources.size(); ++i) {          sp<AMessage> notify = -            new AMessage(kWhatSourceNotify, mReflector->id()); +            new AMessage(kWhatSourceNotify, mReflector);          notify->setInt32("source-index", i); diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index e2b1675..a76334f 100644..100755 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -358,6 
+358,8 @@ static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t  MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source)      : mMoofOffset(0), +      mMoofFound(false), +      mMdatFound(false),        mDataSource(source),        mInitCheck(NO_INIT),        mHasVideo(false), @@ -494,7 +496,9 @@ status_t MPEG4Extractor::readMetaData() {      off64_t offset = 0;      status_t err; -    while (true) { +    bool sawMoovOrSidx = false; + +    while (!(sawMoovOrSidx && (mMdatFound || mMoofFound))) {          off64_t orig_offset = offset;          err = parseChunk(&offset, 0); @@ -503,26 +507,12 @@ status_t MPEG4Extractor::readMetaData() {          } else if (offset <= orig_offset) {              // only continue parsing if the offset was advanced,              // otherwise we might end up in an infinite loop -            ALOGE("did not advance: 0x%lld->0x%lld", orig_offset, offset); +            ALOGE("did not advance: %lld->%lld", (long long)orig_offset, (long long)offset);              err = ERROR_MALFORMED;              break; -        } else if (err == OK) { -            continue; -        } - -        uint32_t hdr[2]; -        if (mDataSource->readAt(offset, hdr, 8) < 8) { -            break; -        } -        uint32_t chunk_type = ntohl(hdr[1]); -        if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { -            // store the offset of the first segment -            mMoofOffset = offset; -        } else if (chunk_type != FOURCC('m', 'd', 'a', 't')) { -            // keep parsing until we get to the data -            continue; +        } else if (err == UNKNOWN_ERROR) { +            sawMoovOrSidx = true;          } -        break;      }      if (mInitCheck == OK) { @@ -539,11 +529,11 @@ status_t MPEG4Extractor::readMetaData() {      CHECK_NE(err, (status_t)NO_INIT);      // copy pssh data into file metadata -    int psshsize = 0; +    uint64_t psshsize = 0;      for (size_t i = 0; i < mPssh.size(); i++) {          psshsize += 20 + 
mPssh[i].datalen;      } -    if (psshsize) { +    if (psshsize > 0 && psshsize <= UINT32_MAX) {          char *buf = (char*)malloc(psshsize);          char *ptr = buf;          for (size_t i = 0; i < mPssh.size(); i++) { @@ -753,6 +743,17 @@ static bool underMetaDataPath(const Vector<uint32_t> &path) {          && path[3] == FOURCC('i', 'l', 's', 't');  } +static bool underQTMetaPath(const Vector<uint32_t> &path, int32_t depth) { +    return path.size() >= 2 +            && path[0] == FOURCC('m', 'o', 'o', 'v') +            && path[1] == FOURCC('m', 'e', 't', 'a') +            && (depth == 2 +            || (depth == 3 +                    && (path[2] == FOURCC('h', 'd', 'l', 'r') +                    ||  path[2] == FOURCC('i', 'l', 's', 't') +                    ||  path[2] == FOURCC('k', 'e', 'y', 's')))); +} +  // Given a time in seconds since Jan 1 1904, produce a human-readable string.  static void convertTimeToDate(int64_t time_1904, String8 *s) {      time_t time_1970 = time_1904 - (((66 * 365 + 17) * 24) * 3600); @@ -764,7 +765,7 @@ static void convertTimeToDate(int64_t time_1904, String8 *s) {  }  status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { -    ALOGV("entering parseChunk %lld/%d", *offset, depth); +    ALOGV("entering parseChunk %lld/%d", (long long)*offset, depth);      uint32_t hdr[2];      if (mDataSource->readAt(*offset, hdr, 8) < 8) {          return ERROR_IO; @@ -808,7 +809,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {      char chunk[5];      MakeFourCCString(chunk_type, chunk); -    ALOGV("chunk: %s @ %lld, %d", chunk, *offset, depth); +    ALOGV("chunk: %s @ %lld, %d", chunk, (long long)*offset, depth);      if (kUseHexDump) {          static const char kWhitespace[] = "                                        "; @@ -868,6 +869,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('s', 'c', 'h', 'i'):          case FOURCC('e', 'd', 't', 's'):          { +        
    if (chunk_type == FOURCC('m', 'o', 'o', 'f') && !mMoofFound) { +                // store the offset of the first segment +                mMoofFound = true; +                mMoofOffset = *offset; +            } +              if (chunk_type == FOURCC('s', 't', 'b', 'l')) {                  ALOGV("sampleTable chunk is %" PRIu64 " bytes long.", chunk_size); @@ -882,6 +889,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                      }                  } +                if (mLastTrack == NULL) +                    return ERROR_MALFORMED; +                  mLastTrack->sampleTable = new SampleTable(mDataSource);              } @@ -1036,6 +1046,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              }              original_fourcc = ntohl(original_fourcc);              ALOGV("read original format: %d", original_fourcc); + +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc));              uint32_t num_channels = 0;              uint32_t sample_rate = 0; @@ -1091,6 +1105,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  return ERROR_IO;              } +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId);              mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize);              mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16); @@ -1125,7 +1142,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              }              pssh.datalen = ntohl(psshdatalen);              ALOGV("pssh data size: %d", pssh.datalen); -            if (pssh.datalen + 20 > chunk_size) { +            if (chunk_size < 20 || pssh.datalen > chunk_size - 20) {                  // pssh data length exceeds size of containing box          
        return ERROR_MALFORMED;              } @@ -1206,7 +1223,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                      duration = ntohl(duration32);                  }              } -            if (duration != 0) { +            if (duration != 0 && mLastTrack->timescale != 0) {                  mLastTrack->meta->setInt64(                          kKeyDuration, (duration * 1000000) / mLastTrack->timescale);              } @@ -1270,6 +1287,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  // display the timed text.                  // For encrypted files, there may also be more than one entry.                  const char *mime; + +                if (mLastTrack == NULL) +                    return ERROR_MALFORMED; +                  CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime));                  if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) &&                          strcasecmp(mime, "application/octet-stream")) { @@ -1316,6 +1337,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              uint16_t sample_size = U16_AT(&buffer[18]);              uint32_t sample_rate = U32_AT(&buffer[24]) >> 16; +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              if (chunk_type != FOURCC('e', 'n', 'c', 'a')) {                  // if the chunk type is enca, we'll get the type from the sinf/frma box later                  mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); @@ -1377,6 +1401,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              // printf("*** coding='%s' width=%d height=%d\n",              //        chunk, width, height); +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              if (chunk_type != FOURCC('e', 'n', 'c', 'v')) {                  // if the chunk type is encv, we'll get the type from the sinf/frma box later                  
mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type)); @@ -1402,6 +1429,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('s', 't', 'c', 'o'):          case FOURCC('c', 'o', '6', '4'):          { +            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) +                return ERROR_MALFORMED; +              status_t err =                  mLastTrack->sampleTable->setChunkOffsetParams(                          chunk_type, data_offset, chunk_data_size); @@ -1417,6 +1447,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('s', 't', 's', 'c'):          { +            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) +                return ERROR_MALFORMED; +              status_t err =                  mLastTrack->sampleTable->setSampleToChunkParams(                          data_offset, chunk_data_size); @@ -1433,6 +1466,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('s', 't', 's', 'z'):          case FOURCC('s', 't', 'z', '2'):          { +            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) +                return ERROR_MALFORMED; +              status_t err =                  mLastTrack->sampleTable->setSampleSizeParams(                          chunk_type, data_offset, chunk_data_size); @@ -1514,6 +1550,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('s', 't', 't', 's'):          { +            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) +                return ERROR_MALFORMED; +              *offset += chunk_size;              status_t err = @@ -1529,6 +1568,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('c', 't', 't', 's'):          { +            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) +                return ERROR_MALFORMED; +              
*offset += chunk_size;              status_t err = @@ -1544,6 +1586,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('s', 't', 's', 's'):          { +            if ((mLastTrack == NULL) || (mLastTrack->sampleTable == NULL)) +                return ERROR_MALFORMED; +              *offset += chunk_size;              status_t err = @@ -1557,13 +1602,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              break;          } -        // ©xyz +        // \xA9xyz          case FOURCC(0xA9, 'x', 'y', 'z'):          {              *offset += chunk_size; -            // Best case the total data length inside "©xyz" box -            // would be 8, for instance "©xyz" + "\x00\x04\x15\xc7" + "0+0/", +            // Best case the total data length inside "\xA9xyz" box +            // would be 8, for instance "\xA9xyz" + "\x00\x04\x15\xc7" + "0+0/",              // where "\x00\x04" is the text string length with value = 4,              // "\0x15\xc7" is the language code = en, and "0+0" is a              // location (string) value with longitude = 0 and latitude = 0. 
@@ -1616,6 +1661,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  return ERROR_MALFORMED;              } +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              mLastTrack->meta->setData(                      kKeyESDS, kTypeESDS, &buffer[4], chunk_data_size - 4); @@ -1633,7 +1681,18 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                      return err;                  }              } - +            if (mPath.size() >= 2 +                    && mPath[mPath.size() - 2] == FOURCC('m', 'p', '4', 'v')) { +                // Check if the video is MPEG2 +                ESDS esds(&buffer[4], chunk_data_size - 4); + +                uint8_t objectTypeIndication; +                if (esds.getObjectTypeIndication(&objectTypeIndication) == OK) { +                    if (objectTypeIndication >= 0x60 && objectTypeIndication <= 0x65) { +                        mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG2); +                    } +                } +            }              break;          } @@ -1648,6 +1707,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  return ERROR_IO;              } +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              mLastTrack->meta->setData(                      kKeyAVCC, kTypeAVCC, buffer->data(), chunk_data_size); @@ -1662,6 +1724,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  return ERROR_IO;              } +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              mLastTrack->meta->setData(                      kKeyHVCC, kTypeHVCC, buffer->data(), chunk_data_size); @@ -1686,7 +1751,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              char buffer[23];              if (chunk_data_size != 7 &&                  chunk_data_size != 23) { -    
            ALOGE("Incorrect D263 box size %lld", chunk_data_size); +                ALOGE("Incorrect D263 box size %lld", (long long)chunk_data_size);                  return ERROR_MALFORMED;              } @@ -1695,6 +1760,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  return ERROR_IO;              } +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              mLastTrack->meta->setData(kKeyD263, kTypeD263, buffer, chunk_data_size);              break; @@ -1702,31 +1770,35 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('m', 'e', 't', 'a'):          { -            uint8_t buffer[4]; -            if (chunk_data_size < (off64_t)sizeof(buffer)) { -                *offset += chunk_size; -                return ERROR_MALFORMED; -            } +            off64_t stop_offset = *offset + chunk_size; +            *offset = data_offset; +            bool isParsingMetaKeys = underQTMetaPath(mPath, 2); +            if (!isParsingMetaKeys) { +                uint8_t buffer[4]; +                if (chunk_data_size < (off64_t)sizeof(buffer)) { +                    *offset += chunk_size; +                    return ERROR_MALFORMED; +                } -            if (mDataSource->readAt( -                        data_offset, buffer, 4) < 4) { -                *offset += chunk_size; -                return ERROR_IO; -            } +                if (mDataSource->readAt( +                            data_offset, buffer, 4) < 4) { +                    *offset += chunk_size; +                    return ERROR_IO; +                } -            if (U32_AT(buffer) != 0) { -                // Should be version 0, flags 0. +                if (U32_AT(buffer) != 0) { +                    // Should be version 0, flags 0. 
-                // If it's not, let's assume this is one of those -                // apparently malformed chunks that don't have flags -                // and completely different semantics than what's -                // in the MPEG4 specs and skip it. -                *offset += chunk_size; -                return OK; +                    // If it's not, let's assume this is one of those +                    // apparently malformed chunks that don't have flags +                    // and completely different semantics than what's +                    // in the MPEG4 specs and skip it. +                    *offset += chunk_size; +                    return OK; +                } +                *offset +=  sizeof(buffer);              } -            off64_t stop_offset = *offset + chunk_size; -            *offset = data_offset + sizeof(buffer);              while (*offset < stop_offset) {                  status_t err = parseChunk(offset, depth + 1);                  if (err != OK) { @@ -1792,7 +1864,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  }                  duration = d32;              } -            if (duration != 0) { +            if (duration != 0 && mHeaderTimescale != 0) {                  mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);              } @@ -1841,7 +1913,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {                  return ERROR_MALFORMED;              } -            if (duration != 0) { +            if (duration != 0 && mHeaderTimescale != 0) {                  mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);              } @@ -1851,6 +1923,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('m', 'd', 'a', 't'):          {              ALOGV("mdat chunk, drm: %d", mIsDrm); + +            mMdatFound = true; +              if (!mIsDrm) {                  *offset += 
chunk_size;                  break; @@ -1867,12 +1942,19 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          {              *offset += chunk_size; +            if (underQTMetaPath(mPath, 3)) { +                break; +            } +              uint32_t buffer;              if (mDataSource->readAt(                          data_offset + 8, &buffer, 4) < 4) {                  return ERROR_IO;              } +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              uint32_t type = ntohl(buffer);              // For the 3GPP file format, the handler-type within the 'hdlr' box              // shall be 'text'. We also want to support 'sbtl' handler type @@ -1884,6 +1966,16 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              break;          } +        case FOURCC('k', 'e', 'y', 's'): +        { +            *offset += chunk_size; + +            if (underQTMetaPath(mPath, 3)) { +                parseQTMetaKey(data_offset, chunk_data_size); +            } +            break; +        } +          case FOURCC('t', 'r', 'e', 'x'):          {              *offset += chunk_size; @@ -1905,6 +1997,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          case FOURCC('t', 'x', '3', 'g'):          { +            if (mLastTrack == NULL) +                return ERROR_MALFORMED; +              uint32_t type;              const void *data;              size_t size = 0; @@ -1950,8 +2045,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {              *offset += chunk_size;              if (mFileMetaData != NULL) { -                ALOGV("chunk_data_size = %lld and data_offset = %lld", -                        chunk_data_size, data_offset); +                ALOGV("chunk_data_size = %" PRId64 " and data_offset = %" PRId64, +                      chunk_data_size, data_offset);                  if (chunk_data_size < 0 || 
static_cast<uint64_t>(chunk_data_size) >= SIZE_MAX - 1) {                      return ERROR_MALFORMED; @@ -2023,6 +2118,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {          default:          { +            // check if we're parsing 'ilst' for meta keys +            // if so, treat type as a number (key-id). +            if (underQTMetaPath(mPath, 3)) { +                parseQTMetaVal(chunk_type, data_offset, chunk_data_size); +            } +              *offset += chunk_size;              break;          } @@ -2058,6 +2159,8 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {          return ERROR_MALFORMED;      }      ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale); +    if (timeScale == 0) +        return ERROR_MALFORMED;      uint64_t earliestPresentationTime;      uint64_t firstOffset; @@ -2141,6 +2244,9 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {      uint64_t sidxDuration = total_duration * 1000000 / timeScale; +    if (mLastTrack == NULL) +        return ERROR_MALFORMED; +      int64_t metaDuration;      if (!mLastTrack->meta->findInt64(kKeyDuration, &metaDuration) || metaDuration == 0) {          mLastTrack->meta->setInt64(kKeyDuration, sidxDuration); @@ -2148,7 +2254,108 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {      return OK;  } +status_t MPEG4Extractor::parseQTMetaKey(off64_t offset, size_t size) { +    if (size < 8) { +        return ERROR_MALFORMED; +    } + +    uint32_t count; +    if (!mDataSource->getUInt32(offset + 4, &count)) { +        return ERROR_MALFORMED; +    } + +    if (mMetaKeyMap.size() > 0) { +        ALOGW("'keys' atom seen again, discarding existing entries"); +        mMetaKeyMap.clear(); +    } + +    off64_t keyOffset = offset + 8; +    off64_t stopOffset = offset + size; +    for (size_t i = 1; i <= count; i++) { +        if (keyOffset + 8 > stopOffset) { +            return ERROR_MALFORMED; +  
      } + +        uint32_t keySize; +        if (!mDataSource->getUInt32(keyOffset, &keySize) +                || keySize < 8 +                || keyOffset + keySize > stopOffset) { +            return ERROR_MALFORMED; +        } + +        uint32_t type; +        if (!mDataSource->getUInt32(keyOffset + 4, &type) +                || type != FOURCC('m', 'd', 't', 'a')) { +            return ERROR_MALFORMED; +        } + +        keySize -= 8; +        keyOffset += 8; + +        sp<ABuffer> keyData = new ABuffer(keySize); +        if (keyData->data() == NULL) { +            return ERROR_MALFORMED; +        } +        if (mDataSource->readAt( +                keyOffset, keyData->data(), keySize) < (ssize_t) keySize) { +            return ERROR_MALFORMED; +        } + +        AString key((const char *)keyData->data(), keySize); +        mMetaKeyMap.add(i, key); + +        keyOffset += keySize; +    } +    return OK; +} + +status_t MPEG4Extractor::parseQTMetaVal( +        int32_t keyId, off64_t offset, size_t size) { +    ssize_t index = mMetaKeyMap.indexOfKey(keyId); +    if (index < 0) { +        // corresponding key is not present, ignore +        return ERROR_MALFORMED; +    } +    if (size <= 16) { +        return ERROR_MALFORMED; +    } +    uint32_t dataSize; +    if (!mDataSource->getUInt32(offset, &dataSize) +            || dataSize > size || dataSize <= 16) { +        return ERROR_MALFORMED; +    } +    uint32_t atomFourCC; +    if (!mDataSource->getUInt32(offset + 4, &atomFourCC) +            || atomFourCC != FOURCC('d', 'a', 't', 'a')) { +        return ERROR_MALFORMED; +    } +    uint32_t dataType; +    if (!mDataSource->getUInt32(offset + 8, &dataType) +            || ((dataType & 0xff000000) != 0)) { +        // not well-known type +        return ERROR_MALFORMED; +    } + +    dataSize -= 16; +    offset += 16; + +    if (dataType == 23 && dataSize >= 4) { +        // BE Float32 +        uint32_t val; +        if (!mDataSource->getUInt32(offset, 
&val)) { +            return ERROR_MALFORMED; +        } +        if (!strcasecmp(mMetaKeyMap[index].c_str(), "com.android.capture.fps")) { +            mFileMetaData->setFloat(kKeyCaptureFramerate, *(float *)&val); +        } +    } else { +        // add more keys if needed +        ALOGV("ignoring key: type %d, size %d", dataType, dataSize); +    } + +    return OK; +}  status_t MPEG4Extractor::parseTrackHeader(          off64_t data_offset, off64_t data_size) { @@ -2191,6 +2398,9 @@ status_t MPEG4Extractor::parseTrackHeader(          return ERROR_UNSUPPORTED;      } +    if (mLastTrack == NULL) +        return ERROR_MALFORMED; +      mLastTrack->meta->setInt32(kKeyTrackID, id);      size_t matrixOffset = dynSize + 16; @@ -2262,7 +2472,7 @@ status_t MPEG4Extractor::parseITunesMetaData(off64_t offset, size_t size) {      uint32_t metadataKey = 0;      char chunk[5];      MakeFourCCString(mPath[4], chunk); -    ALOGV("meta: %s @ %lld", chunk, offset); +    ALOGV("meta: %s @ %lld", chunk, (long long)offset);      switch ((int32_t)mPath[4]) {          case FOURCC(0xa9, 'a', 'l', 'b'):          { @@ -2373,6 +2583,9 @@ status_t MPEG4Extractor::parseITunesMetaData(off64_t offset, size_t size) {                      int32_t delay, padding;                      if (sscanf(mLastCommentData,                                 " %*x %x %x %*x", &delay, &padding) == 2) { +                        if (mLastTrack == NULL) +                            return ERROR_MALFORMED; +                          mLastTrack->meta->setInt32(kKeyEncoderDelay, delay);                          mLastTrack->meta->setInt32(kKeyEncoderPadding, padding);                      } @@ -2661,6 +2874,7 @@ status_t MPEG4Extractor::verifyTrack(Track *track) {              return ERROR_MALFORMED;          }      } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4) +            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG2)              || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {          if 
(!track->meta->findData(kKeyESDS, &type, &data, &size)                  || type != kTypeESDS) { @@ -2745,6 +2959,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(      if (objectTypeIndication == 0xe1) {          // This isn't MPEG4 audio at all, it's QCELP 14k... +        if (mLastTrack == NULL) +            return ERROR_MALFORMED; +          mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_QCELP);          return OK;      } @@ -2765,7 +2982,7 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(      }      if (kUseHexDump) { -        printf("ESD of size %d\n", csd_size); +        printf("ESD of size %zu\n", csd_size);          hexdump(csd, csd_size);      } @@ -2793,6 +3010,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(          objectType = 32 + br.getBits(6);      } +    if (mLastTrack == NULL) +        return ERROR_MALFORMED; +      //keep AOT type      mLastTrack->meta->setInt32(kKeyAACAOT, objectType); @@ -2801,12 +3021,11 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(      int32_t sampleRate = 0;      int32_t numChannels = 0;      if (freqIndex == 15) { -        if (csd_size < 5) { -            return ERROR_MALFORMED; -        } +        if (br.numBitsLeft() < 28) return ERROR_MALFORMED;          sampleRate = br.getBits(24);          numChannels = br.getBits(4);      } else { +        if (br.numBitsLeft() < 4) return ERROR_MALFORMED;          numChannels = br.getBits(4);          if (freqIndex == 13 || freqIndex == 14) { @@ -2817,12 +3036,14 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(      }      if (objectType == AOT_SBR || objectType == AOT_PS) {//SBR specific config per 14496-3 table 1.13 +        if (br.numBitsLeft() < 4) return ERROR_MALFORMED;          uint32_t extFreqIndex = br.getBits(4);          int32_t extSampleRate __unused;          if (extFreqIndex == 15) {              if (csd_size < 8) {                  return 
ERROR_MALFORMED;              } +            if (br.numBitsLeft() < 24) return ERROR_MALFORMED;              extSampleRate = br.getBits(24);          } else {              if (extFreqIndex == 13 || extFreqIndex == 14) { @@ -2859,20 +3080,24 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(      {          if (objectType == AOT_SBR || objectType == AOT_PS) { +            if (br.numBitsLeft() < 5) return ERROR_MALFORMED;              objectType = br.getBits(5);              if (objectType == AOT_ESCAPE) { +                if (br.numBitsLeft() < 6) return ERROR_MALFORMED;                  objectType = 32 + br.getBits(6);              }          }          if (objectType == AOT_AAC_LC || objectType == AOT_ER_AAC_LC ||                  objectType == AOT_ER_AAC_LD || objectType == AOT_ER_AAC_SCAL ||                  objectType == AOT_ER_BSAC) { +            if (br.numBitsLeft() < 2) return ERROR_MALFORMED;              const int32_t frameLengthFlag __unused = br.getBits(1);              const int32_t dependsOnCoreCoder = br.getBits(1);              if (dependsOnCoreCoder ) { +                if (br.numBitsLeft() < 14) return ERROR_MALFORMED;                  const int32_t coreCoderDelay __unused = br.getBits(14);              } @@ -2892,7 +3117,7 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(                      extensionFlag = 1;                      break;                  default: -                    TRESPASS(); +                    return ERROR_MALFORMED;                      break;                  }                  ALOGW("csd missing extension flag; assuming %d for object type %u.", @@ -2902,6 +3127,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(              if (numChannels == 0) {                  int32_t channelsEffectiveNum = 0;                  int32_t channelsNum = 0; +                if (br.numBitsLeft() < 32) { +                    return ERROR_MALFORMED; +                }                  
const int32_t ElementInstanceTag __unused = br.getBits(4);                  const int32_t Profile __unused = br.getBits(2);                  const int32_t SamplingFrequencyIndex __unused = br.getBits(4); @@ -2913,35 +3141,44 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(                  const int32_t NumValidCcElements __unused = br.getBits(4);                  const int32_t MonoMixdownPresent = br.getBits(1); +                  if (MonoMixdownPresent != 0) { +                    if (br.numBitsLeft() < 4) return ERROR_MALFORMED;                      const int32_t MonoMixdownElementNumber __unused = br.getBits(4);                  } +                if (br.numBitsLeft() < 1) return ERROR_MALFORMED;                  const int32_t StereoMixdownPresent = br.getBits(1);                  if (StereoMixdownPresent != 0) { +                    if (br.numBitsLeft() < 4) return ERROR_MALFORMED;                      const int32_t StereoMixdownElementNumber __unused = br.getBits(4);                  } +                if (br.numBitsLeft() < 1) return ERROR_MALFORMED;                  const int32_t MatrixMixdownIndexPresent = br.getBits(1);                  if (MatrixMixdownIndexPresent != 0) { +                    if (br.numBitsLeft() < 3) return ERROR_MALFORMED;                      const int32_t MatrixMixdownIndex __unused = br.getBits(2);                      const int32_t PseudoSurroundEnable __unused = br.getBits(1);                  }                  int i;                  for (i=0; i < NumFrontChannelElements; i++) { +                    if (br.numBitsLeft() < 5) return ERROR_MALFORMED;                      const int32_t FrontElementIsCpe = br.getBits(1);                      const int32_t FrontElementTagSelect __unused = br.getBits(4);                      channelsNum += FrontElementIsCpe ? 
2 : 1;                  }                  for (i=0; i < NumSideChannelElements; i++) { +                    if (br.numBitsLeft() < 5) return ERROR_MALFORMED;                      const int32_t SideElementIsCpe = br.getBits(1);                      const int32_t SideElementTagSelect __unused = br.getBits(4);                      channelsNum += SideElementIsCpe ? 2 : 1;                  }                  for (i=0; i < NumBackChannelElements; i++) { +                    if (br.numBitsLeft() < 5) return ERROR_MALFORMED;                      const int32_t BackElementIsCpe = br.getBits(1);                      const int32_t BackElementTagSelect __unused = br.getBits(4);                      channelsNum += BackElementIsCpe ? 2 : 1; @@ -2949,6 +3186,7 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(                  channelsEffectiveNum = channelsNum;                  for (i=0; i < NumLfeChannelElements; i++) { +                    if (br.numBitsLeft() < 4) return ERROR_MALFORMED;                      const int32_t LfeElementTagSelect __unused = br.getBits(4);                      channelsNum += 1;                  } @@ -2963,6 +3201,9 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(          return ERROR_UNSUPPORTED;      } +    if (mLastTrack == NULL) +        return ERROR_MALFORMED; +      int32_t prevSampleRate;      CHECK(mLastTrack->meta->findInt32(kKeySampleRate, &prevSampleRate)); @@ -3099,16 +3340,24 @@ status_t MPEG4Source::start(MetaData *params) {          mWantsNALFragments = false;      } -    mGroup = new MediaBufferGroup; - -    int32_t max_size; -    CHECK(mFormat->findInt32(kKeyMaxInputSize, &max_size)); +    int32_t tmp; +    CHECK(mFormat->findInt32(kKeyMaxInputSize, &tmp)); +    size_t max_size = tmp; +    // A somewhat arbitrary limit that should be sufficient for 8k video frames +    // If you see the message below for a valid input stream: increase the limit +    if (max_size > 64 * 1024 * 1024) { +        
ALOGE("bogus max input size: %zu", max_size); +        return ERROR_MALFORMED; +    } +    mGroup = new MediaBufferGroup;      mGroup->add_buffer(new MediaBuffer(max_size));      mSrcBuffer = new (std::nothrow) uint8_t[max_size];      if (mSrcBuffer == NULL) {          // file probably specified a bad max size +        delete mGroup; +        mGroup = NULL;          return ERROR_MALFORMED;      } @@ -3166,7 +3415,7 @@ status_t MPEG4Source::parseChunk(off64_t *offset) {      char chunk[5];      MakeFourCCString(chunk_type, chunk); -    ALOGV("MPEG4Source chunk %s @ %llx", chunk, *offset); +    ALOGV("MPEG4Source chunk %s @ %#llx", chunk, (long long)*offset);      off64_t chunk_data_size = *offset + chunk_size - data_offset; @@ -3646,7 +3895,7 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {          sampleCtsOffset = 0;      } -    if (size < (off64_t)sampleCount * bytesPerSample) { +    if (size < (off64_t)(sampleCount * bytesPerSample)) {          return -EINVAL;      } @@ -4438,7 +4687,7 @@ static bool BetterSniffMPEG4(          char chunkstring[5];          MakeFourCCString(chunkType, chunkstring); -        ALOGV("saw chunk type %s, size %" PRIu64 " @ %lld", chunkstring, chunkSize, offset); +        ALOGV("saw chunk type %s, size %" PRIu64 " @ %lld", chunkstring, chunkSize, (long long)offset);          switch (chunkType) {              case FOURCC('f', 't', 'y', 'p'):              { @@ -4501,7 +4750,7 @@ static bool BetterSniffMPEG4(          *meta = new AMessage;          (*meta)->setInt64("meta-data-size", moovAtomEndOffset); -        ALOGV("found metadata size: %lld", moovAtomEndOffset); +        ALOGV("found metadata size: %lld", (long long)moovAtomEndOffset);      }      return true; diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 844a019..47f114a 100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -29,6 +29,7 @@  #include <utils/Log.h>  
#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h>  #include <media/stagefright/MPEG4Writer.h>  #include <media/stagefright/MediaBuffer.h>  #include <media/stagefright/MetaData.h> @@ -62,6 +63,16 @@ static const uint8_t kNalUnitTypeSeqParamSet = 0x07;  static const uint8_t kNalUnitTypePicParamSet = 0x08;  static const int64_t kInitialDelayTimeUs     = 700000LL; +static const char kMetaKey_Version[]    = "com.android.version"; +#ifdef SHOW_MODEL_BUILD +static const char kMetaKey_Model[]      = "com.android.model"; +static const char kMetaKey_Build[]      = "com.android.build"; +#endif +static const char kMetaKey_CaptureFps[] = "com.android.capture.fps"; + +/* uncomment to include model and build in meta */ +//#define SHOW_MODEL_BUILD 1 +  class MPEG4Writer::Track {  public:      Track(MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId); @@ -83,6 +94,7 @@ public:      void addChunkOffset(off64_t offset);      int32_t getTrackId() const { return mTrackId; }      status_t dump(int fd, const Vector<String16>& args) const; +    static const char *getFourCCForMime(const char *mime);  private:      enum { @@ -101,6 +113,8 @@ private:              mCurrTableEntriesElement(NULL) {              CHECK_GT(mElementCapacity, 0);              CHECK_GT(mEntryCapacity, 0); +            // Ensure no integer overflow on allocation in add(). +            CHECK_LT(mEntryCapacity, UINT32_MAX / mElementCapacity);          }          // Free the allocated memory. 
@@ -345,31 +359,6 @@ private:      Track &operator=(const Track &);  }; -MPEG4Writer::MPEG4Writer(const char *filename) -    : mFd(-1), -      mInitCheck(NO_INIT), -      mIsRealTimeRecording(true), -      mUse4ByteNalLength(true), -      mUse32BitOffset(true), -      mIsFileSizeLimitExplicitlyRequested(false), -      mPaused(false), -      mStarted(false), -      mWriterThreadStarted(false), -      mOffset(0), -      mMdatOffset(0), -      mEstimatedMoovBoxSize(0), -      mInterleaveDurationUs(1000000), -      mLatitudex10000(0), -      mLongitudex10000(0), -      mAreGeoTagsAvailable(false), -      mStartTimeOffsetMs(-1) { - -    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); -    if (mFd >= 0) { -        mInitCheck = OK; -    } -} -  MPEG4Writer::MPEG4Writer(int fd)      : mFd(dup(fd)),        mInitCheck(mFd < 0? NO_INIT: OK), @@ -382,12 +371,29 @@ MPEG4Writer::MPEG4Writer(int fd)        mWriterThreadStarted(false),        mOffset(0),        mMdatOffset(0), +      mMoovBoxBuffer(NULL), +      mMoovBoxBufferOffset(0), +      mWriteMoovBoxToMemory(false), +      mFreeBoxOffset(0), +      mStreamableFile(false),        mEstimatedMoovBoxSize(0), +      mMoovExtraSize(0),        mInterleaveDurationUs(1000000), +      mTimeScale(-1), +      mStartTimestampUs(-1ll),        mLatitudex10000(0),        mLongitudex10000(0),        mAreGeoTagsAvailable(false), -      mStartTimeOffsetMs(-1) { +      mStartTimeOffsetMs(-1), +      mMetaKeys(new AMessage()) { +    addDeviceMeta(); + +    // Verify mFd is seekable +    off64_t off = lseek64(mFd, 0, SEEK_SET); +    if (off < 0) { +        ALOGE("cannot seek mFd: %s (%d)", strerror(errno), errno); +        release(); +    }  }  MPEG4Writer::~MPEG4Writer() { @@ -437,6 +443,33 @@ status_t MPEG4Writer::Track::dump(      return OK;  } +// static +const char *MPEG4Writer::Track::getFourCCForMime(const char *mime) { +    if (mime == NULL) { +        return NULL; +    } +    if (!strncasecmp(mime, 
"audio/", 6)) { +        if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime)) { +            return "samr"; +        } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) { +            return "sawb"; +        } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime)) { +            return "mp4a"; +        } +    } else if (!strncasecmp(mime, "video/", 6)) { +        if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { +            return "mp4v"; +        } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { +            return "s263"; +        } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { +            return "avc1"; +        } +    } else { +        ALOGE("Track (%s) other than video or audio is not supported", mime); +    } +    return NULL; +} +  status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {      Mutex::Autolock l(mLock);      if (mStarted) { @@ -452,14 +485,11 @@ status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {      CHECK(source.get() != NULL); -    // A track of type other than video or audio is not supported.      
const char *mime;      source->getFormat()->findCString(kKeyMIMEType, &mime);      bool isAudio = !strncasecmp(mime, "audio/", 6); -    bool isVideo = !strncasecmp(mime, "video/", 6); -    if (!isAudio && !isVideo) { -        ALOGE("Track (%s) other than video or audio is not supported", -            mime); +    if (Track::getFourCCForMime(mime) == NULL) { +        ALOGE("Unsupported mime '%s'", mime);          return ERROR_UNSUPPORTED;      } @@ -507,6 +537,34 @@ status_t MPEG4Writer::startTracks(MetaData *params) {      return OK;  } +void MPEG4Writer::addDeviceMeta() { +    // add device info and estimate space in 'moov' +    char val[PROPERTY_VALUE_MAX]; +    size_t n; +    // meta size is estimated by adding up the following: +    // - meta header structures, which occur only once (total 66 bytes) +    // - size for each key, which consists of a fixed header (32 bytes), +    //   plus key length and data length. +    mMoovExtraSize += 66; +    if (property_get("ro.build.version.release", val, NULL) +            && (n = strlen(val)) > 0) { +        mMetaKeys->setString(kMetaKey_Version, val, n + 1); +        mMoovExtraSize += sizeof(kMetaKey_Version) + n + 32; +    } +#ifdef SHOW_MODEL_BUILD +    if (property_get("ro.product.model", val, NULL) +            && (n = strlen(val)) > 0) { +        mMetaKeys->setString(kMetaKey_Model, val, n + 1); +        mMoovExtraSize += sizeof(kMetaKey_Model) + n + 32; +    } +    if (property_get("ro.build.display.id", val, NULL) +            && (n = strlen(val)) > 0) { +        mMetaKeys->setString(kMetaKey_Build, val, n + 1); +        mMoovExtraSize += sizeof(kMetaKey_Build) + n + 32; +    } +#endif +} +  int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {      // This implementation is highly experimental/heurisitic.      // @@ -560,6 +618,9 @@ int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {          size = MAX_MOOV_BOX_SIZE;      } +    // Account for the extra stuff (Geo, meta keys, etc.) 
+    size += mMoovExtraSize; +      ALOGI("limits: %" PRId64 "/%" PRId64 " bytes/us, bit rate: %d bps and the"           " estimated moov size %" PRId64 " bytes",           mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size); @@ -973,6 +1034,7 @@ void MPEG4Writer::writeMoovBox(int64_t durationUs) {      if (mAreGeoTagsAvailable) {          writeUdtaBox();      } +    writeMetaBox();      int32_t id = 1;      for (List<Track *>::iterator it = mTracks.begin();          it != mTracks.end(); ++it, ++id) { @@ -1142,6 +1204,14 @@ size_t MPEG4Writer::write(      return bytes;  } +void MPEG4Writer::beginBox(uint32_t id) { +    mBoxes.push_back(mWriteMoovBoxToMemory? +            mMoovBoxBufferOffset: mOffset); + +    writeInt32(0); +    writeInt32(id); +} +  void MPEG4Writer::beginBox(const char *fourcc) {      CHECK_EQ(strlen(fourcc), 4); @@ -1266,6 +1336,18 @@ status_t MPEG4Writer::setGeoData(int latitudex10000, int longitudex10000) {      mLatitudex10000 = latitudex10000;      mLongitudex10000 = longitudex10000;      mAreGeoTagsAvailable = true; +    mMoovExtraSize += 30; +    return OK; +} + +status_t MPEG4Writer::setCaptureRate(float captureFps) { +    if (captureFps <= 0.0f) { +        return BAD_VALUE; +    } + +    mMetaKeys->setFloat(kMetaKey_CaptureFps, captureFps); +    mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32; +      return OK;  } @@ -2689,17 +2771,13 @@ void MPEG4Writer::Track::writeVideoFourCCBox() {      const char *mime;      bool success = mMeta->findCString(kKeyMIMEType, &mime);      CHECK(success); -    if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { -        mOwner->beginBox("mp4v"); -    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { -        mOwner->beginBox("s263"); -    } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { -        mOwner->beginBox("avc1"); -    } else { +    const char *fourcc = getFourCCForMime(mime); +    if (fourcc == NULL) {          ALOGE("Unknown mime type '%s'.", mime);          
CHECK(!"should not be here, unknown mime type.");      } +    mOwner->beginBox(fourcc);        // video format      mOwner->writeInt32(0);           // reserved      mOwner->writeInt16(0);           // reserved      mOwner->writeInt16(1);           // data ref index @@ -2743,14 +2821,8 @@ void MPEG4Writer::Track::writeAudioFourCCBox() {      const char *mime;      bool success = mMeta->findCString(kKeyMIMEType, &mime);      CHECK(success); -    const char *fourcc = NULL; -    if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mime)) { -        fourcc = "samr"; -    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mime)) { -        fourcc = "sawb"; -    } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mime)) { -        fourcc = "mp4a"; -    } else { +    const char *fourcc = getFourCCForMime(mime); +    if (fourcc == NULL) {          ALOGE("Unknown mime type '%s'.", mime);          CHECK(!"should not be here, unknown mime type.");      } @@ -3099,6 +3171,103 @@ void MPEG4Writer::writeUdtaBox() {      endBox();  } +void MPEG4Writer::writeHdlr() { +    beginBox("hdlr"); +    writeInt32(0); // Version, Flags +    writeInt32(0); // Predefined +    writeFourcc("mdta"); +    writeInt32(0); // Reserved[0] +    writeInt32(0); // Reserved[1] +    writeInt32(0); // Reserved[2] +    writeInt8(0);  // Name (empty) +    endBox(); +} + +void MPEG4Writer::writeKeys() { +    size_t count = mMetaKeys->countEntries(); + +    beginBox("keys"); +    writeInt32(0);     // Version, Flags +    writeInt32(count); // Entry_count +    for (size_t i = 0; i < count; i++) { +        AMessage::Type type; +        const char *key = mMetaKeys->getEntryNameAt(i, &type); +        size_t n = strlen(key); +        writeInt32(n + 8); +        writeFourcc("mdta"); +        write(key, n); // write without the \0 +    } +    endBox(); +} + +void MPEG4Writer::writeIlst() { +    size_t count = mMetaKeys->countEntries(); + +    beginBox("ilst"); +    for (size_t i = 0; i < count; i++) { +        beginBox(i + 
1); // key id (1-based) +        beginBox("data"); +        AMessage::Type type; +        const char *key = mMetaKeys->getEntryNameAt(i, &type); +        switch (type) { +            case AMessage::kTypeString: +            { +                AString val; +                CHECK(mMetaKeys->findString(key, &val)); +                writeInt32(1); // type = UTF8 +                writeInt32(0); // default country/language +                write(val.c_str(), strlen(val.c_str())); // write without \0 +                break; +            } + +            case AMessage::kTypeFloat: +            { +                float val; +                CHECK(mMetaKeys->findFloat(key, &val)); +                writeInt32(23); // type = float32 +                writeInt32(0);  // default country/language +                writeInt32(*reinterpret_cast<int32_t *>(&val)); +                break; +            } + +            case AMessage::kTypeInt32: +            { +                int32_t val; +                CHECK(mMetaKeys->findInt32(key, &val)); +                writeInt32(67); // type = signed int32 +                writeInt32(0);  // default country/language +                writeInt32(val); +                break; +            } + +            default: +            { +                ALOGW("Unsupported key type, writing 0 instead"); +                writeInt32(77); // type = unsigned int32 +                writeInt32(0);  // default country/language +                writeInt32(0); +                break; +            } +        } +        endBox(); // data +        endBox(); // key id +    } +    endBox(); // ilst +} + +void MPEG4Writer::writeMetaBox() { +    size_t count = mMetaKeys->countEntries(); +    if (count == 0) { +        return; +    } + +    beginBox("meta"); +    writeHdlr(); +    writeKeys(); +    writeIlst(); +    endBox(); +} +  /*   * Geodata is stored according to ISO-6709 standard.   
*/ diff --git a/media/libstagefright/MediaClock.cpp b/media/libstagefright/MediaClock.cpp new file mode 100644 index 0000000..2641e4e --- /dev/null +++ b/media/libstagefright/MediaClock.cpp @@ -0,0 +1,153 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaClock" +#include <utils/Log.h> + +#include <media/stagefright/MediaClock.h> + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> + +namespace android { + +MediaClock::MediaClock() +    : mAnchorTimeMediaUs(-1), +      mAnchorTimeRealUs(-1), +      mMaxTimeMediaUs(INT64_MAX), +      mStartingTimeMediaUs(-1), +      mPlaybackRate(1.0) { +} + +MediaClock::~MediaClock() { +} + +void MediaClock::setStartingTimeMedia(int64_t startingTimeMediaUs) { +    Mutex::Autolock autoLock(mLock); +    mStartingTimeMediaUs = startingTimeMediaUs; +} + +void MediaClock::clearAnchor() { +    Mutex::Autolock autoLock(mLock); +    mAnchorTimeMediaUs = -1; +    mAnchorTimeRealUs = -1; +} + +void MediaClock::updateAnchor( +        int64_t anchorTimeMediaUs, +        int64_t anchorTimeRealUs, +        int64_t maxTimeMediaUs) { +    if (anchorTimeMediaUs < 0 || anchorTimeRealUs < 0) { +        ALOGW("reject anchor time since it is negative."); +        return; +    } + +    Mutex::Autolock autoLock(mLock); +    int64_t nowUs = ALooper::GetNowUs(); +    int64_t 
nowMediaUs = +        anchorTimeMediaUs + (nowUs - anchorTimeRealUs) * (double)mPlaybackRate; +    if (nowMediaUs < 0) { +        ALOGW("reject anchor time since it leads to negative media time."); +        return; +    } +    mAnchorTimeRealUs = nowUs; +    mAnchorTimeMediaUs = nowMediaUs; +    mMaxTimeMediaUs = maxTimeMediaUs; +} + +void MediaClock::updateMaxTimeMedia(int64_t maxTimeMediaUs) { +    Mutex::Autolock autoLock(mLock); +    mMaxTimeMediaUs = maxTimeMediaUs; +} + +void MediaClock::setPlaybackRate(float rate) { +    CHECK_GE(rate, 0.0); +    Mutex::Autolock autoLock(mLock); +    if (mAnchorTimeRealUs == -1) { +        mPlaybackRate = rate; +        return; +    } + +    int64_t nowUs = ALooper::GetNowUs(); +    mAnchorTimeMediaUs += (nowUs - mAnchorTimeRealUs) * (double)mPlaybackRate; +    if (mAnchorTimeMediaUs < 0) { +        ALOGW("setRate: anchor time should not be negative, set to 0."); +        mAnchorTimeMediaUs = 0; +    } +    mAnchorTimeRealUs = nowUs; +    mPlaybackRate = rate; +} + +float MediaClock::getPlaybackRate() const { +    Mutex::Autolock autoLock(mLock); +    return mPlaybackRate; +} + +status_t MediaClock::getMediaTime( +        int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const { +    if (outMediaUs == NULL) { +        return BAD_VALUE; +    } + +    Mutex::Autolock autoLock(mLock); +    return getMediaTime_l(realUs, outMediaUs, allowPastMaxTime); +} + +status_t MediaClock::getMediaTime_l( +        int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const { +    if (mAnchorTimeRealUs == -1) { +        return NO_INIT; +    } + +    int64_t mediaUs = mAnchorTimeMediaUs +            + (realUs - mAnchorTimeRealUs) * (double)mPlaybackRate; +    if (mediaUs > mMaxTimeMediaUs && !allowPastMaxTime) { +        mediaUs = mMaxTimeMediaUs; +    } +    if (mediaUs < mStartingTimeMediaUs) { +        mediaUs = mStartingTimeMediaUs; +    } +    if (mediaUs < 0) { +        mediaUs = 0; +    } +    *outMediaUs = mediaUs; +    
return OK; +} + +status_t MediaClock::getRealTimeFor( +        int64_t targetMediaUs, int64_t *outRealUs) const { +    if (outRealUs == NULL) { +        return BAD_VALUE; +    } + +    Mutex::Autolock autoLock(mLock); +    if (mPlaybackRate == 0.0) { +        return NO_INIT; +    } + +    int64_t nowUs = ALooper::GetNowUs(); +    int64_t nowMediaUs; +    status_t status = +            getMediaTime_l(nowUs, &nowMediaUs, true /* allowPastMaxTime */); +    if (status != OK) { +        return status; +    } +    *outRealUs = (targetMediaUs - nowMediaUs) / (double)mPlaybackRate + nowUs; +    return OK; +} + +}  // namespace android diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 6ca123a..cd59709 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -21,10 +21,15 @@  #include "include/avc_utils.h"  #include "include/SoftwareRenderer.h" -#include <binder/IBatteryStats.h> +#include <binder/IMemory.h> +#include <binder/IPCThreadState.h>  #include <binder/IServiceManager.h> +#include <binder/MemoryDealer.h> +#include <gui/BufferQueue.h>  #include <gui/Surface.h>  #include <media/ICrypto.h> +#include <media/IOMX.h> +#include <media/IResourceManagerService.h>  #include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AMessage.h> @@ -36,85 +41,131 @@  #include <media/stagefright/MediaCodecList.h>  #include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaFilter.h>  #include <media/stagefright/MetaData.h> -#include <media/stagefright/NativeWindowWrapper.h> +#include <media/stagefright/OMXClient.h> +#include <media/stagefright/OMXCodec.h> +#include <media/stagefright/PersistentSurface.h> +#include <media/stagefright/SurfaceUtils.h> +#include <mediautils/BatteryNotifier.h>  #include <private/android_filesystem_config.h>  #include <utils/Log.h>  #include 
<utils/Singleton.h>  namespace android { -struct MediaCodec::BatteryNotifier : public Singleton<BatteryNotifier> { -    BatteryNotifier(); +static int64_t getId(sp<IResourceManagerClient> client) { +    return (int64_t) client.get(); +} -    void noteStartVideo(); -    void noteStopVideo(); -    void noteStartAudio(); -    void noteStopAudio(); +static bool isResourceError(status_t err) { +    return (err == NO_MEMORY); +} -private: -    int32_t mVideoRefCount; -    int32_t mAudioRefCount; -    sp<IBatteryStats> mBatteryStatService; -}; +static const int kMaxRetry = 2; + +struct ResourceManagerClient : public BnResourceManagerClient { +    ResourceManagerClient(MediaCodec* codec) : mMediaCodec(codec) {} + +    virtual bool reclaimResource() { +        sp<MediaCodec> codec = mMediaCodec.promote(); +        if (codec == NULL) { +            // codec is already gone. +            return true; +        } +        status_t err = codec->reclaim(); +        if (err != OK) { +            ALOGW("ResourceManagerClient failed to release codec with err %d", err); +        } +        return (err == OK); +    } -ANDROID_SINGLETON_STATIC_INSTANCE(MediaCodec::BatteryNotifier) +    virtual String8 getName() { +        String8 ret; +        sp<MediaCodec> codec = mMediaCodec.promote(); +        if (codec == NULL) { +            // codec is already gone. 
+            return ret; +        } -MediaCodec::BatteryNotifier::BatteryNotifier() : -    mVideoRefCount(0), -    mAudioRefCount(0) { -    // get battery service -    const sp<IServiceManager> sm(defaultServiceManager()); -    if (sm != NULL) { -        const String16 name("batterystats"); -        mBatteryStatService = interface_cast<IBatteryStats>(sm->getService(name)); -        if (mBatteryStatService == NULL) { -            ALOGE("batterystats service unavailable!"); +        AString name; +        if (codec->getName(&name) == OK) { +            ret.setTo(name.c_str());          } +        return ret; +    } + +protected: +    virtual ~ResourceManagerClient() {} + +private: +    wp<MediaCodec> mMediaCodec; + +    DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient); +}; + +MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(pid_t pid) +        : mPid(pid) { +    if (mPid == MediaCodec::kNoPid) { +        mPid = IPCThreadState::self()->getCallingPid();      }  } -void MediaCodec::BatteryNotifier::noteStartVideo() { -    if (mVideoRefCount == 0 && mBatteryStatService != NULL) { -        mBatteryStatService->noteStartVideo(AID_MEDIA); +MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() { +    if (mService != NULL) { +        IInterface::asBinder(mService)->unlinkToDeath(this);      } -    mVideoRefCount++;  } -void MediaCodec::BatteryNotifier::noteStopVideo() { -    if (mVideoRefCount == 0) { -        ALOGW("BatteryNotifier::noteStop(): video refcount is broken!"); +void MediaCodec::ResourceManagerServiceProxy::init() { +    sp<IServiceManager> sm = defaultServiceManager(); +    sp<IBinder> binder = sm->getService(String16("media.resource_manager")); +    mService = interface_cast<IResourceManagerService>(binder); +    if (mService == NULL) { +        ALOGE("Failed to get ResourceManagerService");          return;      } +    IInterface::asBinder(mService)->linkToDeath(this); +} -    mVideoRefCount--; -    if (mVideoRefCount == 
0 && mBatteryStatService != NULL) { -        mBatteryStatService->noteStopVideo(AID_MEDIA); -    } +void MediaCodec::ResourceManagerServiceProxy::binderDied(const wp<IBinder>& /*who*/) { +    ALOGW("ResourceManagerService died."); +    Mutex::Autolock _l(mLock); +    mService.clear();  } -void MediaCodec::BatteryNotifier::noteStartAudio() { -    if (mAudioRefCount == 0 && mBatteryStatService != NULL) { -        mBatteryStatService->noteStartAudio(AID_MEDIA); +void MediaCodec::ResourceManagerServiceProxy::addResource( +        int64_t clientId, +        const sp<IResourceManagerClient> client, +        const Vector<MediaResource> &resources) { +    Mutex::Autolock _l(mLock); +    if (mService == NULL) { +        return;      } -    mAudioRefCount++; +    mService->addResource(mPid, clientId, client, resources);  } -void MediaCodec::BatteryNotifier::noteStopAudio() { -    if (mAudioRefCount == 0) { -        ALOGW("BatteryNotifier::noteStop(): audio refcount is broken!"); +void MediaCodec::ResourceManagerServiceProxy::removeResource(int64_t clientId) { +    Mutex::Autolock _l(mLock); +    if (mService == NULL) {          return;      } +    mService->removeResource(mPid, clientId); +} -    mAudioRefCount--; -    if (mAudioRefCount == 0 && mBatteryStatService != NULL) { -        mBatteryStatService->noteStopAudio(AID_MEDIA); +bool MediaCodec::ResourceManagerServiceProxy::reclaimResource( +        const Vector<MediaResource> &resources) { +    Mutex::Autolock _l(mLock); +    if (mService == NULL) { +        return false;      } +    return mService->reclaimResource(mPid, resources);  } +  // static  sp<MediaCodec> MediaCodec::CreateByType( -        const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err) { -    sp<MediaCodec> codec = new MediaCodec(looper); +        const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err, pid_t pid) { +    sp<MediaCodec> codec = new MediaCodec(looper, pid);      const status_t ret = codec->init(mime, 
true /* nameIsType */, encoder);      if (err != NULL) { @@ -125,8 +176,8 @@ sp<MediaCodec> MediaCodec::CreateByType(  // static  sp<MediaCodec> MediaCodec::CreateByComponentName( -        const sp<ALooper> &looper, const char *name, status_t *err) { -    sp<MediaCodec> codec = new MediaCodec(looper); +        const sp<ALooper> &looper, const char *name, status_t *err, pid_t pid) { +    sp<MediaCodec> codec = new MediaCodec(looper, pid);      const status_t ret = codec->init(name, false /* nameIsType */, false /* encoder */);      if (err != NULL) { @@ -135,25 +186,82 @@ sp<MediaCodec> MediaCodec::CreateByComponentName(      return ret == OK ? codec : NULL; // NULL deallocates codec.  } -MediaCodec::MediaCodec(const sp<ALooper> &looper) +// static +sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() { +    OMXClient client; +    CHECK_EQ(client.connect(), (status_t)OK); +    sp<IOMX> omx = client.interface(); + +    const sp<IMediaCodecList> mediaCodecList = MediaCodecList::getInstance(); +    if (mediaCodecList == NULL) { +        ALOGE("Failed to obtain MediaCodecList!"); +        return NULL; // if called from Java should raise IOException +    } + +    AString tmp; +    sp<AMessage> globalSettings = mediaCodecList->getGlobalSettings(); +    if (globalSettings == NULL || !globalSettings->findString( +            kMaxEncoderInputBuffers, &tmp)) { +        ALOGE("Failed to get encoder input buffer count!"); +        return NULL; +    } + +    int32_t bufferCount = strtol(tmp.c_str(), NULL, 10); +    if (bufferCount <= 0 +            || bufferCount > BufferQueue::MAX_MAX_ACQUIRED_BUFFERS) { +        ALOGE("Encoder input buffer count is invalid!"); +        return NULL; +    } + +    sp<IGraphicBufferProducer> bufferProducer; +    sp<IGraphicBufferConsumer> bufferConsumer; + +    status_t err = omx->createPersistentInputSurface( +            &bufferProducer, &bufferConsumer); + +    if (err != OK) { +        ALOGE("Failed to create persistent input 
surface."); +        return NULL; +    } + +    err = bufferConsumer->setMaxAcquiredBufferCount(bufferCount); + +    if (err != NO_ERROR) { +        ALOGE("Unable to set BQ max acquired buffer count to %u: %d", +                bufferCount, err); +        return NULL; +    } + +    return new PersistentSurface(bufferProducer, bufferConsumer); +} + +MediaCodec::MediaCodec(const sp<ALooper> &looper, pid_t pid)      : mState(UNINITIALIZED), +      mReleasedByResourceManager(false),        mLooper(looper),        mCodec(NULL),        mReplyID(0),        mFlags(0),        mStickyError(OK),        mSoftRenderer(NULL), +      mResourceManagerClient(new ResourceManagerClient(this)), +      mResourceManagerService(new ResourceManagerServiceProxy(pid)),        mBatteryStatNotified(false),        mIsVideo(false), +      mVideoWidth(0), +      mVideoHeight(0), +      mRotationDegrees(0),        mDequeueInputTimeoutGeneration(0),        mDequeueInputReplyID(0),        mDequeueOutputTimeoutGeneration(0),        mDequeueOutputReplyID(0), -      mHaveInputSurface(false) { +      mHaveInputSurface(false), +      mHavePendingInputBuffers(false) {  }  MediaCodec::~MediaCodec() {      CHECK_EQ(mState, UNINITIALIZED); +    mResourceManagerService->removeResource(getId(mResourceManagerClient));  }  // static @@ -172,14 +280,21 @@ status_t MediaCodec::PostAndAwaitResponse(      return err;  } -// static -void MediaCodec::PostReplyWithError(int32_t replyID, int32_t err) { +void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) { +    int32_t finalErr = err; +    if (mReleasedByResourceManager) { +        // override the err code if MediaCodec has been released by ResourceManager. 
+        finalErr = DEAD_OBJECT; +    } +      sp<AMessage> response = new AMessage; -    response->setInt32("err", err); +    response->setInt32("err", finalErr);      response->postReply(replyID);  }  status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) { +    mResourceManagerService->init(); +      // save init parameters for reset      mInitName = name;      mInitNameIsType = nameIsType; @@ -189,16 +304,30 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {      // quickly, violating the OpenMAX specs, until that is remedied      // we need to invest in an extra looper to free the main event      // queue. -    mCodec = new ACodec; -    bool needDedicatedLooper = false; + +    if (nameIsType || !strncasecmp(name.c_str(), "omx.", 4)) { +        mCodec = new ACodec; +    } else if (!nameIsType +            && !strncasecmp(name.c_str(), "android.filter.", 15)) { +        mCodec = new MediaFilter; +    } else { +        return NAME_NOT_FOUND; +    } + +    bool secureCodec = false;      if (nameIsType && !strncasecmp(name.c_str(), "video/", 6)) { -        needDedicatedLooper = true; +        mIsVideo = true;      } else {          AString tmp = name;          if (tmp.endsWith(".secure")) { +            secureCodec = true;              tmp.erase(tmp.size() - 7, 7);          }          const sp<IMediaCodecList> mcl = MediaCodecList::getInstance(); +        if (mcl == NULL) { +            mCodec = NULL;  // remove the codec. 
+            return NO_INIT; // if called from Java should raise IOException +        }          ssize_t codecIdx = mcl->findCodecByName(tmp.c_str());          if (codecIdx >= 0) {              const sp<MediaCodecInfo> info = mcl->getCodecInfo(codecIdx); @@ -206,14 +335,15 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {              info->getSupportedMimes(&mimes);              for (size_t i = 0; i < mimes.size(); i++) {                  if (mimes[i].startsWith("video/")) { -                    needDedicatedLooper = true; +                    mIsVideo = true;                      break;                  }              }          }      } -    if (needDedicatedLooper) { +    if (mIsVideo) { +        // video codec needs dedicated looper          if (mCodecLooper == NULL) {              mCodecLooper = new ALooper;              mCodecLooper->setName("CodecLooper"); @@ -227,9 +357,9 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {      mLooper->registerHandler(this); -    mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, id())); +    mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, this)); -    sp<AMessage> msg = new AMessage(kWhatInit, id()); +    sp<AMessage> msg = new AMessage(kWhatInit, this);      msg->setString("name", name);      msg->setInt32("nameIsType", nameIsType); @@ -237,58 +367,124 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {          msg->setInt32("encoder", encoder);      } -    sp<AMessage> response; -    return PostAndAwaitResponse(msg, &response); +    status_t err; +    Vector<MediaResource> resources; +    const char *type = secureCodec ? kResourceSecureCodec : kResourceNonSecureCodec; +    const char *subtype = mIsVideo ? 
kResourceVideoCodec : kResourceAudioCodec; +    resources.push_back(MediaResource(String8(type), String8(subtype), 1)); +    for (int i = 0; i <= kMaxRetry; ++i) { +        if (i > 0) { +            // Don't try to reclaim resource for the first time. +            if (!mResourceManagerService->reclaimResource(resources)) { +                break; +            } +        } + +        sp<AMessage> response; +        err = PostAndAwaitResponse(msg, &response); +        if (!isResourceError(err)) { +            break; +        } +    } +    return err;  }  status_t MediaCodec::setCallback(const sp<AMessage> &callback) { -    sp<AMessage> msg = new AMessage(kWhatSetCallback, id()); +    sp<AMessage> msg = new AMessage(kWhatSetCallback, this);      msg->setMessage("callback", callback);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response);  } +status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> ¬ify) { +    sp<AMessage> msg = new AMessage(kWhatSetNotification, this); +    msg->setMessage("on-frame-rendered", notify); +    return msg->post(); +} +  status_t MediaCodec::configure(          const sp<AMessage> &format, -        const sp<Surface> &nativeWindow, +        const sp<Surface> &surface,          const sp<ICrypto> &crypto,          uint32_t flags) { -    sp<AMessage> msg = new AMessage(kWhatConfigure, id()); +    sp<AMessage> msg = new AMessage(kWhatConfigure, this); + +    if (mIsVideo) { +        format->findInt32("width", &mVideoWidth); +        format->findInt32("height", &mVideoHeight); +        if (!format->findInt32("rotation-degrees", &mRotationDegrees)) { +            mRotationDegrees = 0; +        } +    }      msg->setMessage("format", format);      msg->setInt32("flags", flags); - -    if (nativeWindow != NULL) { -        msg->setObject( -                "native-window", -                new NativeWindowWrapper(nativeWindow)); -    } +    msg->setObject("surface", surface);      if (crypto != NULL) {          
msg->setPointer("crypto", crypto.get());      } -    sp<AMessage> response; -    status_t err = PostAndAwaitResponse(msg, &response); +    // save msg for reset +    mConfigureMsg = msg; -    if (err != OK && err != INVALID_OPERATION) { -        // MediaCodec now set state to UNINITIALIZED upon any fatal error. -        // To maintain backward-compatibility, do a reset() to put codec -        // back into INITIALIZED state. -        // But don't reset if the err is INVALID_OPERATION, which means -        // the configure failure is due to wrong state. +    status_t err; +    Vector<MediaResource> resources; +    const char *type = (mFlags & kFlagIsSecure) ? +            kResourceSecureCodec : kResourceNonSecureCodec; +    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec; +    resources.push_back(MediaResource(String8(type), String8(subtype), 1)); +    // Don't know the buffer size at this point, but it's fine to use 1 because +    // the reclaimResource call doesn't consider the requester's buffer size for now. +    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1)); +    for (int i = 0; i <= kMaxRetry; ++i) { +        if (i > 0) { +            // Don't try to reclaim resource for the first time. +            if (!mResourceManagerService->reclaimResource(resources)) { +                break; +            } +        } -        ALOGE("configure failed with err 0x%08x, resetting...", err); -        reset(); +        sp<AMessage> response; +        err = PostAndAwaitResponse(msg, &response); +        if (err != OK && err != INVALID_OPERATION) { +            // MediaCodec now set state to UNINITIALIZED upon any fatal error. +            // To maintain backward-compatibility, do a reset() to put codec +            // back into INITIALIZED state. +            // But don't reset if the err is INVALID_OPERATION, which means +            // the configure failure is due to wrong state. 
+ +            ALOGE("configure failed with err 0x%08x, resetting...", err); +            reset(); +        } +        if (!isResourceError(err)) { +            break; +        }      } -      return err;  } +status_t MediaCodec::setInputSurface( +        const sp<PersistentSurface> &surface) { +    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); +    msg->setObject("input-surface", surface.get()); + +    sp<AMessage> response; +    return PostAndAwaitResponse(msg, &response); +} + +status_t MediaCodec::setSurface(const sp<Surface> &surface) { +    sp<AMessage> msg = new AMessage(kWhatSetSurface, this); +    msg->setObject("surface", surface); + +    sp<AMessage> response; +    return PostAndAwaitResponse(msg, &response); +} +  status_t MediaCodec::createInputSurface(          sp<IGraphicBufferProducer>* bufferProducer) { -    sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, id()); +    sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);      sp<AMessage> response;      status_t err = PostAndAwaitResponse(msg, &response); @@ -306,22 +502,85 @@ status_t MediaCodec::createInputSurface(      return err;  } +uint64_t MediaCodec::getGraphicBufferSize() { +    if (!mIsVideo) { +        return 0; +    } + +    uint64_t size = 0; +    size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]); +    for (size_t i = 0; i < portNum; ++i) { +        // TODO: this is just an estimation, we should get the real buffer size from ACodec. 
+        size += mPortBuffers[i].size() * mVideoWidth * mVideoHeight * 3 / 2; +    } +    return size; +} + +void MediaCodec::addResource(const String8 &type, const String8 &subtype, uint64_t value) { +    Vector<MediaResource> resources; +    resources.push_back(MediaResource(type, subtype, value)); +    mResourceManagerService->addResource( +            getId(mResourceManagerClient), mResourceManagerClient, resources); +} +  status_t MediaCodec::start() { -    sp<AMessage> msg = new AMessage(kWhatStart, id()); +    sp<AMessage> msg = new AMessage(kWhatStart, this); + +    status_t err; +    Vector<MediaResource> resources; +    const char *type = (mFlags & kFlagIsSecure) ? +            kResourceSecureCodec : kResourceNonSecureCodec; +    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec; +    resources.push_back(MediaResource(String8(type), String8(subtype), 1)); +    // Don't know the buffer size at this point, but it's fine to use 1 because +    // the reclaimResource call doesn't consider the requester's buffer size for now. +    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1)); +    for (int i = 0; i <= kMaxRetry; ++i) { +        if (i > 0) { +            // Don't try to reclaim resource for the first time. +            if (!mResourceManagerService->reclaimResource(resources)) { +                break; +            } +            // Recover codec from previous error before retry start. 
+            err = reset(); +            if (err != OK) { +                ALOGE("retrying start: failed to reset codec"); +                break; +            } +            sp<AMessage> response; +            err = PostAndAwaitResponse(mConfigureMsg, &response); +            if (err != OK) { +                ALOGE("retrying start: failed to configure codec"); +                break; +            } +        } + +        sp<AMessage> response; +        err = PostAndAwaitResponse(msg, &response); +        if (!isResourceError(err)) { +            break; +        } +    } +    return err; +} + +status_t MediaCodec::stop() { +    sp<AMessage> msg = new AMessage(kWhatStop, this);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response);  } -status_t MediaCodec::stop() { -    sp<AMessage> msg = new AMessage(kWhatStop, id()); +status_t MediaCodec::reclaim() { +    sp<AMessage> msg = new AMessage(kWhatRelease, this); +    msg->setInt32("reclaimed", 1);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response);  }  status_t MediaCodec::release() { -    sp<AMessage> msg = new AMessage(kWhatRelease, id()); +    sp<AMessage> msg = new AMessage(kWhatRelease, this);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response); @@ -373,7 +632,7 @@ status_t MediaCodec::queueInputBuffer(          errorDetailMsg->clear();      } -    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);      msg->setSize("index", index);      msg->setSize("offset", offset);      msg->setSize("size", size); @@ -400,7 +659,7 @@ status_t MediaCodec::queueSecureInputBuffer(          errorDetailMsg->clear();      } -    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);      msg->setSize("index", index);      msg->setSize("offset", offset);      msg->setPointer("subSamples", (void *)subSamples); @@ 
-419,7 +678,7 @@ status_t MediaCodec::queueSecureInputBuffer(  }  status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) { -    sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);      msg->setInt64("timeoutUs", timeoutUs);      sp<AMessage> response; @@ -440,7 +699,7 @@ status_t MediaCodec::dequeueOutputBuffer(          int64_t *presentationTimeUs,          uint32_t *flags,          int64_t timeoutUs) { -    sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);      msg->setInt64("timeoutUs", timeoutUs);      sp<AMessage> response; @@ -459,7 +718,7 @@ status_t MediaCodec::dequeueOutputBuffer(  }  status_t MediaCodec::renderOutputBufferAndRelease(size_t index) { -    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);      msg->setSize("index", index);      msg->setInt32("render", true); @@ -468,7 +727,7 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {  }  status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) { -    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);      msg->setSize("index", index);      msg->setInt32("render", true);      msg->setInt64("timestampNs", timestampNs); @@ -478,7 +737,7 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestam  }  status_t MediaCodec::releaseOutputBuffer(size_t index) { -    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);      msg->setSize("index", index);      sp<AMessage> response; @@ -486,14 +745,14 @@ status_t MediaCodec::releaseOutputBuffer(size_t index) {  }  status_t MediaCodec::signalEndOfInputStream() { -    
sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, id()); +    sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response);  }  status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const { -    sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, id()); +    sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);      sp<AMessage> response;      status_t err; @@ -507,7 +766,7 @@ status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {  }  status_t MediaCodec::getInputFormat(sp<AMessage> *format) const { -    sp<AMessage> msg = new AMessage(kWhatGetInputFormat, id()); +    sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);      sp<AMessage> response;      status_t err; @@ -521,7 +780,7 @@ status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {  }  status_t MediaCodec::getName(AString *name) const { -    sp<AMessage> msg = new AMessage(kWhatGetName, id()); +    sp<AMessage> msg = new AMessage(kWhatGetName, this);      sp<AMessage> response;      status_t err; @@ -534,8 +793,18 @@ status_t MediaCodec::getName(AString *name) const {      return OK;  } +status_t MediaCodec::getWidevineLegacyBuffers(Vector<sp<ABuffer> > *buffers) const { +    sp<AMessage> msg = new AMessage(kWhatGetBuffers, this); +    msg->setInt32("portIndex", kPortIndexInput); +    msg->setPointer("buffers", buffers); +    msg->setInt32("widevine", true); + +    sp<AMessage> response; +    return PostAndAwaitResponse(msg, &response); +} +  status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const { -    sp<AMessage> msg = new AMessage(kWhatGetBuffers, id()); +    sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);      msg->setInt32("portIndex", kPortIndexInput);      msg->setPointer("buffers", buffers); @@ -544,7 +813,7 @@ status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const {  }  status_t 
MediaCodec::getOutputBuffers(Vector<sp<ABuffer> > *buffers) const { -    sp<AMessage> msg = new AMessage(kWhatGetBuffers, id()); +    sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);      msg->setInt32("portIndex", kPortIndexOutput);      msg->setPointer("buffers", buffers); @@ -576,6 +845,10 @@ status_t MediaCodec::getBufferAndFormat(          sp<ABuffer> *buffer, sp<AMessage> *format) {      // use mutex instead of a context switch +    if (mReleasedByResourceManager) { +        return DEAD_OBJECT; +    } +      buffer->clear();      format->clear();      if (!isExecuting()) { @@ -602,20 +875,20 @@ status_t MediaCodec::getBufferAndFormat(  }  status_t MediaCodec::flush() { -    sp<AMessage> msg = new AMessage(kWhatFlush, id()); +    sp<AMessage> msg = new AMessage(kWhatFlush, this);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response);  }  status_t MediaCodec::requestIDRFrame() { -    (new AMessage(kWhatRequestIDRFrame, id()))->post(); +    (new AMessage(kWhatRequestIDRFrame, this))->post();      return OK;  }  void MediaCodec::requestActivityNotification(const sp<AMessage> ¬ify) { -    sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, id()); +    sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);      msg->setMessage("notify", notify);      msg->post();  } @@ -640,7 +913,7 @@ void MediaCodec::cancelPendingDequeueOperations() {      }  } -bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) { +bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {      if (!isExecuting() || (mFlags & kFlagIsAsync)              || (newRequest && (mFlags & kFlagDequeueInputPending))) {          PostReplyWithError(replyID, INVALID_OPERATION); @@ -664,21 +937,20 @@ bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) {      return true;  } -bool MediaCodec::handleDequeueOutputBuffer(uint32_t replyID, bool newRequest) { -    
sp<AMessage> response = new AMessage; - +bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {      if (!isExecuting() || (mFlags & kFlagIsAsync)              || (newRequest && (mFlags & kFlagDequeueOutputPending))) { -        response->setInt32("err", INVALID_OPERATION); +        PostReplyWithError(replyID, INVALID_OPERATION);      } else if (mFlags & kFlagStickyError) { -        response->setInt32("err", getStickyError()); +        PostReplyWithError(replyID, getStickyError());      } else if (mFlags & kFlagOutputBuffersChanged) { -        response->setInt32("err", INFO_OUTPUT_BUFFERS_CHANGED); +        PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);          mFlags &= ~kFlagOutputBuffersChanged;      } else if (mFlags & kFlagOutputFormatChanged) { -        response->setInt32("err", INFO_FORMAT_CHANGED); +        PostReplyWithError(replyID, INFO_FORMAT_CHANGED);          mFlags &= ~kFlagOutputFormatChanged;      } else { +        sp<AMessage> response = new AMessage;          ssize_t index = dequeuePortBuffer(kPortIndexOutput);          if (index < 0) { @@ -713,10 +985,9 @@ bool MediaCodec::handleDequeueOutputBuffer(uint32_t replyID, bool newRequest) {          }          response->setInt32("flags", flags); +        response->postReply(replyID);      } -    response->postReply(replyID); -      return true;  } @@ -874,18 +1145,30 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                          mFlags &= ~kFlagUsesSoftwareRenderer;                      } +                    String8 resourceType;                      if (mComponentName.endsWith(".secure")) {                          mFlags |= kFlagIsSecure; +                        resourceType = String8(kResourceSecureCodec);                      } else {                          mFlags &= ~kFlagIsSecure; +                        resourceType = String8(kResourceNonSecureCodec);                      } +                    const char *subtype 
= mIsVideo ? kResourceVideoCodec : kResourceAudioCodec; +                    addResource(resourceType, String8(subtype), 1); +                      (new AMessage)->postReply(mReplyID);                      break;                  }                  case CodecBase::kWhatComponentConfigured:                  { +                    if (mState == UNINITIALIZED || mState == INITIALIZED) { +                        // In case a kWhatError message came in and replied with error, +                        // we log a warning and ignore. +                        ALOGW("configure interrupted by error, current state %d", mState); +                        break; +                    }                      CHECK_EQ(mState, CONFIGURING);                      // reset input surface flag @@ -908,7 +1191,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                  {                      // response to initiateCreateInputSurface()                      status_t err = NO_ERROR; -                    sp<AMessage> response = new AMessage(); +                    sp<AMessage> response = new AMessage;                      if (!msg->findInt32("err", &err)) {                          sp<RefBase> obj;                          msg->findObject("input-surface", &obj); @@ -922,10 +1205,24 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                      break;                  } +                case CodecBase::kWhatInputSurfaceAccepted: +                { +                    // response to initiateSetInputSurface() +                    status_t err = NO_ERROR; +                    sp<AMessage> response = new AMessage(); +                    if (!msg->findInt32("err", &err)) { +                        mHaveInputSurface = true; +                    } else { +                        response->setInt32("err", err); +                    } +                    response->postReply(mReplyID); +                    break; +                } +                  case 
CodecBase::kWhatSignaledInputEOS:                  {                      // response to signalEndOfInputStream() -                    sp<AMessage> response = new AMessage(); +                    sp<AMessage> response = new AMessage;                      status_t err;                      if (msg->findInt32("err", &err)) {                          response->setInt32("err", err); @@ -959,6 +1256,17 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                      size_t numBuffers = portDesc->countBuffers(); +                    size_t totalSize = 0; +                    for (size_t i = 0; i < numBuffers; ++i) { +                        if (portIndex == kPortIndexInput && mCrypto != NULL) { +                            totalSize += portDesc->bufferAt(i)->capacity(); +                        } +                    } + +                    if (totalSize) { +                        mDealer = new MemoryDealer(totalSize, "MediaCodec"); +                    } +                      for (size_t i = 0; i < numBuffers; ++i) {                          BufferInfo info;                          info.mBufferID = portDesc->bufferIDAt(i); @@ -966,8 +1274,10 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                          info.mData = portDesc->bufferAt(i);                          if (portIndex == kPortIndexInput && mCrypto != NULL) { +                            sp<IMemory> mem = mDealer->allocate(info.mData->capacity());                              info.mEncryptedData = -                                new ABuffer(info.mData->capacity()); +                                new ABuffer(mem->pointer(), info.mData->capacity()); +                            info.mSharedEncryptedBuffer = mem;                          }                          buffers->push_back(info); @@ -978,6 +1288,13 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                              // We're always allocating output buffers after               
               // allocating input buffers, so this is a good                              // indication that now all buffers are allocated. +                            if (mIsVideo) { +                                String8 subtype; +                                addResource( +                                        String8(kResourceGraphicMemory), +                                        subtype, +                                        getGraphicBufferSize()); +                            }                              setState(STARTED);                              (new AMessage)->postReply(mReplyID);                          } else { @@ -993,13 +1310,13 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                      ALOGV("codec output format changed");                      if (mSoftRenderer == NULL && -                            mNativeWindow != NULL && +                            mSurface != NULL &&                              (mFlags & kFlagUsesSoftwareRenderer)) {                          AString mime;                          CHECK(msg->findString("mime", &mime));                          if (mime.startsWithIgnoreCase("video/")) { -                            mSoftRenderer = new SoftwareRenderer(mNativeWindow); +                            mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);                          }                      } @@ -1031,6 +1348,18 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                      break;                  } +                case CodecBase::kWhatOutputFramesRendered: +                { +                    // ignore these in all states except running, and check that we have a +                    // notification set +                    if (mState == STARTED && mOnFrameRenderedNotification != NULL) { +                        sp<AMessage> notify = mOnFrameRenderedNotification->dup(); +                        notify->setMessage("data", msg); +            
            notify->post(); +                    } +                    break; +                } +                  case CodecBase::kWhatFillThisBuffer:                  {                      /* size_t index = */updateBuffers(kPortIndexInput, msg); @@ -1068,7 +1397,11 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                      if (mFlags & kFlagIsAsync) {                          if (!mHaveInputSurface) { -                            onInputBufferAvailable(); +                            if (mState == FLUSHED) { +                                mHavePendingInputBuffers = true; +                            } else { +                                onInputBufferAvailable(); +                            }                          }                      } else if (mFlags & kFlagDequeueInputPending) {                          CHECK(handleDequeueInputBuffer(mDequeueInputReplyID)); @@ -1157,6 +1490,8 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                      }                      mFlags &= ~kFlagIsComponentAllocated; +                    mResourceManagerService->removeResource(getId(mResourceManagerClient)); +                      (new AMessage)->postReply(mReplyID);                      break;                  } @@ -1188,7 +1523,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatInit:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (mState != UNINITIALIZED) { @@ -1222,9 +1557,18 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              break;          } +        case kWhatSetNotification: +        { +            sp<AMessage> notify; +            if (msg->findMessage("on-frame-rendered", ¬ify)) { +                mOnFrameRenderedNotification = notify; +            } +            break; +        } +          case kWhatSetCallback:          { -            
uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (mState == UNINITIALIZED @@ -1256,7 +1600,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatConfigure:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (mState != INITIALIZED) { @@ -1265,26 +1609,25 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              }              sp<RefBase> obj; -            if (!msg->findObject("native-window", &obj)) { -                obj.clear(); -            } +            CHECK(msg->findObject("surface", &obj));              sp<AMessage> format;              CHECK(msg->findMessage("format", &format)); +            int32_t push; +            if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) { +                mFlags |= kFlagPushBlankBuffersOnShutdown; +            } +              if (obj != NULL) {                  format->setObject("native-window", obj); - -                status_t err = setNativeWindow( -                    static_cast<NativeWindowWrapper *>(obj.get()) -                        ->getSurfaceTextureClient()); - +                status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));                  if (err != OK) {                      PostReplyWithError(replyID, err);                      break;                  }              } else { -                setNativeWindow(NULL); +                handleSetSurface(NULL);              }              mReplyID = replyID; @@ -1311,9 +1654,69 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              break;          } +        case kWhatSetSurface: +        { +            sp<AReplyToken> replyID; +            CHECK(msg->senderAwaitsResponse(&replyID)); + +            status_t err = OK; +            sp<Surface> surface; + +            
switch (mState) { +                case CONFIGURED: +                case STARTED: +                case FLUSHED: +                { +                    sp<RefBase> obj; +                    (void)msg->findObject("surface", &obj); +                    sp<Surface> surface = static_cast<Surface *>(obj.get()); +                    if (mSurface == NULL) { +                        // do not support setting surface if it was not set +                        err = INVALID_OPERATION; +                    } else if (obj == NULL) { +                        // do not support unsetting surface +                        err = BAD_VALUE; +                    } else { +                        err = connectToSurface(surface); +                        if (err == BAD_VALUE) { +                            // assuming reconnecting to same surface +                            // TODO: check if it is the same surface +                            err = OK; +                        } else { +                            if (err == OK) { +                                if (mFlags & kFlagUsesSoftwareRenderer) { +                                    if (mSoftRenderer != NULL +                                            && (mFlags & kFlagPushBlankBuffersOnShutdown)) { +                                        pushBlankBuffersToNativeWindow(mSurface.get()); +                                    } +                                    mSoftRenderer = new SoftwareRenderer(surface); +                                    // TODO: check if this was successful +                                } else { +                                    err = mCodec->setSurface(surface); +                                } +                            } +                            if (err == OK) { +                                (void)disconnectFromSurface(); +                                mSurface = surface; +                            } +                        } +                    } +                    break; 
+                } + +                default: +                    err = INVALID_OPERATION; +                    break; +            } + +            PostReplyWithError(replyID, err); +            break; +        } +          case kWhatCreateInputSurface: +        case kWhatSetInputSurface:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              // Must be configured, but can't have been started yet. @@ -1323,17 +1726,28 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              }              mReplyID = replyID; -            mCodec->initiateCreateInputSurface(); +            if (msg->what() == kWhatCreateInputSurface) { +                mCodec->initiateCreateInputSurface(); +            } else { +                sp<RefBase> obj; +                CHECK(msg->findObject("input-surface", &obj)); + +                mCodec->initiateSetInputSurface( +                        static_cast<PersistentSurface *>(obj.get())); +            }              break;          } -          case kWhatStart:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (mState == FLUSHED) {                  setState(STARTED); +                if (mHavePendingInputBuffers) { +                    onInputBufferAvailable(); +                    mHavePendingInputBuffers = false; +                }                  mCodec->signalResume();                  PostReplyWithError(replyID, OK);                  break; @@ -1355,9 +1769,23 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              State targetState =                  (msg->what() == kWhatStop) ? 
INITIALIZED : UNINITIALIZED; -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID)); +            // already stopped/released +            if (mState == UNINITIALIZED && mReleasedByResourceManager) { +                sp<AMessage> response = new AMessage; +                response->setInt32("err", OK); +                response->postReply(replyID); +                break; +            } + +            int32_t reclaimed = 0; +            msg->findInt32("reclaimed", &reclaimed); +            if (reclaimed) { +                mReleasedByResourceManager = true; +            } +              if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1                      && mState != INITIALIZED                      && mState != CONFIGURED && !isExecuting()) { @@ -1371,6 +1799,8 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                  // and it should be in this case, no harm to allow a release()                  // if we're already uninitialized.                  sp<AMessage> response = new AMessage; +                // TODO: we shouldn't throw an exception for stop/release. Change this to wait until +                // the previous stop/release completes and then reply with OK.                  status_t err = mState == targetState ? 
OK : INVALID_OPERATION;                  response->setInt32("err", err);                  if (err == OK && targetState == UNINITIALIZED) { @@ -1398,12 +1828,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {                      msg->what() == kWhatStop /* keepComponentAllocated */);              returnBuffersToCodec(); + +            if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) { +                pushBlankBuffersToNativeWindow(mSurface.get()); +            }              break;          }          case kWhatDequeueInputBuffer:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (mFlags & kFlagIsAsync) { @@ -1435,7 +1869,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              if (timeoutUs > 0ll) {                  sp<AMessage> timeoutMsg = -                    new AMessage(kWhatDequeueInputTimedOut, id()); +                    new AMessage(kWhatDequeueInputTimedOut, this);                  timeoutMsg->setInt32(                          "generation", ++mDequeueInputTimeoutGeneration);                  timeoutMsg->post(timeoutUs); @@ -1464,7 +1898,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatQueueInputBuffer:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (!isExecuting()) { @@ -1483,7 +1917,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatDequeueOutputBuffer:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (mFlags & kFlagIsAsync) { @@ -1509,7 +1943,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              if (timeoutUs > 0ll) {                  sp<AMessage> timeoutMsg = -                    new 
AMessage(kWhatDequeueOutputTimedOut, id()); +                    new AMessage(kWhatDequeueOutputTimedOut, this);                  timeoutMsg->setInt32(                          "generation", ++mDequeueOutputTimeoutGeneration);                  timeoutMsg->post(timeoutUs); @@ -1538,7 +1972,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatReleaseOutputBuffer:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (!isExecuting()) { @@ -1557,7 +1991,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatSignalEndOfInputStream:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (!isExecuting()) { @@ -1575,10 +2009,14 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatGetBuffers:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID)); +            // Unfortunately widevine legacy source requires knowing all of the +            // codec input buffers, so we have to provide them even in async mode. 
+            int32_t widevine = 0; +            msg->findInt32("widevine", &widevine); -            if (!isExecuting() || (mFlags & kFlagIsAsync)) { +            if (!isExecuting() || ((mFlags & kFlagIsAsync) && !widevine)) {                  PostReplyWithError(replyID, INVALID_OPERATION);                  break;              } else if (mFlags & kFlagStickyError) { @@ -1609,7 +2047,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatFlush:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (!isExecuting()) { @@ -1635,7 +2073,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {              sp<AMessage> format =                  (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat); -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if ((mState != CONFIGURED && mState != STARTING && @@ -1672,7 +2110,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatGetName:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              if (mComponentName.empty()) { @@ -1688,7 +2126,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {          case kWhatSetParameters:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              sp<AMessage> params; @@ -1742,7 +2180,7 @@ status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {      AString errorDetailMsg; -    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id()); +    sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);      msg->setSize("index", bufferIndex);      msg->setSize("offset", 0);      msg->setSize("size", csd->size()); @@ -1759,7 
+2197,7 @@ void MediaCodec::setState(State newState) {          mSoftRenderer = NULL;          mCrypto.clear(); -        setNativeWindow(NULL); +        handleSetSurface(NULL);          mInputFormat.clear();          mOutputFormat.clear(); @@ -1943,7 +2381,8 @@ status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {                  key,                  iv,                  mode, -                info->mEncryptedData->base() + offset, +                info->mSharedEncryptedBuffer, +                offset,                  subSamples,                  numSubSamples,                  info->mData->base(), @@ -1969,6 +2408,23 @@ status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {      return OK;  } +//static +size_t MediaCodec::CreateFramesRenderedMessage( +        std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg) { +    size_t index = 0; + +    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); +            it != done.cend(); ++it) { +        if (it->getRenderTimeNs() < 0) { +            continue; // dropped frame from tracking +        } +        msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs()); +        msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs()); +        ++index; +    } +    return index; +} +  status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {      size_t index;      CHECK(msg->findSize("index", &index)); @@ -2001,26 +2457,31 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {      if (render && info->mData != NULL && info->mData->size() != 0) {          info->mNotify->setInt32("render", true); -        int64_t timestampNs = 0; -        if (msg->findInt64("timestampNs", ×tampNs)) { -            info->mNotify->setInt64("timestampNs", timestampNs); -        } else { -            // TODO: it seems like we should use the timestamp -            // in the (media)buffer as it potentially 
came from -            // an input surface, but we did not propagate it prior to -            // API 20.  Perhaps check for target SDK version. -#if 0 -            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) { -                ALOGV("using buffer PTS of %" PRId64, timestampNs); -                timestampNs *= 1000; -            } -#endif +        int64_t mediaTimeUs = -1; +        info->mData->meta()->findInt64("timeUs", &mediaTimeUs); + +        int64_t renderTimeNs = 0; +        if (!msg->findInt64("timestampNs", &renderTimeNs)) { +            // use media timestamp if client did not request a specific render timestamp +            ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs); +            renderTimeNs = mediaTimeUs * 1000;          } +        info->mNotify->setInt64("timestampNs", renderTimeNs);          if (mSoftRenderer != NULL) { -            mSoftRenderer->render( +            std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(                      info->mData->data(), info->mData->size(), -                    timestampNs, NULL, info->mFormat); +                    mediaTimeUs, renderTimeNs, NULL, info->mFormat); + +            // if we are running, notify rendered frames +            if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) { +                sp<AMessage> notify = mOnFrameRenderedNotification->dup(); +                sp<AMessage> data = new AMessage; +                if (CreateFramesRenderedMessage(doneFrames, data)) { +                    notify->setMessage("data", data); +                    notify->post(); +                } +            }          }      } @@ -2064,37 +2525,65 @@ ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {      return index;  } -status_t MediaCodec::setNativeWindow( -        const sp<Surface> &surfaceTextureClient) { -    status_t err; - -    if (mNativeWindow != NULL) { -        err = native_window_api_disconnect( -                
mNativeWindow.get(), NATIVE_WINDOW_API_MEDIA); +status_t MediaCodec::connectToSurface(const sp<Surface> &surface) { +    status_t err = OK; +    if (surface != NULL) { +        err = native_window_api_connect(surface.get(), NATIVE_WINDOW_API_MEDIA); +        if (err == BAD_VALUE) { +            ALOGI("native window already connected. Assuming no change of surface"); +            return err; +        } else if (err == OK) { +            // Require a fresh set of buffers after each connect by using a unique generation +            // number. Rely on the fact that max supported process id by Linux is 2^22. +            // PID is never 0 so we don't have to worry that we use the default generation of 0. +            // TODO: come up with a unique scheme if other producers also set the generation number. +            static uint32_t mSurfaceGeneration = 0; +            uint32_t generation = (getpid() << 10) | (++mSurfaceGeneration & ((1 << 10) - 1)); +            surface->setGenerationNumber(generation); +            ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), generation); + +            // HACK: clear any free buffers. Remove when connect will automatically do this. +            // This is needed as the consumer may be holding onto stale frames that it can reattach +            // to this surface after disconnect/connect, and those free frames would inherit the new +            // generation number. Disconnecting after setting a unique generation prevents this. 
+            native_window_api_disconnect(surface.get(), NATIVE_WINDOW_API_MEDIA); +            err = native_window_api_connect(surface.get(), NATIVE_WINDOW_API_MEDIA); +        }          if (err != OK) { -            ALOGW("native_window_api_disconnect returned an error: %s (%d)", -                    strerror(-err), err); +            ALOGE("native_window_api_connect returned an error: %s (%d)", strerror(-err), err);          } - -        mNativeWindow.clear();      } +    return err; +} -    if (surfaceTextureClient != NULL) { -        err = native_window_api_connect( -                surfaceTextureClient.get(), NATIVE_WINDOW_API_MEDIA); - +status_t MediaCodec::disconnectFromSurface() { +    status_t err = OK; +    if (mSurface != NULL) { +        // Resetting generation is not technically needed, but there is no need to keep it either +        mSurface->setGenerationNumber(0); +        err = native_window_api_disconnect(mSurface.get(), NATIVE_WINDOW_API_MEDIA);          if (err != OK) { -            ALOGE("native_window_api_connect returned an error: %s (%d)", -                    strerror(-err), err); - -            return err; +            ALOGW("native_window_api_disconnect returned an error: %s (%d)", strerror(-err), err);          } - -        mNativeWindow = surfaceTextureClient; +        // assume disconnected even on error +        mSurface.clear();      } +    return err; +} -    return OK; +status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) { +    status_t err = OK; +    if (mSurface != NULL) { +        (void)disconnectFromSurface(); +    } +    if (surface != NULL) { +        err = connectToSurface(surface); +        if (err == OK) { +            mSurface = surface; +        } +    } +    return err;  }  void MediaCodec::onInputBufferAvailable() { @@ -2197,7 +2686,7 @@ void MediaCodec::postActivityNotificationIfPossible() {  }  status_t MediaCodec::setParameters(const sp<AMessage> &params) { -    sp<AMessage> msg = new 
AMessage(kWhatSetParameters, id()); +    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);      msg->setMessage("params", params);      sp<AMessage> response; @@ -2253,12 +2742,6 @@ status_t MediaCodec::amendOutputFormatWithCodecSpecificData(  void MediaCodec::updateBatteryStat() {      if (mState == CONFIGURED && !mBatteryStatNotified) { -        AString mime; -        CHECK(mOutputFormat != NULL && -                mOutputFormat->findString("mime", &mime)); - -        mIsVideo = mime.startsWithIgnoreCase("video/"); -          BatteryNotifier& notifier(BatteryNotifier::getInstance());          if (mIsVideo) { diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp index cf6e937..5edc04c 100644 --- a/media/libstagefright/MediaCodecList.cpp +++ b/media/libstagefright/MediaCodecList.cpp @@ -18,11 +18,15 @@  #define LOG_TAG "MediaCodecList"  #include <utils/Log.h> +#include "MediaCodecListOverrides.h" +  #include <binder/IServiceManager.h>  #include <media/IMediaCodecList.h>  #include <media/IMediaPlayerService.h> +#include <media/IResourceManagerService.h>  #include <media/MediaCodecInfo.h> +#include <media/MediaResourcePolicy.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AMessage.h> @@ -31,27 +35,100 @@  #include <media/stagefright/OMXClient.h>  #include <media/stagefright/OMXCodec.h> +#include <sys/stat.h>  #include <utils/threads.h> +#include <cutils/properties.h>  #include <libexpat/expat.h>  namespace android { +const char *kMaxEncoderInputBuffers = "max-video-encoder-input-buffers"; +  static Mutex sInitMutex; -static MediaCodecList *gCodecList = NULL; +static bool parseBoolean(const char *s) { +    if (!strcasecmp(s, "true") || !strcasecmp(s, "yes") || !strcasecmp(s, "y")) { +        return true; +    } +    char *end; +    unsigned long res = strtoul(s, &end, 10); +    return *s != '\0' && *end == '\0' && res > 0; +} + +static bool isProfilingNeeded() { +    int8_t 
value = property_get_bool("debug.stagefright.profilecodec", 0); +    if (value == 0) { +        return false; +    } + +    bool profilingNeeded = true; +    FILE *resultsFile = fopen(kProfilingResults, "r"); +    if (resultsFile) { +        AString currentVersion = getProfilingVersionString(); +        size_t currentVersionSize = currentVersion.size(); +        char *versionString = new char[currentVersionSize + 1]; +        fgets(versionString, currentVersionSize + 1, resultsFile); +        if (strcmp(versionString, currentVersion.c_str()) == 0) { +            // profiling result up to date +            profilingNeeded = false; +        } +        fclose(resultsFile); +        delete[] versionString; +    } +    return profilingNeeded; +}  // static  sp<IMediaCodecList> MediaCodecList::sCodecList;  // static +void *MediaCodecList::profilerThreadWrapper(void * /*arg*/) { +    ALOGV("Enter profilerThreadWrapper."); +    remove(kProfilingResults);  // remove previous result so that it won't be loaded to +                                // the new MediaCodecList +    MediaCodecList *codecList = new MediaCodecList(); +    if (codecList->initCheck() != OK) { +        ALOGW("Failed to create a new MediaCodecList, skipping codec profiling."); +        delete codecList; +        return NULL; +    } + +    Vector<sp<MediaCodecInfo>> infos; +    for (size_t i = 0; i < codecList->countCodecs(); ++i) { +        infos.push_back(codecList->getCodecInfo(i)); +    } +    ALOGV("Codec profiling started."); +    profileCodecs(infos); +    ALOGV("Codec profiling completed."); +    codecList->parseTopLevelXMLFile(kProfilingResults, true /* ignore_errors */); + +    { +        Mutex::Autolock autoLock(sInitMutex); +        sCodecList = codecList; +    } +    return NULL; +} + +// static  sp<IMediaCodecList> MediaCodecList::getLocalInstance() {      Mutex::Autolock autoLock(sInitMutex); -    if (gCodecList == NULL) { -        gCodecList = new MediaCodecList; -        if 
(gCodecList->initCheck() == OK) { -            sCodecList = gCodecList; +    if (sCodecList == NULL) { +        MediaCodecList *codecList = new MediaCodecList; +        if (codecList->initCheck() == OK) { +            sCodecList = codecList; + +            if (isProfilingNeeded()) { +                ALOGV("Codec profiling needed, will be run in separated thread."); +                pthread_t profiler; +                if (pthread_create(&profiler, NULL, profilerThreadWrapper, NULL) != 0) { +                    ALOGW("Failed to create thread for codec profiling."); +                } +            } +        } else { +            // failure to initialize may be temporary. retry on next call. +            delete codecList;          }      } @@ -94,11 +171,15 @@ sp<IMediaCodecList> MediaCodecList::getInstance() {  }  MediaCodecList::MediaCodecList() -    : mInitCheck(NO_INIT) { +    : mInitCheck(NO_INIT), +      mUpdate(false), +      mGlobalSettings(new AMessage()) {      parseTopLevelXMLFile("/etc/media_codecs.xml"); +    parseTopLevelXMLFile("/etc/media_codecs_performance.xml", true/* ignore_errors */); +    parseTopLevelXMLFile(kProfilingResults, true/* ignore_errors */);  } -void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) { +void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml, bool ignore_errors) {      // get href_base      char *href_base_end = strrchr(codecs_xml, '/');      if (href_base_end != NULL) { @@ -112,20 +193,49 @@ void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {      OMXClient client;      mInitCheck = client.connect();      if (mInitCheck != OK) { -        return; +        return;  // this may fail if IMediaPlayerService is not available.      
}      mOMX = client.interface();      parseXMLFile(codecs_xml);      mOMX.clear();      if (mInitCheck != OK) { +        if (ignore_errors) { +            mInitCheck = OK; +            return; +        }          mCodecInfos.clear();          return;      } -    for (size_t i = mCodecInfos.size(); i-- > 0;) { -        const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get(); +    Vector<MediaResourcePolicy> policies; +    AString value; +    if (mGlobalSettings->findString(kPolicySupportsMultipleSecureCodecs, &value)) { +        policies.push_back( +                MediaResourcePolicy( +                        String8(kPolicySupportsMultipleSecureCodecs), +                        String8(value.c_str()))); +    } +    if (mGlobalSettings->findString(kPolicySupportsSecureWithNonSecureCodec, &value)) { +        policies.push_back( +                MediaResourcePolicy( +                        String8(kPolicySupportsSecureWithNonSecureCodec), +                        String8(value.c_str()))); +    } +    if (policies.size() > 0) { +        sp<IServiceManager> sm = defaultServiceManager(); +        sp<IBinder> binder = sm->getService(String16("media.resource_manager")); +        sp<IResourceManagerService> service = interface_cast<IResourceManagerService>(binder); +        if (service == NULL) { +            ALOGE("MediaCodecList: failed to get ResourceManagerService"); +        } else { +            service->config(policies); +        } +    } +    for (size_t i = mCodecInfos.size(); i > 0;) { +        i--; +        const MediaCodecInfo &info = *mCodecInfos.itemAt(i).get();          if (info.mCaps.size() == 0) {              // No types supported by this component???              
ALOGW("Component %s does not support any type of media?", @@ -169,6 +279,16 @@ void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {                      }                      ALOGV("    levels=[%s]", nice.c_str());                  } +                { +                    AString quirks; +                    for (size_t ix = 0; ix < info.mQuirks.size(); ix++) { +                        if (ix > 0) { +                            quirks.append(", "); +                        } +                        quirks.append(info.mQuirks[ix]); +                    } +                    ALOGV("    quirks=[%s]", quirks.c_str()); +                }              }  #endif          } @@ -328,6 +448,16 @@ void MediaCodecList::startElementHandler(                  mCurrentSection = SECTION_DECODERS;              } else if (!strcmp(name, "Encoders")) {                  mCurrentSection = SECTION_ENCODERS; +            } else if (!strcmp(name, "Settings")) { +                mCurrentSection = SECTION_SETTINGS; +            } +            break; +        } + +        case SECTION_SETTINGS: +        { +            if (!strcmp(name, "Setting")) { +                mInitCheck = addSettingFromAttributes(attrs);              }              break;          } @@ -397,6 +527,14 @@ void MediaCodecList::endElementHandler(const char *name) {      }      switch (mCurrentSection) { +        case SECTION_SETTINGS: +        { +            if (!strcmp(name, "Settings")) { +                mCurrentSection = SECTION_TOPLEVEL; +            } +            break; +        } +          case SECTION_DECODERS:          {              if (!strcmp(name, "Decoders")) { @@ -462,10 +600,10 @@ void MediaCodecList::endElementHandler(const char *name) {      --mDepth;  } -status_t MediaCodecList::addMediaCodecFromAttributes( -        bool encoder, const char **attrs) { +status_t MediaCodecList::addSettingFromAttributes(const char **attrs) {      const char *name = NULL; -    const char *type = NULL; +  
  const char *value = NULL; +    const char *update = NULL;      size_t i = 0;      while (attrs[i] != NULL) { @@ -475,11 +613,17 @@ status_t MediaCodecList::addMediaCodecFromAttributes(              }              name = attrs[i + 1];              ++i; -        } else if (!strcmp(attrs[i], "type")) { +        } else if (!strcmp(attrs[i], "value")) {              if (attrs[i + 1] == NULL) {                  return -EINVAL;              } -            type = attrs[i + 1]; +            value = attrs[i + 1]; +            ++i; +        } else if (!strcmp(attrs[i], "update")) { +            if (attrs[i + 1] == NULL) { +                return -EINVAL; +            } +            update = attrs[i + 1];              ++i;          } else {              return -EINVAL; @@ -488,10 +632,34 @@ status_t MediaCodecList::addMediaCodecFromAttributes(          ++i;      } -    if (name == NULL) { +    if (name == NULL || value == NULL) {          return -EINVAL;      } +    mUpdate = (update != NULL) && parseBoolean(update); +    if (mUpdate != mGlobalSettings->contains(name)) { +        return -EINVAL; +    } + +    mGlobalSettings->setString(name, value); +    return OK; +} + +void MediaCodecList::setCurrentCodecInfo(bool encoder, const char *name, const char *type) { +    for (size_t i = 0; i < mCodecInfos.size(); ++i) { +        if (AString(name) == mCodecInfos[i]->getCodecName()) { +            if (mCodecInfos[i]->getCapabilitiesFor(type) == NULL) { +                ALOGW("Overrides with an unexpected mime %s", type); +                // Create a new MediaCodecInfo (but don't add it to mCodecInfos) to hold the +                // overrides we don't want. 
+                mCurrentInfo = new MediaCodecInfo(name, encoder, type); +            } else { +                mCurrentInfo = mCodecInfos.editItemAt(i); +                mCurrentInfo->updateMime(type);  // to set the current cap +            } +            return; +        } +    }      mCurrentInfo = new MediaCodecInfo(name, encoder, type);      // The next step involves trying to load the codec, which may      // fail.  Only list the codec if this succeeds. @@ -500,6 +668,78 @@ status_t MediaCodecList::addMediaCodecFromAttributes(      if (initializeCapabilities(type) == OK) {          mCodecInfos.push_back(mCurrentInfo);      } +} + +status_t MediaCodecList::addMediaCodecFromAttributes( +        bool encoder, const char **attrs) { +    const char *name = NULL; +    const char *type = NULL; +    const char *update = NULL; + +    size_t i = 0; +    while (attrs[i] != NULL) { +        if (!strcmp(attrs[i], "name")) { +            if (attrs[i + 1] == NULL) { +                return -EINVAL; +            } +            name = attrs[i + 1]; +            ++i; +        } else if (!strcmp(attrs[i], "type")) { +            if (attrs[i + 1] == NULL) { +                return -EINVAL; +            } +            type = attrs[i + 1]; +            ++i; +        } else if (!strcmp(attrs[i], "update")) { +            if (attrs[i + 1] == NULL) { +                return -EINVAL; +            } +            update = attrs[i + 1]; +            ++i; +        } else { +            return -EINVAL; +        } + +        ++i; +    } + +    if (name == NULL) { +        return -EINVAL; +    } + +    mUpdate = (update != NULL) && parseBoolean(update); +    ssize_t index = -1; +    for (size_t i = 0; i < mCodecInfos.size(); ++i) { +        if (AString(name) == mCodecInfos[i]->getCodecName()) { +            index = i; +        } +    } +    if (mUpdate != (index >= 0)) { +        return -EINVAL; +    } + +    if (index >= 0) { +        // existing codec +        mCurrentInfo = 
mCodecInfos.editItemAt(index); +        if (type != NULL) { +            // existing type +            if (mCodecInfos[index]->getCapabilitiesFor(type) == NULL) { +                return -EINVAL; +            } +            mCurrentInfo->updateMime(type); +        } +    } else { +        // new codec +        mCurrentInfo = new MediaCodecInfo(name, encoder, type); +        // The next step involves trying to load the codec, which may +        // fail.  Only list the codec if this succeeds. +        // However, keep mCurrentInfo object around until parsing +        // of full codec info is completed. +        if (initializeCapabilities(type) == OK) { +            mCodecInfos.push_back(mCurrentInfo); +        } +    } +      return OK;  } @@ -553,6 +793,7 @@ status_t MediaCodecList::addQuirk(const char **attrs) {  status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {      const char *name = NULL; +    const char *update = NULL;      size_t i = 0;      while (attrs[i] != NULL) { @@ -562,6 +803,12 @@ status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {              }              name = attrs[i + 1];              ++i; +        } else if (!strcmp(attrs[i], "update")) { +            if (attrs[i + 1] == NULL) { +                return -EINVAL; +            } +            update = attrs[i + 1]; +            ++i;          } else {              return -EINVAL;          } @@ -573,14 +820,25 @@ status_t MediaCodecList::addTypeFromAttributes(const char **attrs) {          return -EINVAL;      } -    status_t ret = mCurrentInfo->addMime(name); +    bool isExistingType = (mCurrentInfo->getCapabilitiesFor(name) != NULL); +    if (mUpdate != isExistingType) { +        return -EINVAL; +    } + +    status_t ret; +    if (mUpdate) { +        ret = mCurrentInfo->updateMime(name); +    } else { +        ret = mCurrentInfo->addMime(name); +    } +      if (ret != OK) {          return ret;      }      // The next step involves trying to load the codec, 
which may      // fail.  Handle this gracefully (by not reporting such mime). -    if (initializeCapabilities(name) != OK) { +    if (!mUpdate && initializeCapabilities(name) != OK) {          mCurrentInfo->removeMime(name);      }      return OK; @@ -675,14 +933,16 @@ status_t MediaCodecList::addLimit(const char **attrs) {          return -EINVAL;      } -    // size, blocks, bitrate, frame-rate, blocks-per-second, aspect-ratio: range +    // size, blocks, bitrate, frame-rate, blocks-per-second, aspect-ratio, +    // measured-frame-rate, measured-blocks-per-second: range      // quality: range + default + [scale]      // complexity: range + default      bool found;      if (name == "aspect-ratio" || name == "bitrate" || name == "block-count"              || name == "blocks-per-second" || name == "complexity" -            || name == "frame-rate" || name == "quality" || name == "size") { +            || name == "frame-rate" || name == "quality" || name == "size" +            || name == "measured-blocks-per-second" || name.startsWith("measured-frame-rate-")) {          AString min, max;          if (msg->findString("min", &min) && msg->findString("max", &max)) {              min.append("-"); @@ -746,7 +1006,7 @@ status_t MediaCodecList::addLimit(const char **attrs) {              return limitFoundMissingAttr(name, "default");          } else if (msg->contains("in")) {              return limitFoundMissingAttr(name, "in"); -        } else if ((name == "channel-count") ^ +        } else if ((name == "channel-count" || name == "concurrent-instances") ^                  (found = msg->findString("max", &max))) {              return limitFoundMissingAttr(name, "max", found);          } else if (msg->contains("min")) { @@ -780,15 +1040,6 @@ status_t MediaCodecList::addLimit(const char **attrs) {      return OK;  } -static bool parseBoolean(const char *s) { -    if (!strcasecmp(s, "true") || !strcasecmp(s, "yes") || !strcasecmp(s, "y")) { -        return true; -    } -    
char *end; -    unsigned long res = strtoul(s, &end, 10); -    return *s != '\0' && *end == '\0' && res > 0; -} -  status_t MediaCodecList::addFeature(const char **attrs) {      size_t i = 0;      const char *name = NULL; @@ -860,4 +1111,8 @@ size_t MediaCodecList::countCodecs() const {      return mCodecInfos.size();  } +const sp<AMessage> MediaCodecList::getGlobalSettings() const { +    return mGlobalSettings; +} +  }  // namespace android diff --git a/media/libstagefright/MediaCodecListOverrides.cpp b/media/libstagefright/MediaCodecListOverrides.cpp new file mode 100644 index 0000000..4ec36b5 --- /dev/null +++ b/media/libstagefright/MediaCodecListOverrides.cpp @@ -0,0 +1,421 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaCodecListOverrides" +#include <utils/Log.h> + +#include "MediaCodecListOverrides.h" + +#include <cutils/properties.h> +#include <gui/Surface.h> +#include <media/ICrypto.h> +#include <media/IMediaCodecList.h> +#include <media/MediaCodecInfo.h> +#include <media/MediaResourcePolicy.h> +#include <media/openmax/OMX_IVCommon.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaCodec.h> +#include <media/stagefright/MediaCodecList.h> + +namespace android { + +const char *kProfilingResults = "/data/misc/media/media_codecs_profiling_results.xml"; + +AString getProfilingVersionString() { +    char val[PROPERTY_VALUE_MAX]; +    if (property_get("ro.build.display.id", val, NULL) && (strlen(val) > 0)) { +        return AStringPrintf("<!-- Profiled-with: %s -->", val); +    } + +    return "<!-- Profiled-with: UNKNOWN_BUILD_ID -->"; +} + +// a limit to avoid allocating unreasonable number of codec instances in the measurement. +// this should be in sync with the MAX_SUPPORTED_INSTANCES defined in MediaCodecInfo.java. +static const int kMaxInstances = 32; + +// TODO: move MediaCodecInfo to C++. Until then, some temp methods to parse out info. 
+static bool getMeasureSize(sp<MediaCodecInfo::Capabilities> caps, int32_t *width, int32_t *height) { +    AString sizeRange; +    if (!caps->getDetails()->findString("size-range", &sizeRange)) { +        return false; +    } +    AString minSize; +    AString maxSize; +    if (!splitString(sizeRange, "-", &minSize, &maxSize)) { +        return false; +    } +    AString sWidth; +    AString sHeight; +    if (!splitString(minSize, "x", &sWidth, &sHeight)) { +        if (!splitString(minSize, "*", &sWidth, &sHeight)) { +            return false; +        } +    } + +    *width = strtol(sWidth.c_str(), NULL, 10); +    *height = strtol(sHeight.c_str(), NULL, 10); +    return (*width > 0) && (*height > 0); +} + +static void getMeasureBitrate(sp<MediaCodecInfo::Capabilities> caps, int32_t *bitrate) { +    // Until have native MediaCodecInfo, we cannot get bitrates based on profile/levels. +    // We use 200000 as default value for our measurement. +    *bitrate = 200000; +    AString bitrateRange; +    if (!caps->getDetails()->findString("bitrate-range", &bitrateRange)) { +        return; +    } +    AString minBitrate; +    AString maxBitrate; +    if (!splitString(bitrateRange, "-", &minBitrate, &maxBitrate)) { +        return; +    } + +    *bitrate = strtol(minBitrate.c_str(), NULL, 10); +} + +static sp<AMessage> getMeasureFormat( +        bool isEncoder, AString mime, sp<MediaCodecInfo::Capabilities> caps) { +    sp<AMessage> format = new AMessage(); +    format->setString("mime", mime); + +    if (isEncoder) { +        int32_t bitrate = 0; +        getMeasureBitrate(caps, &bitrate); +        format->setInt32("bitrate", bitrate); +        format->setInt32("encoder", 1); +    } + +    if (mime.startsWith("video/")) { +        int32_t width = 0; +        int32_t height = 0; +        if (!getMeasureSize(caps, &width, &height)) { +            return NULL; +        } +        format->setInt32("width", width); +        format->setInt32("height", height); + +        
Vector<uint32_t> colorFormats; +        caps->getSupportedColorFormats(&colorFormats); +        if (colorFormats.size() == 0) { +            return NULL; +        } +        format->setInt32("color-format", colorFormats[0]); + +        format->setFloat("frame-rate", 10.0); +        format->setInt32("i-frame-interval", 10); +    } else { +        // TODO: profile hw audio +        return NULL; +    } + +    return format; +} + +static size_t doProfileEncoderInputBuffers( +        AString name, AString mime, sp<MediaCodecInfo::Capabilities> caps) { +    ALOGV("doProfileEncoderInputBuffers: name %s, mime %s", name.c_str(), mime.c_str()); + +    sp<AMessage> format = getMeasureFormat(true /* isEncoder */, mime, caps); +    if (format == NULL) { +        return 0; +    } + +    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque); +    ALOGV("doProfileEncoderInputBuffers: format %s", format->debugString().c_str()); + +    status_t err = OK; +    sp<ALooper> looper = new ALooper; +    looper->setName("MediaCodec_looper"); +    looper->start( +            false /* runOnCallingThread */, false /* canCallJava */, ANDROID_PRIORITY_AUDIO); + +    sp<MediaCodec> codec = MediaCodec::CreateByComponentName(looper, name.c_str(), &err); +    if (err != OK) { +        ALOGE("Failed to create codec: %s", name.c_str()); +        return 0; +    } + +    err = codec->configure(format, NULL, NULL, MediaCodec::CONFIGURE_FLAG_ENCODE); +    if (err != OK) { +        ALOGE("Failed to configure codec: %s with mime: %s", name.c_str(), mime.c_str()); +        codec->release(); +        return 0; +    } + +    sp<IGraphicBufferProducer> bufferProducer; +    err = codec->createInputSurface(&bufferProducer); +    if (err != OK) { +        ALOGE("Failed to create surface: %s with mime: %s", name.c_str(), mime.c_str()); +        codec->release(); +        return 0; +    } + +    int minUndequeued = 0; +    err = bufferProducer->query( +            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 
&minUndequeued); +    if (err != OK) { +        ALOGE("Failed to query NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS"); +        minUndequeued = 0; +    } + +    err = codec->release(); +    if (err != OK) { +        ALOGW("Failed to release codec: %s with mime: %s", name.c_str(), mime.c_str()); +    } + +    return minUndequeued; +} + +static size_t doProfileCodecs( +        bool isEncoder, AString name, AString mime, sp<MediaCodecInfo::Capabilities> caps) { +    sp<AMessage> format = getMeasureFormat(isEncoder, mime, caps); +    if (format == NULL) { +        return 0; +    } +    ALOGV("doProfileCodecs %s %s %s %s", +            name.c_str(), mime.c_str(), isEncoder ? "encoder" : "decoder", +            format->debugString().c_str()); + +    status_t err = OK; +    Vector<sp<MediaCodec>> codecs; +    while (err == OK && codecs.size() < kMaxInstances) { +        sp<ALooper> looper = new ALooper; +        looper->setName("MediaCodec_looper"); +        ALOGV("doProfileCodecs for codec #%zu", codecs.size()); +        ALOGV("doProfileCodecs start looper"); +        looper->start( +                false /* runOnCallingThread */, false /* canCallJava */, ANDROID_PRIORITY_AUDIO); +        ALOGV("doProfileCodecs CreateByComponentName"); +        sp<MediaCodec> codec = MediaCodec::CreateByComponentName(looper, name.c_str(), &err); +        if (err != OK) { +            ALOGV("Failed to create codec: %s", name.c_str()); +            break; +        } +        const sp<Surface> nativeWindow; +        const sp<ICrypto> crypto; +        uint32_t flags = isEncoder ? 
MediaCodec::CONFIGURE_FLAG_ENCODE : 0; +        ALOGV("doProfileCodecs configure"); +        err = codec->configure(format, nativeWindow, crypto, flags); +        if (err != OK) { +            ALOGV("Failed to configure codec: %s with mime: %s", name.c_str(), mime.c_str()); +            codec->release(); +            break; +        } +        ALOGV("doProfileCodecs start"); +        err = codec->start(); +        if (err != OK) { +            ALOGV("Failed to start codec: %s with mime: %s", name.c_str(), mime.c_str()); +            codec->release(); +            break; +        } +        codecs.push_back(codec); +    } + +    for (size_t i = 0; i < codecs.size(); ++i) { +        ALOGV("doProfileCodecs release %s", name.c_str()); +        err = codecs[i]->release(); +        if (err != OK) { +            ALOGE("Failed to release codec: %s with mime: %s", name.c_str(), mime.c_str()); +        } +    } + +    return codecs.size(); +} + +bool splitString(const AString &s, const AString &delimiter, AString *s1, AString *s2) { +    ssize_t pos = s.find(delimiter.c_str()); +    if (pos < 0) { +        return false; +    } +    *s1 = AString(s, 0, pos); +    *s2 = AString(s, pos + 1, s.size() - pos - 1); +    return true; +} + +bool splitString( +        const AString &s, const AString &delimiter, AString *s1, AString *s2, AString *s3) { +    AString temp; +    if (!splitString(s, delimiter, s1, &temp)) { +        return false; +    } +    if (!splitString(temp, delimiter, s2, s3)) { +        return false; +    } +    return true; +} + +void profileCodecs(const Vector<sp<MediaCodecInfo>> &infos) { +    CodecSettings global_results; +    KeyedVector<AString, CodecSettings> encoder_results; +    KeyedVector<AString, CodecSettings> decoder_results; +    profileCodecs(infos, &global_results, &encoder_results, &decoder_results); +    exportResultsToXML(kProfilingResults, global_results, encoder_results, decoder_results); +} + +void profileCodecs( +        const 
Vector<sp<MediaCodecInfo>> &infos, +        CodecSettings *global_results, +        KeyedVector<AString, CodecSettings> *encoder_results, +        KeyedVector<AString, CodecSettings> *decoder_results, +        bool forceToMeasure) { +    KeyedVector<AString, sp<MediaCodecInfo::Capabilities>> codecsNeedMeasure; +    AString supportMultipleSecureCodecs = "true"; +    size_t maxEncoderInputBuffers = 0; +    for (size_t i = 0; i < infos.size(); ++i) { +        const sp<MediaCodecInfo> info = infos[i]; +        AString name = info->getCodecName(); +        if (name.startsWith("OMX.google.") || +                // TODO: reenable below codecs once fixed +                name == "OMX.Intel.VideoDecoder.VP9.hybrid") { +            continue; +        } + +        Vector<AString> mimes; +        info->getSupportedMimes(&mimes); +        for (size_t i = 0; i < mimes.size(); ++i) { +            const sp<MediaCodecInfo::Capabilities> &caps = +                    info->getCapabilitiesFor(mimes[i].c_str()); +            if (!forceToMeasure && +                (caps->getDetails()->contains("max-supported-instances") || +                 caps->getDetails()->contains("max-concurrent-instances"))) { +                continue; +            } + +            size_t max = doProfileCodecs(info->isEncoder(), name, mimes[i], caps); +            if (max > 0) { +                CodecSettings settings; +                char maxStr[32]; +                sprintf(maxStr, "%zu", max); +                settings.add("max-supported-instances", maxStr); + +                AString key = name; +                key.append(" "); +                key.append(mimes[i]); + +                if (info->isEncoder()) { +                    encoder_results->add(key, settings); +                } else { +                    decoder_results->add(key, settings); +                } + +                if (name.endsWith(".secure")) { +                    if (max <= 1) { +                        supportMultipleSecureCodecs 
= "false"; +                    } +                } +                if (info->isEncoder() && mimes[i].startsWith("video/")) { +                    size_t encoderInputBuffers = +                        doProfileEncoderInputBuffers(name, mimes[i], caps); +                    if (encoderInputBuffers > maxEncoderInputBuffers) { +                        maxEncoderInputBuffers = encoderInputBuffers; +                    } +                } +            } +        } +    } +    if (maxEncoderInputBuffers > 0) { +        char tmp[32]; +        sprintf(tmp, "%zu", maxEncoderInputBuffers); +        global_results->add(kMaxEncoderInputBuffers, tmp); +    } +    global_results->add(kPolicySupportsMultipleSecureCodecs, supportMultipleSecureCodecs); +} + +static AString globalResultsToXml(const CodecSettings& results) { +    AString ret; +    for (size_t i = 0; i < results.size(); ++i) { +        AString setting = AStringPrintf( +                "        <Setting name=\"%s\" value=\"%s\" />\n", +                results.keyAt(i).c_str(), +                results.valueAt(i).c_str()); +        ret.append(setting); +    } +    return ret; +} + +static AString codecResultsToXml(const KeyedVector<AString, CodecSettings>& results) { +    AString ret; +    for (size_t i = 0; i < results.size(); ++i) { +        AString name; +        AString mime; +        if (!splitString(results.keyAt(i), " ", &name, &mime)) { +            continue; +        } +        AString codec = +                AStringPrintf("        <MediaCodec name=\"%s\" type=\"%s\" update=\"true\" >\n", +                              name.c_str(), +                              mime.c_str()); +        ret.append(codec); +        CodecSettings settings = results.valueAt(i); +        for (size_t i = 0; i < settings.size(); ++i) { +            // WARNING: we assume all the settings are "Limit". Currently we have only one type +            // of setting in this case, which is "max-supported-instances". 
+            AString setting = AStringPrintf( +                    "            <Limit name=\"%s\" value=\"%s\" />\n", +                    settings.keyAt(i).c_str(), +                    settings.valueAt(i).c_str()); +            ret.append(setting); +        } +        ret.append("        </MediaCodec>\n"); +    } +    return ret; +} + +void exportResultsToXML( +        const char *fileName, +        const CodecSettings& global_results, +        const KeyedVector<AString, CodecSettings>& encoder_results, +        const KeyedVector<AString, CodecSettings>& decoder_results) { +    if (global_results.size() == 0 && encoder_results.size() == 0 && decoder_results.size() == 0) { +        return; +    } + +    AString overrides; +    overrides.append(getProfilingVersionString()); +    overrides.append("\n"); +    overrides.append("<MediaCodecs>\n"); +    if (global_results.size() > 0) { +        overrides.append("    <Settings>\n"); +        overrides.append(globalResultsToXml(global_results)); +        overrides.append("    </Settings>\n"); +    } +    if (encoder_results.size() > 0) { +        overrides.append("    <Encoders>\n"); +        overrides.append(codecResultsToXml(encoder_results)); +        overrides.append("    </Encoders>\n"); +    } +    if (decoder_results.size() > 0) { +        overrides.append("    <Decoders>\n"); +        overrides.append(codecResultsToXml(decoder_results)); +        overrides.append("    </Decoders>\n"); +    } +    overrides.append("</MediaCodecs>\n"); + +    FILE *f = fopen(fileName, "wb"); +    if (f == NULL) { +        ALOGE("Failed to open %s for writing.", fileName); +        return; +    } +    if (fwrite(overrides.c_str(), 1, overrides.size(), f) != overrides.size()) { +        ALOGE("Failed to write to %s.", fileName); +    } +    fclose(f); +} + +}  // namespace android diff --git a/media/libstagefright/MediaCodecListOverrides.h b/media/libstagefright/MediaCodecListOverrides.h new file mode 100644 index 0000000..d4bb225 
--- /dev/null +++ b/media/libstagefright/MediaCodecListOverrides.h @@ -0,0 +1,57 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_CODEC_LIST_OVERRIDES_H_ + +#define MEDIA_CODEC_LIST_OVERRIDES_H_ + +#include <media/MediaCodecInfo.h> +#include <media/stagefright/foundation/AString.h> + +#include <utils/StrongPointer.h> +#include <utils/KeyedVector.h> + +namespace android { + +extern const char *kProfilingVersionString; +extern const char *kProfilingResults; + +struct MediaCodecInfo; + +AString getProfilingVersionString(); + +bool splitString(const AString &s, const AString &delimiter, AString *s1, AString *s2); + +// profile codecs and save the result to xml file named kProfilingResults. +void profileCodecs(const Vector<sp<MediaCodecInfo>> &infos); + +// profile codecs and save the result to global_results, encoder_results and decoder_results. 
+void profileCodecs( +        const Vector<sp<MediaCodecInfo>> &infos, +        CodecSettings *global_results, +        KeyedVector<AString, CodecSettings> *encoder_results, +        KeyedVector<AString, CodecSettings> *decoder_results, +        bool forceToMeasure = false); + +void exportResultsToXML( +        const char *fileName, +        const CodecSettings& global_results, +        const KeyedVector<AString, CodecSettings>& encoder_results, +        const KeyedVector<AString, CodecSettings>& decoder_results); + +}  // namespace android + +#endif  // MEDIA_CODEC_LIST_OVERRIDES_H_ diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp index c26e909..7f9f824 100644 --- a/media/libstagefright/MediaCodecSource.cpp +++ b/media/libstagefright/MediaCodecSource.cpp @@ -20,6 +20,7 @@  #include <inttypes.h> +#include <gui/IGraphicBufferConsumer.h>  #include <gui/IGraphicBufferProducer.h>  #include <gui/Surface.h>  #include <media/ICrypto.h> @@ -29,14 +30,18 @@  #include <media/stagefright/foundation/AMessage.h>  #include <media/stagefright/MediaBuffer.h>  #include <media/stagefright/MediaCodec.h> -#include <media/stagefright/MetaData.h> +#include <media/stagefright/MediaCodecSource.h>  #include <media/stagefright/MediaErrors.h>  #include <media/stagefright/MediaSource.h> -#include <media/stagefright/MediaCodecSource.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/PersistentSurface.h>  #include <media/stagefright/Utils.h>  namespace android { +const int kDefaultSwVideoEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888; +const int kDefaultSwVideoEncoderDataSpace = HAL_DATASPACE_BT709; +  struct MediaCodecSource::Puller : public AHandler {      Puller(const sp<MediaSource> &source); @@ -121,7 +126,7 @@ status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,      mLooper->registerHandler(this);      mNotify = notify; -    sp<AMessage> msg = new AMessage(kWhatStart, id()); +    sp<AMessage> msg = new 
AMessage(kWhatStart, this);      msg->setObject("meta", meta);      return postSynchronouslyAndReturnError(msg);  } @@ -137,19 +142,19 @@ void MediaCodecSource::Puller::stop() {      mSource->stop();      ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video"); -    (new AMessage(kWhatStop, id()))->post(); +    (new AMessage(kWhatStop, this))->post();  }  void MediaCodecSource::Puller::pause() { -    (new AMessage(kWhatPause, id()))->post(); +    (new AMessage(kWhatPause, this))->post();  }  void MediaCodecSource::Puller::resume() { -    (new AMessage(kWhatResume, id()))->post(); +    (new AMessage(kWhatResume, this))->post();  }  void MediaCodecSource::Puller::schedulePull() { -    sp<AMessage> msg = new AMessage(kWhatPull, id()); +    sp<AMessage> msg = new AMessage(kWhatPull, this);      msg->setInt32("generation", mPullGeneration);      msg->post();  } @@ -182,7 +187,7 @@ void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {              sp<AMessage> response = new AMessage;              response->setInt32("err", err); -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              response->postReply(replyID);              break; @@ -258,9 +263,10 @@ sp<MediaCodecSource> MediaCodecSource::Create(          const sp<ALooper> &looper,          const sp<AMessage> &format,          const sp<MediaSource> &source, +        const sp<IGraphicBufferConsumer> &consumer,          uint32_t flags) {      sp<MediaCodecSource> mediaSource = -            new MediaCodecSource(looper, format, source, flags); +            new MediaCodecSource(looper, format, source, consumer, flags);      if (mediaSource->init() == OK) {          return mediaSource; @@ -269,13 +275,13 @@ sp<MediaCodecSource> MediaCodecSource::Create(  }  status_t MediaCodecSource::start(MetaData* params) { -    sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id()); +    sp<AMessage> msg = new 
AMessage(kWhatStart, mReflector);      msg->setObject("meta", params);      return postSynchronouslyAndReturnError(msg);  }  status_t MediaCodecSource::stop() { -    sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id()); +    sp<AMessage> msg = new AMessage(kWhatStop, mReflector);      status_t err = postSynchronouslyAndReturnError(msg);      // mPuller->stop() needs to be done outside MediaCodecSource's looper, @@ -294,7 +300,7 @@ status_t MediaCodecSource::stop() {  }  status_t MediaCodecSource::pause() { -    (new AMessage(kWhatPause, mReflector->id()))->post(); +    (new AMessage(kWhatPause, mReflector))->post();      return OK;  } @@ -328,6 +334,7 @@ MediaCodecSource::MediaCodecSource(          const sp<ALooper> &looper,          const sp<AMessage> &outputFormat,          const sp<MediaSource> &source, +        const sp<IGraphicBufferConsumer> &consumer,          uint32_t flags)      : mLooper(looper),        mOutputFormat(outputFormat), @@ -337,6 +344,10 @@ MediaCodecSource::MediaCodecSource(        mStarted(false),        mStopping(false),        mDoMoreWorkPending(false), +      mSetEncoderFormat(false), +      mEncoderFormat(0), +      mEncoderDataSpace(0), +      mGraphicBufferConsumer(consumer),        mFirstSampleTimeUs(-1ll),        mEncoderReachedEOS(false),        mErrorCode(OK) { @@ -399,6 +410,9 @@ status_t MediaCodecSource::initEncoder() {      ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str()); +    mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector); +    mEncoder->setCallback(mEncoderActivityNotify); +      status_t err = mEncoder->configure(                  mOutputFormat,                  NULL /* nativeWindow */, @@ -415,16 +429,32 @@ status_t MediaCodecSource::initEncoder() {      if (mFlags & FLAG_USE_SURFACE_INPUT) {          CHECK(mIsVideo); -        err = mEncoder->createInputSurface(&mGraphicBufferProducer); +        if (mGraphicBufferConsumer != NULL) { +            // When using persistent 
surface, we are only interested in the +            // consumer, but have to use PersistentSurface as a wrapper to +            // pass consumer over messages (similar to BufferProducerWrapper) +            err = mEncoder->setInputSurface( +                    new PersistentSurface(NULL, mGraphicBufferConsumer)); +        } else { +            err = mEncoder->createInputSurface(&mGraphicBufferProducer); +        }          if (err != OK) {              return err;          }      } -    mEncoderActivityNotify = new AMessage( -            kWhatEncoderActivity, mReflector->id()); -    mEncoder->setCallback(mEncoderActivityNotify); +    sp<AMessage> inputFormat; +    int32_t usingSwReadOften; +    mSetEncoderFormat = false; +    if (mEncoder->getInputFormat(&inputFormat) == OK +            && inputFormat->findInt32("using-sw-read-often", &usingSwReadOften) +            && usingSwReadOften) { +        // this is a SW encoder; signal source to allocate SW readable buffers +        mSetEncoderFormat = true; +        mEncoderFormat = kDefaultSwVideoEncoderFormat; +        mEncoderDataSpace = kDefaultSwVideoEncoderDataSpace; +    }      err = mEncoder->start(); @@ -492,7 +522,7 @@ void MediaCodecSource::signalEOS(status_t err) {      if (mStopping && mEncoderReachedEOS) {          ALOGI("encoder (%s) stopped", mIsVideo ? 
"video" : "audio");          // posting reply to everyone that's waiting -        List<uint32_t>::iterator it; +        List<sp<AReplyToken>>::iterator it;          for (it = mStopReplyIDQueue.begin();                  it != mStopReplyIDQueue.end(); it++) {              (new AMessage)->postReply(*it); @@ -620,9 +650,17 @@ status_t MediaCodecSource::onStart(MetaData *params) {          resume(startTimeUs);      } else {          CHECK(mPuller != NULL); -        sp<AMessage> notify = new AMessage( -                kWhatPullerNotify, mReflector->id()); -        err = mPuller->start(params, notify); +        sp<MetaData> meta = params; +        if (mSetEncoderFormat) { +            if (meta == NULL) { +                meta = new MetaData; +            } +            meta->setInt32(kKeyPixelFormat, mEncoderFormat); +            meta->setInt32(kKeyColorSpace, mEncoderDataSpace); +        } + +        sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector); +        err = mPuller->start(meta.get(), notify);          if (err != OK) {              return err;          } @@ -684,7 +722,6 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {              size_t size;              int64_t timeUs;              int32_t flags; -            native_handle_t* handle = NULL;              CHECK(msg->findInt32("index", &index));              CHECK(msg->findSize("offset", &offset)); @@ -768,7 +805,7 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {      }      case kWhatStart:      { -        uint32_t replyID; +        sp<AReplyToken> replyID;          CHECK(msg->senderAwaitsResponse(&replyID));          sp<RefBase> obj; @@ -784,7 +821,7 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {      {          ALOGI("encoder (%s) stopping", mIsVideo ? 
"video" : "audio"); -        uint32_t replyID; +        sp<AReplyToken> replyID;          CHECK(msg->senderAwaitsResponse(&replyID));          if (mEncoderReachedEOS) { diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp index c48a5ae..2a50692 100644 --- a/media/libstagefright/MediaDefs.cpp +++ b/media/libstagefright/MediaDefs.cpp @@ -62,5 +62,6 @@ const char *MEDIA_MIMETYPE_TEXT_3GPP = "text/3gpp-tt";  const char *MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";  const char *MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";  const char *MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608"; +const char *MEDIA_MIMETYPE_DATA_TIMED_ID3 = "application/x-id3v4";  }  // namespace android diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index c7c6f34..b13877d 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -38,21 +38,6 @@  namespace android { -MediaMuxer::MediaMuxer(const char *path, OutputFormat format) -    : mFormat(format), -      mState(UNINITIALIZED) { -    if (format == OUTPUT_FORMAT_MPEG_4) { -        mWriter = new MPEG4Writer(path); -    } else if (format == OUTPUT_FORMAT_WEBM) { -        mWriter = new WebmWriter(path); -    } - -    if (mWriter != NULL) { -        mFileMeta = new MetaData; -        mState = INITIALIZED; -    } -} -  MediaMuxer::MediaMuxer(int fd, OutputFormat format)      : mFormat(format),        mState(UNINITIALIZED) { diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp new file mode 100644 index 0000000..3a45e25 --- /dev/null +++ b/media/libstagefright/MediaSync.cpp @@ -0,0 +1,861 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaSync" +#include <inttypes.h> + +#include <gui/BufferQueue.h> +#include <gui/IGraphicBufferConsumer.h> +#include <gui/IGraphicBufferProducer.h> + +#include <media/AudioTrack.h> +#include <media/stagefright/MediaClock.h> +#include <media/stagefright/MediaSync.h> +#include <media/stagefright/VideoFrameScheduler.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <ui/GraphicBuffer.h> + +// Maximum late time allowed for a video frame to be rendered. When a video +// frame arrives later than this number, it will be discarded without rendering. 
+static const int64_t kMaxAllowedVideoLateTimeUs = 40000ll; + +namespace android { + +// static +sp<MediaSync> MediaSync::create() { +    sp<MediaSync> sync = new MediaSync(); +    sync->mLooper->registerHandler(sync); +    return sync; +} + +MediaSync::MediaSync() +      : mIsAbandoned(false), +        mMutex(), +        mReleaseCondition(), +        mNumOutstandingBuffers(0), +        mUsageFlagsFromOutput(0), +        mMaxAcquiredBufferCount(1), +        mReturnPendingInputFrame(false), +        mNativeSampleRateInHz(0), +        mNumFramesWritten(0), +        mHasAudio(false), +        mNextBufferItemMediaUs(-1), +        mPlaybackRate(0.0) { +    mMediaClock = new MediaClock; + +    // initialize settings +    mPlaybackSettings = AUDIO_PLAYBACK_RATE_DEFAULT; +    mPlaybackSettings.mSpeed = mPlaybackRate; + +    mLooper = new ALooper; +    mLooper->setName("MediaSync"); +    mLooper->start(false, false, ANDROID_PRIORITY_AUDIO); +} + +MediaSync::~MediaSync() { +    if (mInput != NULL) { +        mInput->consumerDisconnect(); +    } +    if (mOutput != NULL) { +        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA); +    } + +    if (mLooper != NULL) { +        mLooper->unregisterHandler(id()); +        mLooper->stop(); +    } +} + +status_t MediaSync::setSurface(const sp<IGraphicBufferProducer> &output) { +    Mutex::Autolock lock(mMutex); + +    if (output == mOutput) { +        return NO_ERROR;  // same output surface. +    } + +    if (output == NULL && mSyncSettings.mSource == AVSYNC_SOURCE_VSYNC) { +        ALOGE("setSurface: output surface is used as sync source and cannot be removed."); +        return INVALID_OPERATION; +    } + +    if (output != NULL) { +        int newUsage = 0; +        output->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &newUsage); + +        // Check usage flags only when current output surface has been used to create input surface. 
+        if (mOutput != NULL && mInput != NULL) { +            int ignoredFlags = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER +                    | GRALLOC_USAGE_EXTERNAL_DISP); +            // New output surface is not allowed to add new usage flag except ignored ones. +            if ((newUsage & ~(mUsageFlagsFromOutput | ignoredFlags)) != 0) { +                ALOGE("setSurface: new output surface has new usage flag not used by current one."); +                return BAD_VALUE; +            } +        } + +        // Try to connect to new output surface. If failed, current output surface will not +        // be changed. +        IGraphicBufferProducer::QueueBufferOutput queueBufferOutput; +        sp<OutputListener> listener(new OutputListener(this, output)); +        IInterface::asBinder(output)->linkToDeath(listener); +        status_t status = +            output->connect(listener, +                            NATIVE_WINDOW_API_MEDIA, +                            true /* producerControlledByApp */, +                            &queueBufferOutput); +        if (status != NO_ERROR) { +            ALOGE("setSurface: failed to connect (%d)", status); +            return status; +        } + +        if (mFrameScheduler == NULL) { +            mFrameScheduler = new VideoFrameScheduler(); +            mFrameScheduler->init(); +        } +    } + +    if (mOutput != NULL) { +        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA); +        while (!mBuffersSentToOutput.isEmpty()) { +            returnBufferToInput_l(mBuffersSentToOutput.valueAt(0), Fence::NO_FENCE); +            mBuffersSentToOutput.removeItemsAt(0); +        } +    } + +    mOutput = output; + +    return NO_ERROR; +} + +// |audioTrack| is used only for querying information. +status_t MediaSync::setAudioTrack(const sp<AudioTrack> &audioTrack) { +    Mutex::Autolock lock(mMutex); + +    // TODO: support audio track change. 
+    if (mAudioTrack != NULL) { +        ALOGE("setAudioTrack: audioTrack has already been configured."); +        return INVALID_OPERATION; +    } + +    if (audioTrack == NULL && mSyncSettings.mSource == AVSYNC_SOURCE_AUDIO) { +        ALOGE("setAudioTrack: audioTrack is used as sync source and cannot be removed."); +        return INVALID_OPERATION; +    } + +    if (audioTrack != NULL) { +        // check if audio track supports the playback settings +        if (mPlaybackSettings.mSpeed != 0.f +                && audioTrack->setPlaybackRate(mPlaybackSettings) != OK) { +            ALOGE("playback settings are not supported by the audio track"); +            return INVALID_OPERATION; +        } +        uint32_t nativeSampleRateInHz = audioTrack->getOriginalSampleRate(); +        if (nativeSampleRateInHz <= 0) { +            ALOGE("setAudioTrack: native sample rate should be positive."); +            return BAD_VALUE; +        } +        mAudioTrack = audioTrack; +        mNativeSampleRateInHz = nativeSampleRateInHz; +        (void)setPlaybackSettings_l(mPlaybackSettings); +    } +    else { +        mAudioTrack = NULL; +        mNativeSampleRateInHz = 0; +    } + +    // potentially resync to new source +    resync_l(); +    return OK; +} + +status_t MediaSync::createInputSurface( +        sp<IGraphicBufferProducer> *outBufferProducer) { +    if (outBufferProducer == NULL) { +        return BAD_VALUE; +    } + +    Mutex::Autolock lock(mMutex); + +    if (mOutput == NULL) { +        return NO_INIT; +    } + +    if (mInput != NULL) { +        return INVALID_OPERATION; +    } + +    sp<IGraphicBufferProducer> bufferProducer; +    sp<IGraphicBufferConsumer> bufferConsumer; +    BufferQueue::createBufferQueue(&bufferProducer, &bufferConsumer); + +    sp<InputListener> listener(new InputListener(this)); +    IInterface::asBinder(bufferConsumer)->linkToDeath(listener); +    status_t status = +        bufferConsumer->consumerConnect(listener, false /* 
controlledByApp */); +    if (status == NO_ERROR) { +        bufferConsumer->setConsumerName(String8("MediaSync")); +        // propagate usage bits from output surface +        mUsageFlagsFromOutput = 0; +        mOutput->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &mUsageFlagsFromOutput); +        bufferConsumer->setConsumerUsageBits(mUsageFlagsFromOutput); +        *outBufferProducer = bufferProducer; +        mInput = bufferConsumer; + +        // set undequeued buffer count +        int minUndequeuedBuffers; +        mOutput->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers); +        mMaxAcquiredBufferCount = minUndequeuedBuffers; +        bufferConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount); +    } +    return status; +} + +void MediaSync::resync_l() { +    AVSyncSource src = mSyncSettings.mSource; +    if (src == AVSYNC_SOURCE_DEFAULT) { +        if (mAudioTrack != NULL) { +            src = AVSYNC_SOURCE_AUDIO; +        } else { +            src = AVSYNC_SOURCE_SYSTEM_CLOCK; +        } +    } + +    // TODO: resync ourselves to the current clock (e.g. on sync source change) +    updatePlaybackRate_l(mPlaybackRate); +} + +void MediaSync::updatePlaybackRate_l(float rate) { +    if (rate > mPlaybackRate) { +        mNextBufferItemMediaUs = -1; +    } +    mPlaybackRate = rate; +    // TODO: update frame scheduler with this info +    mMediaClock->setPlaybackRate(rate); +    onDrainVideo_l(); +} + +sp<const MediaClock> MediaSync::getMediaClock() { +    return mMediaClock; +} + +status_t MediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) { +    Mutex::Autolock lock(mMutex); +    // User should check the playback rate if it doesn't want to receive a +    // huge number for play time. 
+    if (mPlaybackRate == 0.0f) { +        *outTimeUs = INT64_MAX; +        return OK; +    } + +    uint32_t numFramesPlayed = 0; +    if (mAudioTrack != NULL) { +        status_t res = mAudioTrack->getPosition(&numFramesPlayed); +        if (res != OK) { +            return res; +        } +    } + +    int64_t numPendingFrames = mNumFramesWritten - numFramesPlayed; +    if (numPendingFrames < 0) { +        numPendingFrames = 0; +        ALOGW("getPlayTimeForPendingAudioFrames: pending frame count is negative."); +    } +    double timeUs = numPendingFrames * 1000000.0 +            / (mNativeSampleRateInHz * (double)mPlaybackRate); +    if (timeUs > (double)INT64_MAX) { +        // Overflow. +        *outTimeUs = INT64_MAX; +        ALOGW("getPlayTimeForPendingAudioFrames: play time for pending audio frames " +              "is too high, possibly due to super low playback rate(%f)", mPlaybackRate); +    } else { +        *outTimeUs = (int64_t)timeUs; +    } + +    return OK; +} + +status_t MediaSync::updateQueuedAudioData( +        size_t sizeInBytes, int64_t presentationTimeUs) { +    if (sizeInBytes == 0) { +        return OK; +    } + +    Mutex::Autolock lock(mMutex); + +    if (mAudioTrack == NULL) { +        ALOGW("updateQueuedAudioData: audioTrack has NOT been configured."); +        return INVALID_OPERATION; +    } + +    int64_t numFrames = sizeInBytes / mAudioTrack->frameSize(); +    int64_t maxMediaTimeUs = presentationTimeUs +            + getDurationIfPlayedAtNativeSampleRate_l(numFrames); + +    int64_t nowUs = ALooper::GetNowUs(); +    int64_t nowMediaUs = presentationTimeUs +            - getDurationIfPlayedAtNativeSampleRate_l(mNumFramesWritten) +            + getPlayedOutAudioDurationMedia_l(nowUs); + +    mNumFramesWritten += numFrames; + +    int64_t oldRealTime = -1; +    if (mNextBufferItemMediaUs != -1) { +        oldRealTime = getRealTime(mNextBufferItemMediaUs, nowUs); +    } + +    mMediaClock->updateAnchor(nowMediaUs, nowUs, 
maxMediaTimeUs); +    mHasAudio = true; + +    if (oldRealTime != -1) { +        int64_t newRealTime = getRealTime(mNextBufferItemMediaUs, nowUs); +        if (newRealTime >= oldRealTime) { +            return OK; +        } +    } + +    mNextBufferItemMediaUs = -1; +    onDrainVideo_l(); +    return OK; +} + +void MediaSync::setName(const AString &name) { +    Mutex::Autolock lock(mMutex); +    mInput->setConsumerName(String8(name.c_str())); +} + +void MediaSync::flush() { +    Mutex::Autolock lock(mMutex); +    if (mFrameScheduler != NULL) { +        mFrameScheduler->restart(); +    } +    while (!mBufferItems.empty()) { +        BufferItem *bufferItem = &*mBufferItems.begin(); +        returnBufferToInput_l(bufferItem->mGraphicBuffer, bufferItem->mFence); +        mBufferItems.erase(mBufferItems.begin()); +    } +    mNextBufferItemMediaUs = -1; +    mNumFramesWritten = 0; +    mReturnPendingInputFrame = true; +    mReleaseCondition.signal(); +    mMediaClock->clearAnchor(); +} + +status_t MediaSync::setVideoFrameRateHint(float rate) { +    Mutex::Autolock lock(mMutex); +    if (rate < 0.f) { +        return BAD_VALUE; +    } +    if (mFrameScheduler != NULL) { +        mFrameScheduler->init(rate); +    } +    return OK; +} + +float MediaSync::getVideoFrameRate() { +    Mutex::Autolock lock(mMutex); +    if (mFrameScheduler != NULL) { +        float fps = mFrameScheduler->getFrameRate(); +        if (fps > 0.f) { +            return fps; +        } +    } + +    // we don't have or know the frame rate +    return -1.f; +} + +status_t MediaSync::setSyncSettings(const AVSyncSettings &syncSettings) { +    // validate settings +    if (syncSettings.mSource >= AVSYNC_SOURCE_MAX +            || syncSettings.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX +            || syncSettings.mTolerance < 0.f +            || syncSettings.mTolerance >= AVSYNC_TOLERANCE_MAX) { +        return BAD_VALUE; +    } + +    Mutex::Autolock lock(mMutex); + +    // verify that we have 
the sync source +    switch (syncSettings.mSource) { +        case AVSYNC_SOURCE_AUDIO: +            if (mAudioTrack == NULL) { +                ALOGE("setSyncSettings: audio sync source requires an audio track"); +                return BAD_VALUE; +            } +            break; +        case AVSYNC_SOURCE_VSYNC: +            if (mOutput == NULL) { +                ALOGE("setSyncSettings: vsync sync source requires an output surface"); +                return BAD_VALUE; +            } +            break; +        default: +            break; +    } + +    mSyncSettings = syncSettings; +    resync_l(); +    return OK; +} + +void MediaSync::getSyncSettings(AVSyncSettings *syncSettings) { +    Mutex::Autolock lock(mMutex); +    *syncSettings = mSyncSettings; +} + +status_t MediaSync::setPlaybackSettings(const AudioPlaybackRate &rate) { +    Mutex::Autolock lock(mMutex); + +    status_t err = setPlaybackSettings_l(rate); +    if (err == OK) { +        // TODO: adjust rate if using VSYNC as source +        updatePlaybackRate_l(rate.mSpeed); +    } +    return err; +} + +status_t MediaSync::setPlaybackSettings_l(const AudioPlaybackRate &rate) { +    if (rate.mSpeed < 0.f || rate.mPitch < 0.f) { +        // We don't validate other audio settings. +        // They will be validated when/if audiotrack is set. 
+        return BAD_VALUE; +    } + +    if (mAudioTrack != NULL) { +        if (rate.mSpeed == 0.f) { +            mAudioTrack->pause(); +        } else { +            status_t err = mAudioTrack->setPlaybackRate(rate); +            if (err != OK) { +                return BAD_VALUE; +            } + +            // ignore errors +            (void)mAudioTrack->start(); +        } +    } +    mPlaybackSettings = rate; +    return OK; +} + +void MediaSync::getPlaybackSettings(AudioPlaybackRate *rate) { +    Mutex::Autolock lock(mMutex); +    *rate = mPlaybackSettings; +} + +int64_t MediaSync::getRealTime(int64_t mediaTimeUs, int64_t nowUs) { +    int64_t realUs; +    if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) { +        // If failed to get current position, e.g. due to audio clock is +        // not ready, then just play out video immediately without delay. +        return nowUs; +    } +    return realUs; +} + +int64_t MediaSync::getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames) { +    return (numFrames * 1000000LL / mNativeSampleRateInHz); +} + +int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) { +    CHECK(mAudioTrack != NULL); + +    uint32_t numFramesPlayed; +    int64_t numFramesPlayedAt; +    AudioTimestamp ts; +    static const int64_t kStaleTimestamp100ms = 100000; + +    status_t res = mAudioTrack->getTimestamp(ts); +    if (res == OK) { +        // case 1: mixing audio tracks. +        numFramesPlayed = ts.mPosition; +        numFramesPlayedAt = +            ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; +        const int64_t timestampAge = nowUs - numFramesPlayedAt; +        if (timestampAge > kStaleTimestamp100ms) { +            // This is an audio FIXME. +            // getTimestamp returns a timestamp which may come from audio +            // mixing threads. After pausing, the MixerThread may go idle, +            // thus the mTime estimate may become stale. 
Assuming that the +            // MixerThread runs 20ms, with FastMixer at 5ms, the max latency +            // should be about 25ms with an average around 12ms (to be +            // verified). For safety we use 100ms. +            ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) " +                  "numFramesPlayedAt(%lld)", +                  (long long)nowUs, (long long)numFramesPlayedAt); +            numFramesPlayedAt = nowUs - kStaleTimestamp100ms; +        } +        //ALOGD("getTimestamp: OK %d %lld", +        //      numFramesPlayed, (long long)numFramesPlayedAt); +    } else if (res == WOULD_BLOCK) { +        // case 2: transitory state on start of a new track +        numFramesPlayed = 0; +        numFramesPlayedAt = nowUs; +        //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", +        //      numFramesPlayed, (long long)numFramesPlayedAt); +    } else { +        // case 3: transitory at new track or audio fast tracks. +        res = mAudioTrack->getPosition(&numFramesPlayed); +        CHECK_EQ(res, (status_t)OK); +        numFramesPlayedAt = nowUs; +        numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */ +        //ALOGD("getPosition: %d %lld", numFramesPlayed, (long long)numFramesPlayedAt); +    } + +    //can't be negative until 12.4 hrs, test. +    //CHECK_EQ(numFramesPlayed & (1 << 31), 0); +    int64_t durationUs = +        getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed) +            + nowUs - numFramesPlayedAt; +    if (durationUs < 0) { +        // Occurs when numFramesPlayed position is very small and the following: +        // (1) In case 1, the time nowUs is computed before getTimestamp() is +        //     called and numFramesPlayedAt is greater than nowUs by time more +        //     than numFramesPlayed. +        // (2) In case 3, using getPosition and adding mAudioTrack->latency() +        //     to numFramesPlayedAt, by a time amount greater than +        //     numFramesPlayed. 
+        // +        // Both of these are transitory conditions. +        ALOGV("getPlayedOutAudioDurationMedia_l: negative duration %lld " +              "set to zero", (long long)durationUs); +        durationUs = 0; +    } +    ALOGV("getPlayedOutAudioDurationMedia_l(%lld) nowUs(%lld) frames(%u) " +          "framesAt(%lld)", +          (long long)durationUs, (long long)nowUs, numFramesPlayed, +          (long long)numFramesPlayedAt); +    return durationUs; +} + +void MediaSync::onDrainVideo_l() { +    if (!isPlaying()) { +        return; +    } + +    while (!mBufferItems.empty()) { +        int64_t nowUs = ALooper::GetNowUs(); +        BufferItem *bufferItem = &*mBufferItems.begin(); +        int64_t itemMediaUs = bufferItem->mTimestamp / 1000; +        int64_t itemRealUs = getRealTime(itemMediaUs, nowUs); + +        // adjust video frame PTS based on vsync +        itemRealUs = mFrameScheduler->schedule(itemRealUs * 1000) / 1000; +        int64_t twoVsyncsUs = 2 * (mFrameScheduler->getVsyncPeriod() / 1000); + +        // post 2 display refreshes before rendering is due +        if (itemRealUs <= nowUs + twoVsyncsUs) { +            ALOGV("adjusting PTS from %lld to %lld", +                    (long long)bufferItem->mTimestamp / 1000, (long long)itemRealUs); +            bufferItem->mTimestamp = itemRealUs * 1000; +            bufferItem->mIsAutoTimestamp = false; + +            if (mHasAudio) { +                if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) { +                    renderOneBufferItem_l(*bufferItem); +                } else { +                    // too late. +                    returnBufferToInput_l( +                            bufferItem->mGraphicBuffer, bufferItem->mFence); +                    mFrameScheduler->restart(); +                } +            } else { +                // always render video buffer in video-only mode. 
+                renderOneBufferItem_l(*bufferItem); + +                // smooth out videos >= 10fps +                mMediaClock->updateAnchor( +                        itemMediaUs, nowUs, itemMediaUs + 100000); +            } + +            mBufferItems.erase(mBufferItems.begin()); +            mNextBufferItemMediaUs = -1; +        } else { +            if (mNextBufferItemMediaUs == -1 +                    || mNextBufferItemMediaUs > itemMediaUs) { +                sp<AMessage> msg = new AMessage(kWhatDrainVideo, this); +                msg->post(itemRealUs - nowUs - twoVsyncsUs); +                mNextBufferItemMediaUs = itemMediaUs; +            } +            break; +        } +    } +} + +void MediaSync::onFrameAvailableFromInput() { +    Mutex::Autolock lock(mMutex); + +    const static nsecs_t kAcquireWaitTimeout = 2000000000; // 2 seconds + +    mReturnPendingInputFrame = false; + +    // If there are too many outstanding buffers, wait until a buffer is +    // released back to the input in onBufferReleased. +    // NOTE: BufferQueue allows dequeuing maxAcquiredBufferCount + 1 buffers +    while (mNumOutstandingBuffers > mMaxAcquiredBufferCount +            && !mIsAbandoned && !mReturnPendingInputFrame) { +        if (mReleaseCondition.waitRelative(mMutex, kAcquireWaitTimeout) != OK) { +            ALOGI_IF(mPlaybackRate != 0.f, "still waiting to release a buffer before acquire"); +        } + +        // If the sync is abandoned while we are waiting, the release +        // condition variable will be broadcast, and we should just return +        // without attempting to do anything more (since the input queue will +        // also be abandoned). +        if (mIsAbandoned) { +            return; +        } +    } + +    // Acquire and detach the buffer from the input. 
+    BufferItem bufferItem; +    status_t status = mInput->acquireBuffer(&bufferItem, 0 /* presentWhen */); +    if (status != NO_ERROR) { +        ALOGE("acquiring buffer from input failed (%d)", status); +        return; +    } +    ++mNumOutstandingBuffers; + +    ALOGV("acquired buffer %#llx from input", (long long)bufferItem.mGraphicBuffer->getId()); + +    status = mInput->detachBuffer(bufferItem.mBuf); +    if (status != NO_ERROR) { +        ALOGE("detaching buffer from input failed (%d)", status); +        if (status == NO_INIT) { +            // If the input has been abandoned, move on. +            onAbandoned_l(true /* isInput */); +        } +        return; +    } + +    if (mBuffersFromInput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) { +        // Something is wrong since this buffer should be at our hands, bail. +        ALOGE("received buffer multiple times from input"); +        mInput->consumerDisconnect(); +        onAbandoned_l(true /* isInput */); +        return; +    } +    mBuffersFromInput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer); + +    // If flush happened while waiting for a buffer to be released, simply return it +    // TRICKY: do it here after it is detached so that we don't have to cache mGraphicBuffer. 
+    if (mReturnPendingInputFrame) { +        mReturnPendingInputFrame = false; +        returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence); +        return; +    } + +    mBufferItems.push_back(bufferItem); + +    if (mBufferItems.size() == 1) { +        onDrainVideo_l(); +    } +} + +void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem) { +    IGraphicBufferProducer::QueueBufferInput queueInput( +            bufferItem.mTimestamp, +            bufferItem.mIsAutoTimestamp, +            bufferItem.mDataSpace, +            bufferItem.mCrop, +            static_cast<int32_t>(bufferItem.mScalingMode), +            bufferItem.mTransform, +            bufferItem.mIsDroppable, +            bufferItem.mFence); + +    // Attach and queue the buffer to the output. +    int slot; +    mOutput->setGenerationNumber(bufferItem.mGraphicBuffer->getGenerationNumber()); +    status_t status = mOutput->attachBuffer(&slot, bufferItem.mGraphicBuffer); +    ALOGE_IF(status != NO_ERROR, "attaching buffer to output failed (%d)", status); +    if (status == NO_ERROR) { +        IGraphicBufferProducer::QueueBufferOutput queueOutput; +        status = mOutput->queueBuffer(slot, queueInput, &queueOutput); +        ALOGE_IF(status != NO_ERROR, "queueing buffer to output failed (%d)", status); +    } + +    if (status != NO_ERROR) { +        returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence); +        if (status == NO_INIT) { +            // If the output has been abandoned, move on. +            onAbandoned_l(false /* isInput */); +        } +        return; +    } + +    if (mBuffersSentToOutput.indexOfKey(bufferItem.mGraphicBuffer->getId()) >= 0) { +        // Something is wrong since this buffer should be held by output now, bail. 
+        mInput->consumerDisconnect(); +        onAbandoned_l(true /* isInput */); +        return; +    } +    mBuffersSentToOutput.add(bufferItem.mGraphicBuffer->getId(), bufferItem.mGraphicBuffer); + +    ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId()); +} + +void MediaSync::onBufferReleasedByOutput(sp<IGraphicBufferProducer> &output) { +    Mutex::Autolock lock(mMutex); + +    if (output != mOutput) { +        return;  // This is not the current output, ignore. +    } + +    sp<GraphicBuffer> buffer; +    sp<Fence> fence; +    status_t status = mOutput->detachNextBuffer(&buffer, &fence); +    ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status); + +    if (status == NO_INIT) { +        // If the output has been abandoned, we can't do anything else, +        // since buffer is invalid. +        onAbandoned_l(false /* isInput */); +        return; +    } + +    ALOGV("detached buffer %#llx from output", (long long)buffer->getId()); + +    // If we've been abandoned, we can't return the buffer to the input, so just +    // move on. +    if (mIsAbandoned) { +        return; +    } + +    ssize_t ix = mBuffersSentToOutput.indexOfKey(buffer->getId()); +    if (ix < 0) { +        // The buffer is unknown, maybe leftover, ignore. +        return; +    } +    mBuffersSentToOutput.removeItemsAt(ix); + +    returnBufferToInput_l(buffer, fence); +} + +void MediaSync::returnBufferToInput_l( +        const sp<GraphicBuffer> &buffer, const sp<Fence> &fence) { +    ssize_t ix = mBuffersFromInput.indexOfKey(buffer->getId()); +    if (ix < 0) { +        // The buffer is unknown, something is wrong, bail. 
+        ALOGE("output returned unknown buffer"); +        mOutput->disconnect(NATIVE_WINDOW_API_MEDIA); +        onAbandoned_l(false /* isInput */); +        return; +    } +    sp<GraphicBuffer> oldBuffer = mBuffersFromInput.valueAt(ix); +    mBuffersFromInput.removeItemsAt(ix); + +    // Attach and release the buffer back to the input. +    int consumerSlot; +    status_t status = mInput->attachBuffer(&consumerSlot, oldBuffer); +    ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status); +    if (status == NO_ERROR) { +        status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */, +                EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence); +        ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status); +    } + +    // Notify any waiting onFrameAvailable calls. +    --mNumOutstandingBuffers; +    mReleaseCondition.signal(); + +    if (status == NO_ERROR) { +        ALOGV("released buffer %#llx to input", (long long)oldBuffer->getId()); +    } +} + +void MediaSync::onAbandoned_l(bool isInput) { +    ALOGE("the %s has abandoned me", (isInput ? "input" : "output")); +    if (!mIsAbandoned) { +        if (isInput) { +            mOutput->disconnect(NATIVE_WINDOW_API_MEDIA); +        } else { +            mInput->consumerDisconnect(); +        } +        mIsAbandoned = true; +    } +    mReleaseCondition.broadcast(); +} + +void MediaSync::onMessageReceived(const sp<AMessage> &msg) { +    switch (msg->what()) { +        case kWhatDrainVideo: +        { +            Mutex::Autolock lock(mMutex); +            if (mNextBufferItemMediaUs != -1) { +                int64_t nowUs = ALooper::GetNowUs(); +                int64_t itemRealUs = getRealTime(mNextBufferItemMediaUs, nowUs); + +                // The message could arrive earlier than expected due to +                // various reasons, e.g., media clock has been changed because +                // of new anchor time or playback rate. 
In such cases, the +                // message needs to be re-posted. +                if (itemRealUs > nowUs) { +                    msg->post(itemRealUs - nowUs); +                    break; +                } +            } + +            onDrainVideo_l(); +            break; +        } + +        default: +            TRESPASS(); +            break; +    } +} + +MediaSync::InputListener::InputListener(const sp<MediaSync> &sync) +      : mSync(sync) {} + +MediaSync::InputListener::~InputListener() {} + +void MediaSync::InputListener::onFrameAvailable(const BufferItem &/* item */) { +    mSync->onFrameAvailableFromInput(); +} + +// We don't care about sideband streams, since we won't relay them. +void MediaSync::InputListener::onSidebandStreamChanged() { +    ALOGE("onSidebandStreamChanged: got sideband stream unexpectedly."); +} + + +void MediaSync::InputListener::binderDied(const wp<IBinder> &/* who */) { +    Mutex::Autolock lock(mSync->mMutex); +    mSync->onAbandoned_l(true /* isInput */); +} + +MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync, +        const sp<IGraphicBufferProducer> &output) +      : mSync(sync), +        mOutput(output) {} + +MediaSync::OutputListener::~OutputListener() {} + +void MediaSync::OutputListener::onBufferReleased() { +    mSync->onBufferReleasedByOutput(mOutput); +} + +void MediaSync::OutputListener::binderDied(const wp<IBinder> &/* who */) { +    Mutex::Autolock lock(mSync->mMutex); +    mSync->onAbandoned_l(false /* isInput */); +} + +} // namespace android diff --git a/media/libstagefright/MidiExtractor.cpp b/media/libstagefright/MidiExtractor.cpp index 66fab77..f6b8c84 100644 --- a/media/libstagefright/MidiExtractor.cpp +++ b/media/libstagefright/MidiExtractor.cpp @@ -217,7 +217,7 @@ status_t MidiEngine::releaseBuffers() {  }  status_t MidiEngine::seekTo(int64_t positionUs) { -    ALOGV("seekTo %lld", positionUs); +    ALOGV("seekTo %lld", (long long)positionUs);      EAS_RESULT result = 
EAS_Locate(mEasData, mEasHandle, positionUs / 1000, false);      return result == EAS_SUCCESS ? OK : UNKNOWN_ERROR;  } diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp index 7d7d631..f82636b 100644 --- a/media/libstagefright/NuCachedSource2.cpp +++ b/media/libstagefright/NuCachedSource2.cpp @@ -226,7 +226,7 @@ NuCachedSource2::NuCachedSource2(      mLooper->start(false /* runOnCallingThread */, true /* canCallJava */);      Mutex::Autolock autoLock(mLock); -    (new AMessage(kWhatFetchMore, mReflector->id()))->post(); +    (new AMessage(kWhatFetchMore, mReflector))->post();  }  NuCachedSource2::~NuCachedSource2() { @@ -433,7 +433,7 @@ void NuCachedSource2::onFetch() {          delayUs = 100000ll;      } -    (new AMessage(kWhatFetchMore, mReflector->id()))->post(delayUs); +    (new AMessage(kWhatFetchMore, mReflector))->post(delayUs);  }  void NuCachedSource2::onRead(const sp<AMessage> &msg) { @@ -503,7 +503,7 @@ void NuCachedSource2::restartPrefetcherIfNecessary_l(  ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {      Mutex::Autolock autoSerializer(mSerializer); -    ALOGV("readAt offset %lld, size %zu", offset, size); +    ALOGV("readAt offset %lld, size %zu", (long long)offset, size);      Mutex::Autolock autoLock(mLock);      if (mDisconnecting) { @@ -522,7 +522,7 @@ ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {          return size;      } -    sp<AMessage> msg = new AMessage(kWhatRead, mReflector->id()); +    sp<AMessage> msg = new AMessage(kWhatRead, mReflector);      msg->setInt64("offset", offset);      msg->setPointer("data", data);      msg->setSize("size", size); @@ -579,10 +579,17 @@ size_t NuCachedSource2::approxDataRemaining_l(status_t *finalStatus) const {  ssize_t NuCachedSource2::readInternal(off64_t offset, void *data, size_t size) {      CHECK_LE(size, (size_t)mHighwaterThresholdBytes); -    ALOGV("readInternal offset %lld size %zu", offset, 
size); +    ALOGV("readInternal offset %lld size %zu", (long long)offset, size);      Mutex::Autolock autoLock(mLock); +    // If we're disconnecting, return EOS and don't access *data pointer. +    // data could be on the stack of the caller to NuCachedSource2::readAt(), +    // which may have exited already. +    if (mDisconnecting) { +        return ERROR_END_OF_STREAM; +    } +      if (!mFetching) {          mLastAccessPos = offset;          restartPrefetcherIfNecessary_l( @@ -640,7 +647,7 @@ status_t NuCachedSource2::seekInternal_l(off64_t offset) {          return OK;      } -    ALOGI("new range: offset= %lld", offset); +    ALOGI("new range: offset= %lld", (long long)offset);      mCacheOffset = offset; @@ -719,10 +726,10 @@ void NuCachedSource2::updateCacheParamsFromString(const char *s) {          mKeepAliveIntervalUs = kDefaultKeepAliveIntervalUs;      } -    ALOGV("lowwater = %zu bytes, highwater = %zu bytes, keepalive = %" PRId64 " us", +    ALOGV("lowwater = %zu bytes, highwater = %zu bytes, keepalive = %lld us",           mLowwaterThresholdBytes,           mHighwaterThresholdBytes, -         mKeepAliveIntervalUs); +         (long long)mKeepAliveIntervalUs);  }  // static diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp index 230c1f7..e69890d 100644 --- a/media/libstagefright/OMXClient.cpp +++ b/media/libstagefright/OMXClient.cpp @@ -72,7 +72,7 @@ struct MuxOMX : public IOMX {              node_id node, OMX_STATETYPE* state);      virtual status_t storeMetaDataInBuffers( -            node_id node, OMX_U32 port_index, OMX_BOOL enable); +            node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type);      virtual status_t prepareForAdaptivePlayback(              node_id node, OMX_U32 port_index, OMX_BOOL enable, @@ -90,7 +90,7 @@ struct MuxOMX : public IOMX {      virtual status_t useBuffer(              node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -            buffer_id 
*buffer); +            buffer_id *buffer, OMX_U32 allottedSize);      virtual status_t useGraphicBuffer(              node_id node, OMX_U32 port_index, @@ -102,7 +102,15 @@ struct MuxOMX : public IOMX {      virtual status_t createInputSurface(              node_id node, OMX_U32 port_index, -            sp<IGraphicBufferProducer> *bufferProducer); +            sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type); + +    virtual status_t createPersistentInputSurface( +            sp<IGraphicBufferProducer> *bufferProducer, +            sp<IGraphicBufferConsumer> *bufferConsumer); + +    virtual status_t setInputSurface( +            node_id node, OMX_U32 port_index, +            const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type);      virtual status_t signalEndOfInputStream(node_id node); @@ -112,18 +120,18 @@ struct MuxOMX : public IOMX {      virtual status_t allocateBufferWithBackup(              node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -            buffer_id *buffer); +            buffer_id *buffer, OMX_U32 allottedSize);      virtual status_t freeBuffer(              node_id node, OMX_U32 port_index, buffer_id buffer); -    virtual status_t fillBuffer(node_id node, buffer_id buffer); +    virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd);      virtual status_t emptyBuffer(              node_id node,              buffer_id buffer,              OMX_U32 range_offset, OMX_U32 range_length, -            OMX_U32 flags, OMX_TICKS timestamp); +            OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);      virtual status_t getExtensionIndex(              node_id node, @@ -284,8 +292,8 @@ status_t MuxOMX::getState(  }  status_t MuxOMX::storeMetaDataInBuffers( -        node_id node, OMX_U32 port_index, OMX_BOOL enable) { -    return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable); +        node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) { +    
return getOMX(node)->storeMetaDataInBuffers(node, port_index, enable, type);  }  status_t MuxOMX::prepareForAdaptivePlayback( @@ -314,8 +322,8 @@ status_t MuxOMX::getGraphicBufferUsage(  status_t MuxOMX::useBuffer(          node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -        buffer_id *buffer) { -    return getOMX(node)->useBuffer(node, port_index, params, buffer); +        buffer_id *buffer, OMX_U32 allottedSize) { +    return getOMX(node)->useBuffer(node, port_index, params, buffer, allottedSize);  }  status_t MuxOMX::useGraphicBuffer( @@ -334,12 +342,26 @@ status_t MuxOMX::updateGraphicBufferInMeta(  status_t MuxOMX::createInputSurface(          node_id node, OMX_U32 port_index, -        sp<IGraphicBufferProducer> *bufferProducer) { +        sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {      status_t err = getOMX(node)->createInputSurface( -            node, port_index, bufferProducer); +            node, port_index, bufferProducer, type);      return err;  } +status_t MuxOMX::createPersistentInputSurface( +        sp<IGraphicBufferProducer> *bufferProducer, +        sp<IGraphicBufferConsumer> *bufferConsumer) { +    // TODO: local or remote? 
Always use remote for now +    return mRemoteOMX->createPersistentInputSurface( +            bufferProducer, bufferConsumer); +} + +status_t MuxOMX::setInputSurface( +        node_id node, OMX_U32 port_index, +        const sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) { +    return getOMX(node)->setInputSurface(node, port_index, bufferConsumer, type); +} +  status_t MuxOMX::signalEndOfInputStream(node_id node) {      return getOMX(node)->signalEndOfInputStream(node);  } @@ -353,9 +375,9 @@ status_t MuxOMX::allocateBuffer(  status_t MuxOMX::allocateBufferWithBackup(          node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -        buffer_id *buffer) { +        buffer_id *buffer, OMX_U32 allottedSize) {      return getOMX(node)->allocateBufferWithBackup( -            node, port_index, params, buffer); +            node, port_index, params, buffer, allottedSize);  }  status_t MuxOMX::freeBuffer( @@ -363,17 +385,17 @@ status_t MuxOMX::freeBuffer(      return getOMX(node)->freeBuffer(node, port_index, buffer);  } -status_t MuxOMX::fillBuffer(node_id node, buffer_id buffer) { -    return getOMX(node)->fillBuffer(node, buffer); +status_t MuxOMX::fillBuffer(node_id node, buffer_id buffer, int fenceFd) { +    return getOMX(node)->fillBuffer(node, buffer, fenceFd);  }  status_t MuxOMX::emptyBuffer(          node_id node,          buffer_id buffer,          OMX_U32 range_offset, OMX_U32 range_length, -        OMX_U32 flags, OMX_TICKS timestamp) { +        OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {      return getOMX(node)->emptyBuffer( -            node, buffer, range_offset, range_length, flags, timestamp); +            node, buffer, range_offset, range_length, flags, timestamp, fenceFd);  }  status_t MuxOMX::getExtensionIndex( @@ -400,10 +422,16 @@ status_t OMXClient::connect() {      sp<IBinder> binder = sm->getService(String16("media.player"));      sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder); -   
 CHECK(service.get() != NULL); +    if (service.get() == NULL) { +        ALOGE("Cannot obtain IMediaPlayerService"); +        return NO_INIT; +    }      mOMX = service->getOMX(); -    CHECK(mOMX.get() != NULL); +    if (mOMX.get() == NULL) { +        ALOGE("Cannot obtain IOMX"); +        return NO_INIT; +    }      if (!mOMX->livesLocally(0 /* node */, getpid())) {          ALOGI("Using client-side OMX mux."); diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 4d30069..96aa808 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -43,6 +43,7 @@  #include <media/stagefright/MediaExtractor.h>  #include <media/stagefright/MetaData.h>  #include <media/stagefright/OMXCodec.h> +#include <media/stagefright/SurfaceUtils.h>  #include <media/stagefright/Utils.h>  #include <media/stagefright/SkipCutBuffer.h>  #include <utils/Vector.h> @@ -115,12 +116,15 @@ struct OMXCodecObserver : public BnOMXObserver {      }      // from IOMXObserver -    virtual void onMessage(const omx_message &msg) { +    virtual void onMessages(const std::list<omx_message> &messages) {          sp<OMXCodec> codec = mTarget.promote();          if (codec.get() != NULL) {              Mutex::Autolock autoLock(codec->mLock); -            codec->on_message(msg); +            for (std::list<omx_message>::const_iterator it = messages.cbegin(); +                  it != messages.cend(); ++it) { +                codec->on_message(*it); +            }              codec.clear();          }      } @@ -1057,7 +1061,7 @@ status_t OMXCodec::getVideoProfileLevel(          const sp<MetaData>& meta,          const CodecProfileLevel& defaultProfileLevel,          CodecProfileLevel &profileLevel) { -    CODEC_LOGV("Default profile: %ld, level %ld", +    CODEC_LOGV("Default profile: %u, level #x%x",              defaultProfileLevel.mProfile, defaultProfileLevel.mLevel);      // Are the default profile and level overwriten? 
@@ -1283,7 +1287,7 @@ status_t OMXCodec::setVideoOutputFormat(      success = success && meta->findInt32(kKeyHeight, &height);      CHECK(success); -    CODEC_LOGV("setVideoOutputFormat width=%ld, height=%ld", width, height); +    CODEC_LOGV("setVideoOutputFormat width=%d, height=%d", width, height);      OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;      if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { @@ -1650,7 +1654,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {          return err;      } -    CODEC_LOGV("allocating %lu buffers of size %lu on %s port", +    CODEC_LOGV("allocating %u buffers of size %u on %s port",              def.nBufferCountActual, def.nBufferSize,              portIndex == kPortIndexInput ? "input" : "output"); @@ -1677,7 +1681,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {                          &info.mData);              } else {                  err = mOMX->allocateBufferWithBackup( -                        mNode, portIndex, mem, &buffer); +                        mNode, portIndex, mem, &buffer, mem->size());              }          } else if (portIndex == kPortIndexOutput                  && (mQuirks & kRequiresAllocateBufferOnOutputPorts)) { @@ -1689,10 +1693,10 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {                          &info.mData);              } else {                  err = mOMX->allocateBufferWithBackup( -                        mNode, portIndex, mem, &buffer); +                        mNode, portIndex, mem, &buffer, mem->size());              }          } else { -            err = mOMX->useBuffer(mNode, portIndex, mem, &buffer); +            err = mOMX->useBuffer(mNode, portIndex, mem, &buffer, mem->size());          }          if (err != OK) { @@ -1723,7 +1727,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {          mPortBuffers[portIndex].push(info); -        CODEC_LOGV("allocated buffer %p on %s port", buffer, +     
   CODEC_LOGV("allocated buffer %u on %s port", buffer,               portIndex == kPortIndexInput ? "input" : "output");      } @@ -1745,7 +1749,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {                  if (mSkipCutBuffer != NULL) {                      size_t prevbuffersize = mSkipCutBuffer->size();                      if (prevbuffersize != 0) { -                        ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbuffersize); +                        ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbuffersize);                      }                  }                  mSkipCutBuffer = new SkipCutBuffer(delay * frameSize, padding * frameSize); @@ -1783,35 +1787,6 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {      return OK;  } -status_t OMXCodec::applyRotation() { -    sp<MetaData> meta = mSource->getFormat(); - -    int32_t rotationDegrees; -    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) { -        rotationDegrees = 0; -    } - -    uint32_t transform; -    switch (rotationDegrees) { -        case 0: transform = 0; break; -        case 90: transform = HAL_TRANSFORM_ROT_90; break; -        case 180: transform = HAL_TRANSFORM_ROT_180; break; -        case 270: transform = HAL_TRANSFORM_ROT_270; break; -        default: transform = 0; break; -    } - -    status_t err = OK; - -    if (transform) { -        err = native_window_set_buffers_transform( -                mNativeWindow.get(), transform); -        ALOGE("native_window_set_buffers_transform failed: %s (%d)", -                strerror(-err), -err); -    } - -    return err; -} -  status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {      // Get the number of buffers needed.      
OMX_PARAM_PORTDEFINITIONTYPE def; @@ -1825,21 +1800,11 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {          return err;      } -    err = native_window_set_buffers_geometry( -            mNativeWindow.get(), -            def.format.video.nFrameWidth, -            def.format.video.nFrameHeight, -            def.format.video.eColorFormat); - -    if (err != 0) { -        ALOGE("native_window_set_buffers_geometry failed: %s (%d)", -                strerror(-err), -err); -        return err; -    } +    sp<MetaData> meta = mSource->getFormat(); -    err = applyRotation(); -    if (err != OK) { -        return err; +    int32_t rotationDegrees; +    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) { +        rotationDegrees = 0;      }      // Set up the native window. @@ -1850,34 +1815,19 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {          // XXX: Currently this error is logged, but not fatal.          usage = 0;      } +      if (mFlags & kEnableGrallocUsageProtected) {          usage |= GRALLOC_USAGE_PROTECTED;      } -    // Make sure to check whether either Stagefright or the video decoder -    // requested protected buffers. -    if (usage & GRALLOC_USAGE_PROTECTED) { -        // Verify that the ANativeWindow sends images directly to -        // SurfaceFlinger. 
-        int queuesToNativeWindow = 0; -        err = mNativeWindow->query( -                mNativeWindow.get(), NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, -                &queuesToNativeWindow); -        if (err != 0) { -            ALOGE("error authenticating native window: %d", err); -            return err; -        } -        if (queuesToNativeWindow != 1) { -            ALOGE("native window could not be authenticated"); -            return PERMISSION_DENIED; -        } -    } - -    ALOGV("native_window_set_usage usage=0x%lx", usage); -    err = native_window_set_usage( -            mNativeWindow.get(), usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP); +    err = setNativeWindowSizeFormatAndUsage( +            mNativeWindow.get(), +            def.format.video.nFrameWidth, +            def.format.video.nFrameHeight, +            def.format.video.eColorFormat, +            rotationDegrees, +            usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);      if (err != 0) { -        ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);          return err;      } @@ -2044,150 +1994,6 @@ OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {      return bufInfo;  } -status_t OMXCodec::pushBlankBuffersToNativeWindow() { -    status_t err = NO_ERROR; -    ANativeWindowBuffer* anb = NULL; -    int numBufs = 0; -    int minUndequeuedBufs = 0; - -    // We need to reconnect to the ANativeWindow as a CPU client to ensure that -    // no frames get dropped by SurfaceFlinger assuming that these are video -    // frames. 
-    err = native_window_api_disconnect(mNativeWindow.get(), -            NATIVE_WINDOW_API_MEDIA); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", -                strerror(-err), -err); -        return err; -    } - -    err = native_window_api_connect(mNativeWindow.get(), -            NATIVE_WINDOW_API_CPU); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: api_connect failed: %s (%d)", -                strerror(-err), -err); -        return err; -    } - -    err = native_window_set_buffers_geometry(mNativeWindow.get(), 1, 1, -            HAL_PIXEL_FORMAT_RGBX_8888); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: set_buffers_geometry failed: %s (%d)", -                strerror(-err), -err); -        goto error; -    } - -    err = native_window_set_usage(mNativeWindow.get(), -            GRALLOC_USAGE_SW_WRITE_OFTEN); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: set_usage failed: %s (%d)", -                strerror(-err), -err); -        goto error; -    } - -    err = native_window_set_scaling_mode(mNativeWindow.get(), -            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); -    if (err != OK) { -        ALOGE("error pushing blank frames: set_scaling_mode failed: %s (%d)", -                strerror(-err), -err); -        goto error; -    } - -    err = mNativeWindow->query(mNativeWindow.get(), -            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query " -                "failed: %s (%d)", strerror(-err), -err); -        goto error; -    } - -    numBufs = minUndequeuedBufs + 1; -    err = native_window_set_buffer_count(mNativeWindow.get(), numBufs); -    if (err != NO_ERROR) { -        ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)", -                strerror(-err), -err); -        goto error; -   
 } - -    // We  push numBufs + 1 buffers to ensure that we've drawn into the same -    // buffer twice.  This should guarantee that the buffer has been displayed -    // on the screen and then been replaced, so an previous video frames are -    // guaranteed NOT to be currently displayed. -    for (int i = 0; i < numBufs + 1; i++) { -        err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &anb); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        sp<GraphicBuffer> buf(new GraphicBuffer(anb, false)); - -        // Fill the buffer with the a 1x1 checkerboard pattern ;) -        uint32_t* img = NULL; -        err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img)); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: lock failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        *img = 0; - -        err = buf->unlock(); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: unlock failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        err = mNativeWindow->queueBuffer(mNativeWindow.get(), -                buf->getNativeBuffer(), -1); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)", -                    strerror(-err), -err); -            goto error; -        } - -        anb = NULL; -    } - -error: - -    if (err != NO_ERROR) { -        // Clean up after an error. 
-        if (anb != NULL) { -            mNativeWindow->cancelBuffer(mNativeWindow.get(), anb, -1); -        } - -        native_window_api_disconnect(mNativeWindow.get(), -                NATIVE_WINDOW_API_CPU); -        native_window_api_connect(mNativeWindow.get(), -                NATIVE_WINDOW_API_MEDIA); - -        return err; -    } else { -        // Clean up after success. -        err = native_window_api_disconnect(mNativeWindow.get(), -                NATIVE_WINDOW_API_CPU); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", -                    strerror(-err), -err); -            return err; -        } - -        err = native_window_api_connect(mNativeWindow.get(), -                NATIVE_WINDOW_API_MEDIA); -        if (err != NO_ERROR) { -            ALOGE("error pushing blank frames: api_connect failed: %s (%d)", -                    strerror(-err), -err); -            return err; -        } - -        return NO_ERROR; -    } -} -  int64_t OMXCodec::getDecodingTimeUs() {      CHECK(mIsEncoder && mIsVideo); @@ -2708,7 +2514,7 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {          default:          { -            CODEC_LOGV("CMD_COMPLETE(%d, %ld)", cmd, data); +            CODEC_LOGV("CMD_COMPLETE(%d, %u)", cmd, data);              break;          }      } @@ -2734,7 +2540,7 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) {                  if (countBuffersWeOwn(mPortBuffers[kPortIndexInput]) !=                      mPortBuffers[kPortIndexInput].size()) {                      ALOGE("Codec did not return all input buffers " -                          "(received %d / %d)", +                          "(received %zu / %zu)",                              countBuffersWeOwn(mPortBuffers[kPortIndexInput]),                              mPortBuffers[kPortIndexInput].size());                      TRESPASS(); @@ -2743,7 +2549,7 @@ void OMXCodec::onStateChange(OMX_STATETYPE 
newState) {                  if (countBuffersWeOwn(mPortBuffers[kPortIndexOutput]) !=                      mPortBuffers[kPortIndexOutput].size()) {                      ALOGE("Codec did not return all output buffers " -                          "(received %d / %d)", +                          "(received %zu / %zu)",                              countBuffersWeOwn(mPortBuffers[kPortIndexOutput]),                              mPortBuffers[kPortIndexOutput].size());                      TRESPASS(); @@ -2769,7 +2575,7 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) {                      // them has made it to the display.  This allows the OMX                      // component teardown to zero out any protected buffers                      // without the risk of scanning out one of those buffers. -                    pushBlankBuffersToNativeWindow(); +                    pushBlankBuffersToNativeWindow(mNativeWindow.get());                  }                  setState(IDLE_TO_LOADED); @@ -2847,7 +2653,7 @@ status_t OMXCodec::freeBuffersOnPort(          CHECK(info->mStatus == OWNED_BY_US                  || info->mStatus == OWNED_BY_NATIVE_WINDOW); -        CODEC_LOGV("freeing buffer %p on port %ld", info->mBuffer, portIndex); +        CODEC_LOGV("freeing buffer %u on port %u", info->mBuffer, portIndex);          status_t err = freeBuffer(portIndex, i); @@ -2894,7 +2700,7 @@ status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {  }  void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) { -    CODEC_LOGV("PORT_SETTINGS_CHANGED(%ld)", portIndex); +    CODEC_LOGV("PORT_SETTINGS_CHANGED(%u)", portIndex);      CHECK(mState == EXECUTING || mState == EXECUTING_TO_IDLE);      CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput); @@ -2921,7 +2727,7 @@ bool OMXCodec::flushPortAsync(OMX_U32 portIndex) {      CHECK(mState == EXECUTING || mState == RECONFIGURING              || mState == EXECUTING_TO_IDLE); -    CODEC_LOGV("flushPortAsync(%ld): we own %d out of %d 
buffers already.", +    CODEC_LOGV("flushPortAsync(%u): we own %zu out of %zu buffers already.",           portIndex, countBuffersWeOwn(mPortBuffers[portIndex]),           mPortBuffers[portIndex].size()); @@ -2950,7 +2756,7 @@ void OMXCodec::disablePortAsync(OMX_U32 portIndex) {      CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);      mPortStatus[portIndex] = DISABLING; -    CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex); +    CODEC_LOGV("sending OMX_CommandPortDisable(%u)", portIndex);      status_t err =          mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);      CHECK_EQ(err, (status_t)OK); @@ -2964,7 +2770,7 @@ status_t OMXCodec::enablePortAsync(OMX_U32 portIndex) {      CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLED);      mPortStatus[portIndex] = ENABLING; -    CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex); +    CODEC_LOGV("sending OMX_CommandPortEnable(%u)", portIndex);      return mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);  } @@ -3037,7 +2843,7 @@ OMXCodec::BufferInfo *OMXCodec::findInputBufferByDataPointer(void *ptr) {          if (info->mData == ptr) {              CODEC_LOGV( -                    "input buffer data ptr = %p, buffer_id = %p", +                    "input buffer data ptr = %p, buffer_id = %u",                      ptr,                      info->mBuffer); @@ -3147,7 +2953,7 @@ bool OMXCodec::drainInputBuffer(BufferInfo *info) {                  if (srcBuffer->meta_data()->findInt64(                              kKeyTargetTime, &targetTimeUs)                          && targetTimeUs >= 0) { -                    CODEC_LOGV("targetTimeUs = %lld us", targetTimeUs); +                    CODEC_LOGV("targetTimeUs = %lld us", (long long)targetTimeUs);                      mTargetTimeUs = targetTimeUs;                  } else {                      mTargetTimeUs = -1; @@ -3181,7 +2987,7 @@ bool OMXCodec::drainInputBuffer(BufferInfo *info) {              if (offset == 0) {         
         CODEC_LOGE(                       "Codec's input buffers are too small to accomodate " -                     "buffer read from source (info->mSize = %d, srcLength = %d)", +                     "buffer read from source (info->mSize = %zu, srcLength = %zu)",                       info->mSize, srcBuffer->range_length());                  srcBuffer->release(); @@ -3287,10 +3093,10 @@ bool OMXCodec::drainInputBuffer(BufferInfo *info) {          info = findEmptyInputBuffer();      } -    CODEC_LOGV("Calling emptyBuffer on buffer %p (length %d), " +    CODEC_LOGV("Calling emptyBuffer on buffer %u (length %zu), "                 "timestamp %lld us (%.2f secs)",                 info->mBuffer, offset, -               timestampUs, timestampUs / 1E6); +               (long long)timestampUs, timestampUs / 1E6);      err = mOMX->emptyBuffer(              mNode, info->mBuffer, 0, offset, @@ -3315,7 +3121,7 @@ void OMXCodec::fillOutputBuffer(BufferInfo *info) {          return;      } -    CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer); +    CODEC_LOGV("Calling fillBuffer on buffer %u", info->mBuffer);      status_t err = mOMX->fillBuffer(mNode, info->mBuffer);      if (err != OK) { @@ -3372,7 +3178,7 @@ status_t OMXCodec::waitForBufferFilled_l() {      }      status_t err = mBufferFilled.waitRelative(mLock, kBufferFilledEventTimeOutNs);      if (err != OK) { -        CODEC_LOGE("Timed out waiting for output buffers: %d/%d", +        CODEC_LOGE("Timed out waiting for output buffers: %zu/%zu",              countBuffersWeOwn(mPortBuffers[kPortIndexInput]),              countBuffersWeOwn(mPortBuffers[kPortIndexOutput]));      } @@ -3627,7 +3433,7 @@ void OMXCodec::setG711Format(int32_t sampleRate, int32_t numChannels) {  void OMXCodec::setImageOutputFormat(          OMX_COLOR_FORMATTYPE format, OMX_U32 width, OMX_U32 height) { -    CODEC_LOGV("setImageOutputFormat(%ld, %ld)", width, height); +    CODEC_LOGV("setImageOutputFormat(%u, %u)", width, height);  #if 0 
     OMX_INDEXTYPE index; @@ -4281,14 +4087,14 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {                  if ((OMX_U32)numChannels != params.nChannels) {                      ALOGV("Codec outputs a different number of channels than "                           "the input stream contains (contains %d channels, " -                         "codec outputs %ld channels).", +                         "codec outputs %u channels).",                           numChannels, params.nChannels);                  }                  if (sampleRate != (int32_t)params.nSamplingRate) {                      ALOGV("Codec outputs at different sampling rate than "                           "what the input stream contains (contains data at " -                         "%d Hz, codec outputs %lu Hz)", +                         "%d Hz, codec outputs %u Hz)",                           sampleRate, params.nSamplingRate);                  } @@ -4390,8 +4196,7 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {                              mNode, OMX_IndexConfigCommonOutputCrop,                              &rect, sizeof(rect)); -                CODEC_LOGI( -                        "video dimensions are %ld x %ld", +                CODEC_LOGI("video dimensions are %u x %u",                          video_def->nFrameWidth, video_def->nFrameHeight);                  if (err == OK) { @@ -4409,8 +4214,7 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {                              rect.nLeft + rect.nWidth - 1,                              rect.nTop + rect.nHeight - 1); -                    CODEC_LOGI( -                            "Crop rect is %ld x %ld @ (%ld, %ld)", +                    CODEC_LOGI("Crop rect is %u x %u @ (%d, %d)",                              rect.nWidth, rect.nHeight, rect.nLeft, rect.nTop);                  } else {                      mOutputFormat->setRect( diff --git a/media/libstagefright/OggExtractor.cpp 
b/media/libstagefright/OggExtractor.cpp index 976763c..6fba8e1 100644 --- a/media/libstagefright/OggExtractor.cpp +++ b/media/libstagefright/OggExtractor.cpp @@ -21,6 +21,7 @@  #include "include/OggExtractor.h"  #include <cutils/properties.h> +#include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/DataSource.h>  #include <media/stagefright/MediaBuffer.h> @@ -65,24 +66,28 @@ private:      OggSource &operator=(const OggSource &);  }; -struct MyVorbisExtractor { -    MyVorbisExtractor(const sp<DataSource> &source); -    virtual ~MyVorbisExtractor(); +struct MyOggExtractor { +    MyOggExtractor( +            const sp<DataSource> &source, +            const char *mimeType, +            size_t numHeaders, +            int64_t seekPreRollUs); +    virtual ~MyOggExtractor();      sp<MetaData> getFormat() const;      // Returns an approximate bitrate in bits per second. -    uint64_t approxBitrate(); +    virtual uint64_t approxBitrate() const = 0;      status_t seekToTime(int64_t timeUs);      status_t seekToOffset(off64_t offset); -    status_t readNextPacket(MediaBuffer **buffer, bool conf); +    virtual status_t readNextPacket(MediaBuffer **buffer) = 0;      status_t init();      sp<MetaData> getFileMetaData() { return mFileMeta; } -private: +protected:      struct Page {          uint64_t mGranulePosition;          int32_t mPrevPacketSize; @@ -102,12 +107,17 @@ private:      sp<DataSource> mSource;      off64_t mOffset;      Page mCurrentPage; +    uint64_t mCurGranulePosition;      uint64_t mPrevGranulePosition;      size_t mCurrentPageSize;      bool mFirstPacketInPage;      uint64_t mCurrentPageSamples;      size_t mNextLaceIndex; +    const char *mMimeType; +    size_t mNumHeaders; +    int64_t mSeekPreRollUs; +      off64_t mFirstDataOffset;      vorbis_info mVi; @@ -121,10 +131,26 @@ private:      ssize_t readPage(off64_t offset, Page *page);      status_t findNextPage(off64_t startOffset, 
off64_t *pageOffset); -    status_t verifyHeader( -            MediaBuffer *buffer, uint8_t type); +    virtual int64_t getTimeUsOfGranule(uint64_t granulePos) const = 0; -    int32_t packetBlockSize(MediaBuffer *buffer); +    // Extract codec format, metadata tags, and various codec specific data; +    // the format and CSD's are required to setup the decoders for the enclosed media content. +    // +    // Valid values for `type` are: +    // 1 - bitstream identification header +    // 3 - comment header +    // 5 - codec setup header (Vorbis only) +    virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type) = 0; + +    // Read the next ogg packet from the underlying data source; optionally +    // calculate the timestamp for the output packet whilst pretending +    // that we are parsing an Ogg Vorbis stream. +    // +    // *buffer is NULL'ed out immediately upon entry, and if successful a new buffer is allocated; +    // clients are responsible for releasing the original buffer. 
+    status_t _readNextPacket(MediaBuffer **buffer, bool calcVorbisTimestamp); + +    int32_t getPacketBlockSize(MediaBuffer *buffer);      void parseFileMetaData(); @@ -132,8 +158,61 @@ private:      void buildTableOfContents(); -    MyVorbisExtractor(const MyVorbisExtractor &); -    MyVorbisExtractor &operator=(const MyVorbisExtractor &); +    MyOggExtractor(const MyOggExtractor &); +    MyOggExtractor &operator=(const MyOggExtractor &); +}; + +struct MyVorbisExtractor : public MyOggExtractor { +    MyVorbisExtractor(const sp<DataSource> &source) +        : MyOggExtractor(source, +                MEDIA_MIMETYPE_AUDIO_VORBIS, +                /* numHeaders */ 3, +                /* seekPreRollUs */ 0) { +    } + +    virtual uint64_t approxBitrate() const; + +    virtual status_t readNextPacket(MediaBuffer **buffer) { +        return _readNextPacket(buffer, /* calcVorbisTimestamp = */ true); +    } + +protected: +    virtual int64_t getTimeUsOfGranule(uint64_t granulePos) const { +        return granulePos * 1000000ll / mVi.rate; +    } + +    virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type); +}; + +struct MyOpusExtractor : public MyOggExtractor { +    static const int32_t kOpusSampleRate = 48000; +    static const int64_t kOpusSeekPreRollUs = 80000; // 80 ms + +    MyOpusExtractor(const sp<DataSource> &source) +        : MyOggExtractor(source, MEDIA_MIMETYPE_AUDIO_OPUS, /*numHeaders*/ 2, kOpusSeekPreRollUs), +          mChannelCount(0), +          mCodecDelay(0), +          mStartGranulePosition(-1) { +    } + +    virtual uint64_t approxBitrate() const { +        return 0; +    } + +    virtual status_t readNextPacket(MediaBuffer **buffer); + +protected: +    virtual int64_t getTimeUsOfGranule(uint64_t granulePos) const; +    virtual status_t verifyHeader(MediaBuffer *buffer, uint8_t type); + +private: +    status_t verifyOpusHeader(MediaBuffer *buffer); +    status_t verifyOpusComments(MediaBuffer *buffer); +    uint32_t 
getNumSamplesInPacket(MediaBuffer *buffer) const; + +    uint8_t mChannelCount; +    uint16_t mCodecDelay; +    int64_t mStartGranulePosition;  };  static void extractAlbumArt( @@ -179,13 +258,14 @@ status_t OggSource::read(      int64_t seekTimeUs;      ReadOptions::SeekMode mode;      if (options && options->getSeekTo(&seekTimeUs, &mode)) { -        if (mExtractor->mImpl->seekToTime(seekTimeUs) != OK) { -            return ERROR_END_OF_STREAM; +        status_t err = mExtractor->mImpl->seekToTime(seekTimeUs); +        if (err != OK) { +            return err;          }      }      MediaBuffer *packet; -    status_t err = mExtractor->mImpl->readNextPacket(&packet, /* conf = */ false); +    status_t err = mExtractor->mImpl->readNextPacket(&packet);      if (err != OK) {          return err; @@ -209,14 +289,22 @@ status_t OggSource::read(  //////////////////////////////////////////////////////////////////////////////// -MyVorbisExtractor::MyVorbisExtractor(const sp<DataSource> &source) +MyOggExtractor::MyOggExtractor( +        const sp<DataSource> &source, +        const char *mimeType, +        size_t numHeaders, +        int64_t seekPreRollUs)      : mSource(source),        mOffset(0), +      mCurGranulePosition(0),        mPrevGranulePosition(0),        mCurrentPageSize(0),        mFirstPacketInPage(true),        mCurrentPageSamples(0),        mNextLaceIndex(0), +      mMimeType(mimeType), +      mNumHeaders(numHeaders), +      mSeekPreRollUs(seekPreRollUs),        mFirstDataOffset(-1) {      mCurrentPage.mNumSegments = 0; @@ -224,16 +312,16 @@ MyVorbisExtractor::MyVorbisExtractor(const sp<DataSource> &source)      vorbis_comment_init(&mVc);  } -MyVorbisExtractor::~MyVorbisExtractor() { +MyOggExtractor::~MyOggExtractor() {      vorbis_comment_clear(&mVc);      vorbis_info_clear(&mVi);  } -sp<MetaData> MyVorbisExtractor::getFormat() const { +sp<MetaData> MyOggExtractor::getFormat() const {      return mMeta;  } -status_t MyVorbisExtractor::findNextPage( +status_t 
MyOggExtractor::findNextPage(          off64_t startOffset, off64_t *pageOffset) {      *pageOffset = startOffset; @@ -250,7 +338,7 @@ status_t MyVorbisExtractor::findNextPage(          if (!memcmp(signature, "OggS", 4)) {              if (*pageOffset > startOffset) {                  ALOGV("skipped %lld bytes of junk to reach next frame", -                     *pageOffset - startOffset); +                     (long long)(*pageOffset - startOffset));              }              return OK; @@ -264,7 +352,7 @@ status_t MyVorbisExtractor::findNextPage(  // it (if any) and return its granule position.  // To do this we back up from the "current" page's offset until we find any  // page preceding it and then scan forward to just before the current page. -status_t MyVorbisExtractor::findPrevGranulePosition( +status_t MyOggExtractor::findPrevGranulePosition(          off64_t pageOffset, uint64_t *granulePos) {      *granulePos = 0; @@ -277,10 +365,14 @@ status_t MyVorbisExtractor::findPrevGranulePosition(              prevGuess = 0;          } -        ALOGV("backing up %lld bytes", pageOffset - prevGuess); +        ALOGV("backing up %lld bytes", (long long)(pageOffset - prevGuess));          status_t err = findNextPage(prevGuess, &prevPageOffset); -        if (err != OK) { +        if (err == ERROR_END_OF_STREAM) { +            // We are at the last page and didn't back off enough; +            // back off 5000 bytes more and try again. 
+            continue; +        } else if (err != OK) {              return err;          } @@ -295,7 +387,7 @@ status_t MyVorbisExtractor::findPrevGranulePosition(      }      ALOGV("prevPageOffset at %lld, pageOffset at %lld", -         prevPageOffset, pageOffset); +            (long long)prevPageOffset, (long long)pageOffset);      for (;;) {          Page prevPage; @@ -314,13 +406,22 @@ status_t MyVorbisExtractor::findPrevGranulePosition(      }  } -status_t MyVorbisExtractor::seekToTime(int64_t timeUs) { +status_t MyOggExtractor::seekToTime(int64_t timeUs) { +    timeUs -= mSeekPreRollUs; +    if (timeUs < 0) { +        timeUs = 0; +    } +      if (mTableOfContents.isEmpty()) {          // Perform approximate seeking based on avg. bitrate. +        uint64_t bps = approxBitrate(); +        if (bps <= 0) { +            return INVALID_OPERATION; +        } -        off64_t pos = timeUs * approxBitrate() / 8000000ll; +        off64_t pos = timeUs * bps / 8000000ll; -        ALOGV("seeking to offset %lld", pos); +        ALOGV("seeking to offset %lld", (long long)pos);          return seekToOffset(pos);      } @@ -348,12 +449,12 @@ status_t MyVorbisExtractor::seekToTime(int64_t timeUs) {      const TOCEntry &entry = mTableOfContents.itemAt(left);      ALOGV("seeking to entry %zu / %zu at offset %lld", -         left, mTableOfContents.size(), entry.mPageOffset); +         left, mTableOfContents.size(), (long long)entry.mPageOffset);      return seekToOffset(entry.mPageOffset);  } -status_t MyVorbisExtractor::seekToOffset(off64_t offset) { +status_t MyOggExtractor::seekToOffset(off64_t offset) {      if (mFirstDataOffset >= 0 && offset < mFirstDataOffset) {          // Once we know where the actual audio data starts (past the headers)          // don't ever seek to anywhere before that. 
@@ -386,13 +487,13 @@ status_t MyVorbisExtractor::seekToOffset(off64_t offset) {      return OK;  } -ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) { +ssize_t MyOggExtractor::readPage(off64_t offset, Page *page) {      uint8_t header[27];      ssize_t n;      if ((n = mSource->readAt(offset, header, sizeof(header)))              < (ssize_t)sizeof(header)) { -        ALOGV("failed to read %zu bytes at offset 0x%016llx, got %zd bytes", -             sizeof(header), offset, n); +        ALOGV("failed to read %zu bytes at offset %#016llx, got %zd bytes", +                sizeof(header), (long long)offset, n);          if (n < 0) {              return n; @@ -457,7 +558,110 @@ ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {      return sizeof(header) + page->mNumSegments + totalSize;  } -status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out, bool conf) { +status_t MyOpusExtractor::readNextPacket(MediaBuffer **out) { +    if (mOffset <= mFirstDataOffset && mStartGranulePosition < 0) { +        // The first sample might not start at time 0; find out where by subtracting +        // the number of samples on the first page from the granule position +        // (position of last complete sample) of the first page. This happens +        // the first time before we attempt to read a packet from the first page. +        MediaBuffer *mBuf; +        uint32_t numSamples = 0; +        uint64_t curGranulePosition = 0; +        while (true) { +            status_t err = _readNextPacket(&mBuf, /* calcVorbisTimestamp = */false); +            if (err != OK && err != ERROR_END_OF_STREAM) { +                return err; +            } +            // First two pages are header pages. 
+            if (err == ERROR_END_OF_STREAM || mCurrentPage.mPageNo > 2) { +                break; +            } +            curGranulePosition = mCurrentPage.mGranulePosition; +            numSamples += getNumSamplesInPacket(mBuf); +            mBuf->release(); +            mBuf = NULL; +        } + +        if (curGranulePosition > numSamples) { +            mStartGranulePosition = curGranulePosition - numSamples; +        } else { +            mStartGranulePosition = 0; +        } +        seekToOffset(0); +    } + +    status_t err = _readNextPacket(out, /* calcVorbisTimestamp = */false); +    if (err != OK) { +        return err; +    } + +    int32_t currentPageSamples; +    // Calculate timestamps by accumulating durations starting from the first sample of a page; +    // We assume that we only seek to page boundaries. +    if ((*out)->meta_data()->findInt32(kKeyValidSamples, ¤tPageSamples)) { +        // first packet in page +        if (mOffset == mFirstDataOffset) { +            currentPageSamples -= mStartGranulePosition; +            (*out)->meta_data()->setInt32(kKeyValidSamples, currentPageSamples); +        } +        mCurGranulePosition = mCurrentPage.mGranulePosition - currentPageSamples; +    } + +    int64_t timeUs = getTimeUsOfGranule(mCurGranulePosition); +    (*out)->meta_data()->setInt64(kKeyTime, timeUs); + +    uint32_t frames = getNumSamplesInPacket(*out); +    mCurGranulePosition += frames; +    return OK; +} + +uint32_t MyOpusExtractor::getNumSamplesInPacket(MediaBuffer *buffer) const { +    if (buffer == NULL || buffer->range_length() < 1) { +        return 0; +    } + +    uint8_t *data = (uint8_t *)buffer->data() + buffer->range_offset(); +    uint8_t toc = data[0]; +    uint8_t config = (toc >> 3) & 0x1f; +    uint32_t frameSizesUs[] = { +        10000, 20000, 40000, 60000, // 0...3 +        10000, 20000, 40000, 60000, // 4...7 +        10000, 20000, 40000, 60000, // 8...11 +        10000, 20000,               // 12...13 +        
10000, 20000,               // 14...15 +        2500, 5000, 10000, 20000,   // 16...19 +        2500, 5000, 10000, 20000,   // 20...23 +        2500, 5000, 10000, 20000,   // 24...27 +        2500, 5000, 10000, 20000    // 28...31 +    }; +    uint32_t frameSizeUs = frameSizesUs[config]; + +    uint32_t numFrames; +    uint8_t c = toc & 3; +    switch (c) { +    case 0: +        numFrames = 1; +        break; +    case 1: +    case 2: +        numFrames = 2; +        break; +    case 3: +        if (buffer->range_length() < 3) { +            numFrames = 0; +        } else { +            numFrames = data[2] & 0x3f; +        } +        break; +    default: +        TRESPASS(); +    } + +    uint32_t numSamples = frameSizeUs * numFrames * kOpusSampleRate / 1000000; +    return numSamples; +} + +status_t MyOggExtractor::_readNextPacket(MediaBuffer **out, bool calcVorbisTimestamp) {      *out = NULL;      MediaBuffer *buffer = NULL; @@ -505,8 +709,8 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out, bool conf) {                      packetSize);              if (n < (ssize_t)packetSize) { -                ALOGV("failed to read %zu bytes at 0x%016llx, got %zd bytes", -                     packetSize, dataOffset, n); +                ALOGV("failed to read %zu bytes at %#016llx, got %zd bytes", +                        packetSize, (long long)dataOffset, n);                  return ERROR_IO;              } @@ -523,9 +727,8 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out, bool conf) {                      mFirstPacketInPage = false;                  } -                // ignore timestamp for configuration packets -                if (!conf) { -                    int32_t curBlockSize = packetBlockSize(buffer); +                if (calcVorbisTimestamp) { +                    int32_t curBlockSize = getPacketBlockSize(buffer);                      if (mCurrentPage.mPrevPacketSize < 0) {                          mCurrentPage.mPrevPacketSize = 
curBlockSize;                          mCurrentPage.mPrevPacketPos = @@ -597,43 +800,24 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out, bool conf) {      }  } -status_t MyVorbisExtractor::init() { +status_t MyOggExtractor::init() {      mMeta = new MetaData; -    mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS); +    mMeta->setCString(kKeyMIMEType, mMimeType); -    MediaBuffer *packet;      status_t err; -    if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) { -        return err; -    } -    ALOGV("read packet of size %zu\n", packet->range_length()); -    err = verifyHeader(packet, 1); -    packet->release(); -    packet = NULL; -    if (err != OK) { -        return err; -    } - -    if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) { -        return err; -    } -    ALOGV("read packet of size %zu\n", packet->range_length()); -    err = verifyHeader(packet, 3); -    packet->release(); -    packet = NULL; -    if (err != OK) { -        return err; -    } - -    if ((err = readNextPacket(&packet, /* conf = */ true)) != OK) { -        return err; -    } -    ALOGV("read packet of size %zu\n", packet->range_length()); -    err = verifyHeader(packet, 5); -    packet->release(); -    packet = NULL; -    if (err != OK) { -        return err; +    MediaBuffer *packet; +    for (size_t i = 0; i < mNumHeaders; ++i) { +        // ignore timestamp for configuration packets +        if ((err = _readNextPacket(&packet, /* calcVorbisTimestamp = */ false)) != OK) { +            return err; +        } +        ALOGV("read packet of size %zu\n", packet->range_length()); +        err = verifyHeader(packet, /* type = */ i * 2 + 1); +        packet->release(); +        packet = NULL; +        if (err != OK) { +            return err; +        }      }      mFirstDataOffset = mOffset + mCurrentPageSize; @@ -649,7 +833,7 @@ status_t MyVorbisExtractor::init() {          // we can only approximate using avg. 
bitrate if seeking to          // the end is too expensive or impossible (live streaming). -        int64_t durationUs = lastGranulePosition * 1000000ll / mVi.rate; +        int64_t durationUs = getTimeUsOfGranule(lastGranulePosition);          mMeta->setInt64(kKeyDuration, durationUs); @@ -659,7 +843,7 @@ status_t MyVorbisExtractor::init() {      return OK;  } -void MyVorbisExtractor::buildTableOfContents() { +void MyOggExtractor::buildTableOfContents() {      off64_t offset = mFirstDataOffset;      Page page;      ssize_t pageSize; @@ -670,7 +854,7 @@ void MyVorbisExtractor::buildTableOfContents() {              mTableOfContents.editItemAt(mTableOfContents.size() - 1);          entry.mPageOffset = offset; -        entry.mTimeUs = page.mGranulePosition * 1000000ll / mVi.rate; +        entry.mTimeUs = getTimeUsOfGranule(page.mGranulePosition);          offset += (size_t)pageSize;      } @@ -698,7 +882,7 @@ void MyVorbisExtractor::buildTableOfContents() {      }  } -int32_t MyVorbisExtractor::packetBlockSize(MediaBuffer *buffer) { +int32_t MyOggExtractor::getPacketBlockSize(MediaBuffer *buffer) {      const uint8_t *data =          (const uint8_t *)buffer->data() + buffer->range_offset(); @@ -727,6 +911,144 @@ int32_t MyVorbisExtractor::packetBlockSize(MediaBuffer *buffer) {      return vorbis_packet_blocksize(&mVi, &pack);  } +int64_t MyOpusExtractor::getTimeUsOfGranule(uint64_t granulePos) const { +    uint64_t pcmSamplePosition = 0; +    if (granulePos > mCodecDelay) { +        pcmSamplePosition = granulePos - mCodecDelay; +    } +    return pcmSamplePosition * 1000000ll / kOpusSampleRate; +} + +status_t MyOpusExtractor::verifyHeader(MediaBuffer *buffer, uint8_t type) { +    switch (type) { +        // there are actually no header types defined in the Opus spec; we choose 1 and 3 to mean +        // header and comments such that we can share code with MyVorbisExtractor. 
+        case 1: +            return verifyOpusHeader(buffer); +        case 3: +            return verifyOpusComments(buffer); +        default: +            return INVALID_OPERATION; +    } +} + +status_t MyOpusExtractor::verifyOpusHeader(MediaBuffer *buffer) { +    const size_t kOpusHeaderSize = 19; +    const uint8_t *data = +        (const uint8_t *)buffer->data() + buffer->range_offset(); + +    size_t size = buffer->range_length(); + +    if (size < kOpusHeaderSize +            || memcmp(data, "OpusHead", 8) +            || /* version = */ data[8] != 1) { +        return ERROR_MALFORMED; +    } + +    mChannelCount = data[9]; +    mCodecDelay = U16LE_AT(&data[10]); + +    mMeta->setData(kKeyOpusHeader, 0, data, size); +    mMeta->setInt32(kKeySampleRate, kOpusSampleRate); +    mMeta->setInt32(kKeyChannelCount, mChannelCount); +    mMeta->setInt64(kKeyOpusSeekPreRoll /* ns */, kOpusSeekPreRollUs * 1000 /* = 80 ms*/); +    mMeta->setInt64(kKeyOpusCodecDelay /* ns */, +            mCodecDelay /* sample/s */ * 1000000000 / kOpusSampleRate); + +    return OK; +} + +status_t MyOpusExtractor::verifyOpusComments(MediaBuffer *buffer) { +    // add artificial framing bit so we can reuse _vorbis_unpack_comment +    int32_t commentSize = buffer->range_length() + 1; +    sp<ABuffer> aBuf = new ABuffer(commentSize); +    if (aBuf->capacity() <= buffer->range_length()) { +        return ERROR_MALFORMED; +    } + +    uint8_t* commentData = aBuf->data(); +    memcpy(commentData, +            (uint8_t *)buffer->data() + buffer->range_offset(), +            buffer->range_length()); + +    ogg_buffer buf; +    buf.data = commentData; +    buf.size = commentSize; +    buf.refcount = 1; +    buf.ptr.owner = NULL; + +    ogg_reference ref; +    ref.buffer = &buf; +    ref.begin = 0; +    ref.length = commentSize; +    ref.next = NULL; + +    oggpack_buffer bits; +    oggpack_readinit(&bits, &ref); + +    // skip 'OpusTags' +    const char *OpusTags = "OpusTags"; +    const 
int32_t headerLen = strlen(OpusTags); +    int32_t framingBitOffset = headerLen; +    for (int i = 0; i < headerLen; ++i) { +        char chr = oggpack_read(&bits, 8); +        if (chr != OpusTags[i]) { +            return ERROR_MALFORMED; +        } +    } + +    int32_t vendorLen = oggpack_read(&bits, 32); +    framingBitOffset += 4; +    if (vendorLen < 0 || vendorLen > commentSize - 8) { +        return ERROR_MALFORMED; +    } +    // skip vendor string +    framingBitOffset += vendorLen; +    for (int i = 0; i < vendorLen; ++i) { +        oggpack_read(&bits, 8); +    } + +    int32_t n = oggpack_read(&bits, 32); +    framingBitOffset += 4; +    if (n < 0 || n > ((commentSize - oggpack_bytes(&bits)) >> 2)) { +        return ERROR_MALFORMED; +    } +    for (int i = 0; i < n; ++i) { +        int32_t len = oggpack_read(&bits, 32); +        framingBitOffset += 4; +        if (len  < 0 || len  > (commentSize - oggpack_bytes(&bits))) { +            return ERROR_MALFORMED; +        } +        framingBitOffset += len; +        for (int j = 0; j < len; ++j) { +            oggpack_read(&bits, 8); +        } +    } +    if (framingBitOffset < 0 || framingBitOffset >= commentSize) { +        return ERROR_MALFORMED; +    } +    commentData[framingBitOffset] = 1; + +    buf.data = commentData + headerLen; +    buf.size = commentSize - headerLen; +    buf.refcount = 1; +    buf.ptr.owner = NULL; + +    ref.buffer = &buf; +    ref.begin = 0; +    ref.length = commentSize - headerLen; +    ref.next = NULL; + +    oggpack_readinit(&bits, &ref); +    int err = _vorbis_unpack_comment(&mVc, &bits); +    if (0 != err) { +        return ERROR_MALFORMED; +    } + +    parseFileMetaData(); +    return OK; +} +  status_t MyVorbisExtractor::verifyHeader(          MediaBuffer *buffer, uint8_t type) {      const uint8_t *data = @@ -753,7 +1075,9 @@ status_t MyVorbisExtractor::verifyHeader(      oggpack_buffer bits;      oggpack_readinit(&bits, &ref); -    CHECK_EQ(oggpack_read(&bits, 8), 
type); +    if (oggpack_read(&bits, 8) != type) { +        return ERROR_MALFORMED; +    }      for (size_t i = 0; i < 6; ++i) {          oggpack_read(&bits, 8);  // skip 'vorbis'      } @@ -761,7 +1085,9 @@ status_t MyVorbisExtractor::verifyHeader(      switch (type) {          case 1:          { -            CHECK_EQ(0, _vorbis_unpack_info(&mVi, &bits)); +            if (0 != _vorbis_unpack_info(&mVi, &bits)) { +                return ERROR_MALFORMED; +            }              mMeta->setData(kKeyVorbisInfo, 0, data, size);              mMeta->setInt32(kKeySampleRate, mVi.rate); @@ -810,7 +1136,7 @@ status_t MyVorbisExtractor::verifyHeader(      return OK;  } -uint64_t MyVorbisExtractor::approxBitrate() { +uint64_t MyVorbisExtractor::approxBitrate() const {      if (mVi.bitrate_nominal != 0) {          return mVi.bitrate_nominal;      } @@ -818,7 +1144,7 @@ uint64_t MyVorbisExtractor::approxBitrate() {      return (mVi.bitrate_lower + mVi.bitrate_upper) / 2;  } -void MyVorbisExtractor::parseFileMetaData() { +void MyOggExtractor::parseFileMetaData() {      mFileMeta = new MetaData;      mFileMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_OGG); @@ -1027,11 +1353,23 @@ OggExtractor::OggExtractor(const sp<DataSource> &source)      : mDataSource(source),        mInitCheck(NO_INIT),        mImpl(NULL) { -    mImpl = new MyVorbisExtractor(mDataSource); -    mInitCheck = mImpl->seekToOffset(0); +    for (int i = 0; i < 2; ++i) { +        if (mImpl != NULL) { +            delete mImpl; +        } +        if (i == 0) { +            mImpl = new MyVorbisExtractor(mDataSource); +        } else { +            mImpl = new MyOpusExtractor(mDataSource); +        } +        mInitCheck = mImpl->seekToOffset(0); -    if (mInitCheck == OK) { -        mInitCheck = mImpl->init(); +        if (mInitCheck == OK) { +            mInitCheck = mImpl->init(); +            if (mInitCheck == OK) { +                break; +            } +        }      }  } diff --git 
a/media/libstagefright/ProcessInfo.cpp b/media/libstagefright/ProcessInfo.cpp new file mode 100644 index 0000000..b4172b3 --- /dev/null +++ b/media/libstagefright/ProcessInfo.cpp @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ProcessInfo" +#include <utils/Log.h> + +#include <media/stagefright/ProcessInfo.h> + +#include <binder/IProcessInfoService.h> +#include <binder/IServiceManager.h> + +namespace android { + +ProcessInfo::ProcessInfo() {} + +bool ProcessInfo::getPriority(int pid, int* priority) { +    sp<IBinder> binder = defaultServiceManager()->getService(String16("processinfo")); +    sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder); + +    size_t length = 1; +    int32_t states; +    status_t err = service->getProcessStatesFromPids(length, &pid, &states); +    if (err != OK) { +        ALOGE("getProcessStatesFromPids failed"); +        return false; +    } +    ALOGV("pid %d states %d", pid, states); +    if (states < 0) { +        return false; +    } + +    // Use process state as the priority. Lower the value, higher the priority. 
+    *priority = states; +    return true; +} + +ProcessInfo::~ProcessInfo() {} + +}  // namespace android diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp index d7251f4..97dff43 100644 --- a/media/libstagefright/SampleTable.cpp +++ b/media/libstagefright/SampleTable.cpp @@ -235,11 +235,13 @@ status_t SampleTable::setSampleToChunkParams(          return ERROR_MALFORMED;      } -    if (SIZE_MAX / sizeof(SampleToChunkEntry) <= mNumSampleToChunkOffsets) +    if (SIZE_MAX / sizeof(SampleToChunkEntry) <= (size_t)mNumSampleToChunkOffsets)          return ERROR_OUT_OF_RANGE;      mSampleToChunkEntries = -        new SampleToChunkEntry[mNumSampleToChunkOffsets]; +        new (std::nothrow) SampleToChunkEntry[mNumSampleToChunkOffsets]; +    if (!mSampleToChunkEntries) +        return ERROR_OUT_OF_RANGE;      for (uint32_t i = 0; i < mNumSampleToChunkOffsets; ++i) {          uint8_t buffer[12]; @@ -342,10 +344,12 @@ status_t SampleTable::setTimeToSampleParams(      mTimeToSampleCount = U32_AT(&header[4]);      uint64_t allocSize = (uint64_t)mTimeToSampleCount * 2 * sizeof(uint32_t); -    if (allocSize > SIZE_MAX) { +    if (allocSize > UINT32_MAX) {          return ERROR_OUT_OF_RANGE;      } -    mTimeToSample = new uint32_t[mTimeToSampleCount * 2]; +    mTimeToSample = new (std::nothrow) uint32_t[mTimeToSampleCount * 2]; +    if (!mTimeToSample) +        return ERROR_OUT_OF_RANGE;      size_t size = sizeof(uint32_t) * mTimeToSampleCount * 2;      if (mDataSource->readAt( @@ -388,11 +392,13 @@ status_t SampleTable::setCompositionTimeToSampleParams(      mNumCompositionTimeDeltaEntries = numEntries;      uint64_t allocSize = (uint64_t)numEntries * 2 * sizeof(uint32_t); -    if (allocSize > SIZE_MAX) { +    if (allocSize > UINT32_MAX) {          return ERROR_OUT_OF_RANGE;      } -    mCompositionTimeDeltaEntries = new uint32_t[2 * numEntries]; +    mCompositionTimeDeltaEntries = new (std::nothrow) uint32_t[2 * numEntries]; +    if 
(!mCompositionTimeDeltaEntries) +        return ERROR_OUT_OF_RANGE;      if (mDataSource->readAt(                  data_offset + 8, mCompositionTimeDeltaEntries, numEntries * 8) @@ -442,7 +448,10 @@ status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size)          return ERROR_OUT_OF_RANGE;      } -    mSyncSamples = new uint32_t[mNumSyncSamples]; +    mSyncSamples = new (std::nothrow) uint32_t[mNumSyncSamples]; +    if (!mSyncSamples) +        return ERROR_OUT_OF_RANGE; +      size_t size = mNumSyncSamples * sizeof(uint32_t);      if (mDataSource->readAt(mSyncSampleOffset + 8, mSyncSamples, size)              != (ssize_t)size) { @@ -510,7 +519,9 @@ void SampleTable::buildSampleEntriesTable() {          return;      } -    mSampleTimeEntries = new SampleTimeEntry[mNumSampleSizes]; +    mSampleTimeEntries = new (std::nothrow) SampleTimeEntry[mNumSampleSizes]; +    if (!mSampleTimeEntries) +        return;      uint32_t sampleIndex = 0;      uint32_t sampleTime = 0; diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp index e2e6d79..1da1e5e 100644 --- a/media/libstagefright/SkipCutBuffer.cpp +++ b/media/libstagefright/SkipCutBuffer.cpp @@ -25,6 +25,13 @@  namespace android {  SkipCutBuffer::SkipCutBuffer(int32_t skip, int32_t cut) { + +    if (skip < 0 || cut < 0 || cut > 64 * 1024) { +        ALOGW("out of range skip/cut: %d/%d, using passthrough instead", skip, cut); +        skip = 0; +        cut = 0; +    } +      mFrontPadding = mSkip = skip;      mBackPadding = cut;      mWriteHead = 0; diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp index 101fc8a..7c554db 100644 --- a/media/libstagefright/StagefrightMetadataRetriever.cpp +++ b/media/libstagefright/StagefrightMetadataRetriever.cpp @@ -20,22 +20,35 @@  #include <inttypes.h>  #include <utils/Log.h> +#include <gui/Surface.h>  #include "include/StagefrightMetadataRetriever.h" 
+#include <media/ICrypto.h>  #include <media/IMediaHTTPService.h> + +#include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h>  #include <media/stagefright/ColorConverter.h>  #include <media/stagefright/DataSource.h>  #include <media/stagefright/FileSource.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaCodec.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h>  #include <media/stagefright/MediaExtractor.h>  #include <media/stagefright/MetaData.h>  #include <media/stagefright/OMXCodec.h> -#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/Utils.h> +  #include <CharacterEncodingDetector.h>  namespace android { +static const int64_t kBufferTimeOutUs = 30000ll; // 30 msec +static const size_t kRetryCount = 20; // must be >0 +  StagefrightMetadataRetriever::StagefrightMetadataRetriever()      : mParsedMetaData(false),        mAlbumArt(NULL) { @@ -47,10 +60,7 @@ StagefrightMetadataRetriever::StagefrightMetadataRetriever()  StagefrightMetadataRetriever::~StagefrightMetadataRetriever() {      ALOGV("~StagefrightMetadataRetriever()"); - -    delete mAlbumArt; -    mAlbumArt = NULL; - +    clearMetadata();      mClient.disconnect();  } @@ -60,11 +70,7 @@ status_t StagefrightMetadataRetriever::setDataSource(          const KeyedVector<String8, String8> *headers) {      ALOGV("setDataSource(%s)", uri); -    mParsedMetaData = false; -    mMetaData.clear(); -    delete mAlbumArt; -    mAlbumArt = NULL; - +    clearMetadata();      mSource = DataSource::CreateFromURI(httpService, uri, headers);      if (mSource == NULL) { @@ -92,11 +98,7 @@ status_t StagefrightMetadataRetriever::setDataSource(      ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length); -    mParsedMetaData = false; -    mMetaData.clear(); -    delete mAlbumArt; -    mAlbumArt = NULL; - +    clearMetadata();      
mSource = new FileSource(fd, offset, length);      status_t err; @@ -117,73 +119,69 @@ status_t StagefrightMetadataRetriever::setDataSource(      return OK;  } -static bool isYUV420PlanarSupported( -            OMXClient *client, -            const sp<MetaData> &trackMeta) { +status_t StagefrightMetadataRetriever::setDataSource( +        const sp<DataSource>& source) { +    ALOGV("setDataSource(DataSource)"); -    const char *mime; -    CHECK(trackMeta->findCString(kKeyMIMEType, &mime)); +    clearMetadata(); +    mSource = source; +    mExtractor = MediaExtractor::Create(mSource); -    Vector<CodecCapabilities> caps; -    if (QueryCodecs(client->interface(), mime, -                    true, /* queryDecoders */ -                    true, /* hwCodecOnly */ -                    &caps) == OK) { - -        for (size_t j = 0; j < caps.size(); ++j) { -            CodecCapabilities cap = caps[j]; -            for (size_t i = 0; i < cap.mColorFormats.size(); ++i) { -                if (cap.mColorFormats[i] == OMX_COLOR_FormatYUV420Planar) { -                    return true; -                } -            } -        } +    if (mExtractor == NULL) { +        ALOGE("Failed to instantiate a MediaExtractor."); +        mSource.clear(); +        return UNKNOWN_ERROR;      } -    return false; + +    return OK;  } -static VideoFrame *extractVideoFrameWithCodecFlags( -        OMXClient *client, +static VideoFrame *extractVideoFrame( +        const char *componentName,          const sp<MetaData> &trackMeta,          const sp<MediaSource> &source, -        uint32_t flags,          int64_t frameTimeUs,          int seekMode) {      sp<MetaData> format = source->getFormat(); -    // XXX: -    // Once all vendors support OMX_COLOR_FormatYUV420Planar, we can -    // remove this check and always set the decoder output color format -    if (isYUV420PlanarSupported(client, trackMeta)) { -        format->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar); -    } +    sp<AMessage> 
videoFormat; +    convertMetaDataToMessage(trackMeta, &videoFormat); -    sp<MediaSource> decoder = -        OMXCodec::Create( -                client->interface(), format, false, source, -                NULL, flags | OMXCodec::kClientNeedsFramebuffer); +    // TODO: Use Flexible color instead +    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar); -    if (decoder.get() == NULL) { -        ALOGV("unable to instantiate video decoder."); +    status_t err; +    sp<ALooper> looper = new ALooper; +    looper->start(); +    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName( +            looper, componentName, &err); +    if (decoder.get() == NULL || err != OK) { +        ALOGW("Failed to instantiate decoder [%s]", componentName);          return NULL;      } -    status_t err = decoder->start(); +    err = decoder->configure(videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);      if (err != OK) { -        ALOGW("OMXCodec::start returned error %d (0x%08x)\n", err, err); +        ALOGW("configure returned error %d (%s)", err, asString(err)); +        decoder->release();          return NULL;      } -    // Read one output buffer, ignore format change notifications -    // and spurious empty buffers. 
+    err = decoder->start(); +    if (err != OK) { +        ALOGW("start returned error %d (%s)", err, asString(err)); +        decoder->release(); +        return NULL; +    }      MediaSource::ReadOptions options;      if (seekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||          seekMode > MediaSource::ReadOptions::SEEK_CLOSEST) {          ALOGE("Unknown seek mode: %d", seekMode); +        decoder->release();          return NULL;      } @@ -202,64 +200,155 @@ static VideoFrame *extractVideoFrameWithCodecFlags(          options.setSeekTo(frameTimeUs, mode);      } -    MediaBuffer *buffer = NULL; -    do { -        if (buffer != NULL) { -            buffer->release(); -            buffer = NULL; -        } -        err = decoder->read(&buffer, &options); -        options.clearSeekTo(); -    } while (err == INFO_FORMAT_CHANGED -             || (buffer != NULL && buffer->range_length() == 0)); - +    err = source->start();      if (err != OK) { -        CHECK(buffer == NULL); +        ALOGW("source failed to start: %d (%s)", err, asString(err)); +        decoder->release(); +        return NULL; +    } -        ALOGV("decoding frame failed."); -        decoder->stop(); +    Vector<sp<ABuffer> > inputBuffers; +    err = decoder->getInputBuffers(&inputBuffers); +    if (err != OK) { +        ALOGW("failed to get input buffers: %d (%s)", err, asString(err)); +        decoder->release(); +        return NULL; +    } +    Vector<sp<ABuffer> > outputBuffers; +    err = decoder->getOutputBuffers(&outputBuffers); +    if (err != OK) { +        ALOGW("failed to get output buffers: %d (%s)", err, asString(err)); +        decoder->release();          return NULL;      } -    ALOGV("successfully decoded video frame."); +    sp<AMessage> outputFormat = NULL; +    bool haveMoreInputs = true; +    size_t index, offset, size; +    int64_t timeUs; +    size_t retriesLeft = kRetryCount; +    bool done = false; -    int32_t unreadable; -    if 
(buffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable) -            && unreadable != 0) { -        ALOGV("video frame is unreadable, decoder does not give us access " -             "to the video data."); +    do { +        size_t inputIndex = -1; +        int64_t ptsUs = 0ll; +        uint32_t flags = 0; +        sp<ABuffer> codecBuffer = NULL; + +        while (haveMoreInputs) { +            err = decoder->dequeueInputBuffer(&inputIndex, kBufferTimeOutUs); +            if (err != OK) { +                ALOGW("Timed out waiting for input"); +                if (retriesLeft) { +                    err = OK; +                } +                break; +            } +            codecBuffer = inputBuffers[inputIndex]; -        buffer->release(); -        buffer = NULL; +            MediaBuffer *mediaBuffer = NULL; -        decoder->stop(); +            err = source->read(&mediaBuffer, &options); +            options.clearSeekTo(); +            if (err != OK) { +                ALOGW("Input Error or EOS"); +                haveMoreInputs = false; +                break; +            } + +            if (mediaBuffer->range_length() > codecBuffer->capacity()) { +                ALOGE("buffer size (%zu) too large for codec input size (%zu)", +                        mediaBuffer->range_length(), codecBuffer->capacity()); +                err = BAD_VALUE; +            } else { +                codecBuffer->setRange(0, mediaBuffer->range_length()); + +                CHECK(mediaBuffer->meta_data()->findInt64(kKeyTime, &ptsUs)); +                memcpy(codecBuffer->data(), +                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(), +                        mediaBuffer->range_length()); +            } + +            mediaBuffer->release(); +            break; +        } + +        if (err == OK && inputIndex < inputBuffers.size()) { +            ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x", +                    
codecBuffer->size(), ptsUs, flags); +            err = decoder->queueInputBuffer( +                    inputIndex, +                    codecBuffer->offset(), +                    codecBuffer->size(), +                    ptsUs, +                    flags); + +            // we don't expect an output from codec config buffer +            if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) { +                continue; +            } +        } + +        while (err == OK) { +            // wait for a decoded buffer +            err = decoder->dequeueOutputBuffer( +                    &index, +                    &offset, +                    &size, +                    &timeUs, +                    &flags, +                    kBufferTimeOutUs); + +            if (err == INFO_FORMAT_CHANGED) { +                ALOGV("Received format change"); +                err = decoder->getOutputFormat(&outputFormat); +            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { +                ALOGV("Output buffers changed"); +                err = decoder->getOutputBuffers(&outputBuffers); +            } else { +                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) { +                    ALOGV("Timed-out waiting for output.. 
retries left = %zu", retriesLeft); +                    err = OK; +                } else if (err == OK) { +                    ALOGV("Received an output buffer"); +                    done = true; +                } else { +                    ALOGW("Received error %d (%s) instead of output", err, asString(err)); +                    done = true; +                } +                break; +            } +        } +    } while (err == OK && !done); +    if (err != OK || size <= 0 || outputFormat == NULL) { +        ALOGE("Failed to decode thumbnail frame"); +        source->stop(); +        decoder->stop(); +        decoder->release();          return NULL;      } -    int64_t timeUs; -    CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs)); +    ALOGV("successfully decoded video frame."); +    sp<ABuffer> videoFrameBuffer = outputBuffers.itemAt(index); +      if (thumbNailTime >= 0) {          if (timeUs != thumbNailTime) { -            const char *mime; -            CHECK(trackMeta->findCString(kKeyMIMEType, &mime)); +            AString mime; +            CHECK(outputFormat->findString("mime", &mime)); -            ALOGV("thumbNailTime = %" PRId64 " us, timeUs = %" PRId64 " us, mime = %s", -                 thumbNailTime, timeUs, mime); +            ALOGV("thumbNailTime = %lld us, timeUs = %lld us, mime = %s", +                    (long long)thumbNailTime, (long long)timeUs, mime.c_str());          }      } -    sp<MetaData> meta = decoder->getFormat(); -      int32_t width, height; -    CHECK(meta->findInt32(kKeyWidth, &width)); -    CHECK(meta->findInt32(kKeyHeight, &height)); +    CHECK(outputFormat->findInt32("width", &width)); +    CHECK(outputFormat->findInt32("height", &height));      int32_t crop_left, crop_top, crop_right, crop_bottom; -    if (!meta->findRect( -                kKeyCropRect, -                &crop_left, &crop_top, &crop_right, &crop_bottom)) { +    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, 
&crop_bottom)) {          crop_left = crop_top = 0;          crop_right = width - 1;          crop_bottom = height - 1; @@ -279,23 +368,21 @@ static VideoFrame *extractVideoFrameWithCodecFlags(      frame->mData = new uint8_t[frame->mSize];      frame->mRotationAngle = rotationAngle; -    int32_t displayWidth, displayHeight; -    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) { -        frame->mDisplayWidth = displayWidth; -    } -    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) { -        frame->mDisplayHeight = displayHeight; +    int32_t sarWidth, sarHeight; +    if (trackMeta->findInt32(kKeySARWidth, &sarWidth) +            && trackMeta->findInt32(kKeySARHeight, &sarHeight) +            && sarHeight != 0) { +        frame->mDisplayWidth = (frame->mDisplayWidth * sarWidth) / sarHeight;      }      int32_t srcFormat; -    CHECK(meta->findInt32(kKeyColorFormat, &srcFormat)); +    CHECK(outputFormat->findInt32("color-format", &srcFormat)); -    ColorConverter converter( -            (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565); +    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565);      if (converter.isValid()) {          err = converter.convert( -                (const uint8_t *)buffer->data() + buffer->range_offset(), +                (const uint8_t *)videoFrameBuffer->data(),                  width, height,                  crop_left, crop_top, crop_right, crop_bottom,                  frame->mData, @@ -303,17 +390,16 @@ static VideoFrame *extractVideoFrameWithCodecFlags(                  frame->mHeight,                  0, 0, frame->mWidth - 1, frame->mHeight - 1);      } else { -        ALOGE("Unable to instantiate color conversion from format 0x%08x to " -              "RGB565", -              srcFormat); +        ALOGE("Unable to convert from format 0x%08x to RGB565", srcFormat);          err = ERROR_UNSUPPORTED;      } -    buffer->release(); -    buffer = NULL; - +    
videoFrameBuffer.clear(); +    source->stop(); +    decoder->releaseOutputBuffer(index);      decoder->stop(); +    decoder->release();      if (err != OK) {          ALOGE("Colorconverter failed to convert frame."); @@ -384,20 +470,29 @@ VideoFrame *StagefrightMetadataRetriever::getFrameAtTime(          mAlbumArt = MediaAlbumArt::fromData(dataSize, data);      } -    VideoFrame *frame = -        extractVideoFrameWithCodecFlags( -                &mClient, trackMeta, source, OMXCodec::kPreferSoftwareCodecs, -                timeUs, option); - -    if (frame == NULL) { -        ALOGV("Software decoder failed to extract thumbnail, " -             "trying hardware decoder."); +    const char *mime; +    CHECK(trackMeta->findCString(kKeyMIMEType, &mime)); -        frame = extractVideoFrameWithCodecFlags(&mClient, trackMeta, source, 0, -                        timeUs, option); +    Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs; +    OMXCodec::findMatchingCodecs( +            mime, +            false, /* encoder */ +            NULL, /* matchComponentName */ +            OMXCodec::kPreferSoftwareCodecs, +            &matchingCodecs); + +    for (size_t i = 0; i < matchingCodecs.size(); ++i) { +        const char *componentName = matchingCodecs[i].mName.string(); +        VideoFrame *frame = +            extractVideoFrame(componentName, trackMeta, source, timeUs, option); + +        if (frame != NULL) { +            return frame; +        } +        ALOGV("%s failed to extract thumbnail, trying next decoder.", componentName);      } -    return frame; +    return NULL;  }  MediaAlbumArt *StagefrightMetadataRetriever::extractAlbumArt() { @@ -519,6 +614,12 @@ void StagefrightMetadataRetriever::parseMetaData() {      mMetaData.add(METADATA_KEY_NUM_TRACKS, String8(tmp)); +    float captureFps; +    if (meta->findFloat(kKeyCaptureFramerate, &captureFps)) { +        sprintf(tmp, "%f", captureFps); +        mMetaData.add(METADATA_KEY_CAPTURE_FRAMERATE, String8(tmp)); +    } 
+      bool hasAudio = false;      bool hasVideo = false;      int32_t videoWidth = -1; @@ -629,4 +730,11 @@ void StagefrightMetadataRetriever::parseMetaData() {      }  } +void StagefrightMetadataRetriever::clearMetadata() { +    mParsedMetaData = false; +    mMetaData.clear(); +    delete mAlbumArt; +    mAlbumArt = NULL; +} +  }  // namespace android diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp new file mode 100644 index 0000000..6b62e43 --- /dev/null +++ b/media/libstagefright/SurfaceUtils.cpp @@ -0,0 +1,215 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SurfaceUtils" +#include <utils/Log.h> + +#include <media/stagefright/SurfaceUtils.h> + +#include <gui/Surface.h> + +namespace android { + +status_t setNativeWindowSizeFormatAndUsage( +        ANativeWindow *nativeWindow /* nonnull */, +        int width, int height, int format, int rotation, int usage) { +    status_t err = native_window_set_buffers_dimensions(nativeWindow, width, height); +    if (err != NO_ERROR) { +        ALOGE("native_window_set_buffers_dimensions failed: %s (%d)", strerror(-err), -err); +        return err; +    } + +    err = native_window_set_buffers_format(nativeWindow, format); +    if (err != NO_ERROR) { +        ALOGE("native_window_set_buffers_format failed: %s (%d)", strerror(-err), -err); +        return err; +    } + +    int transform = 0; +    if ((rotation % 90) == 0) { +        switch ((rotation / 90) & 3) { +            case 1:  transform = HAL_TRANSFORM_ROT_90;  break; +            case 2:  transform = HAL_TRANSFORM_ROT_180; break; +            case 3:  transform = HAL_TRANSFORM_ROT_270; break; +            default: transform = 0;                     break; +        } +    } + +    err = native_window_set_buffers_transform(nativeWindow, transform); +    if (err != NO_ERROR) { +        ALOGE("native_window_set_buffers_transform failed: %s (%d)", strerror(-err), -err); +        return err; +    } + +    // Make sure to check whether either Stagefright or the video decoder +    // requested protected buffers. +    if (usage & GRALLOC_USAGE_PROTECTED) { +        // Verify that the ANativeWindow sends images directly to +        // SurfaceFlinger. 
+        int queuesToNativeWindow = 0; +        err = nativeWindow->query( +                nativeWindow, NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &queuesToNativeWindow); +        if (err != NO_ERROR) { +            ALOGE("error authenticating native window: %s (%d)", strerror(-err), -err); +            return err; +        } +        if (queuesToNativeWindow != 1) { +            ALOGE("native window could not be authenticated"); +            return PERMISSION_DENIED; +        } +    } + +    int consumerUsage = 0; +    err = nativeWindow->query(nativeWindow, NATIVE_WINDOW_CONSUMER_USAGE_BITS, &consumerUsage); +    if (err != NO_ERROR) { +        ALOGW("failed to get consumer usage bits. ignoring"); +        err = NO_ERROR; +    } + +    int finalUsage = usage | consumerUsage; +    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage); +    err = native_window_set_usage(nativeWindow, finalUsage); +    if (err != NO_ERROR) { +        ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err); +        return err; +    } + +    err = native_window_set_scaling_mode( +            nativeWindow, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW); +    if (err != NO_ERROR) { +        ALOGE("native_window_set_scaling_mode failed: %s (%d)", strerror(-err), -err); +        return err; +    } + +    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x", +            nativeWindow, width, height, format, rotation, finalUsage); +    return NO_ERROR; +} + +status_t pushBlankBuffersToNativeWindow(ANativeWindow *nativeWindow /* nonnull */) { +    status_t err = NO_ERROR; +    ANativeWindowBuffer* anb = NULL; +    int numBufs = 0; +    int minUndequeuedBufs = 0; + +    // We need to reconnect to the ANativeWindow as a CPU client to ensure that +    // no frames get dropped by SurfaceFlinger assuming that these are video +    // frames. 
+    err = native_window_api_disconnect(nativeWindow, NATIVE_WINDOW_API_MEDIA); +    if (err != NO_ERROR) { +        ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", strerror(-err), -err); +        return err; +    } + +    err = native_window_api_connect(nativeWindow, NATIVE_WINDOW_API_CPU); +    if (err != NO_ERROR) { +        ALOGE("error pushing blank frames: api_connect failed: %s (%d)", strerror(-err), -err); +        (void)native_window_api_connect(nativeWindow, NATIVE_WINDOW_API_MEDIA); +        return err; +    } + +    err = setNativeWindowSizeFormatAndUsage( +            nativeWindow, 1, 1, HAL_PIXEL_FORMAT_RGBX_8888, 0, GRALLOC_USAGE_SW_WRITE_OFTEN); +    if (err != NO_ERROR) { +        goto error; +    } + +    static_cast<Surface*>(nativeWindow)->getIGraphicBufferProducer()->allowAllocation(true); + +    err = nativeWindow->query(nativeWindow, +            NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs); +    if (err != NO_ERROR) { +        ALOGE("error pushing blank frames: MIN_UNDEQUEUED_BUFFERS query " +                "failed: %s (%d)", strerror(-err), -err); +        goto error; +    } + +    numBufs = minUndequeuedBufs + 1; +    err = native_window_set_buffer_count(nativeWindow, numBufs); +    if (err != NO_ERROR) { +        ALOGE("error pushing blank frames: set_buffer_count failed: %s (%d)", strerror(-err), -err); +        goto error; +    } + +    // We push numBufs + 1 buffers to ensure that we've drawn into the same +    // buffer twice.  This should guarantee that the buffer has been displayed +    // on the screen and then been replaced, so an previous video frames are +    // guaranteed NOT to be currently displayed. 
+    for (int i = 0; i < numBufs + 1; i++) { +        err = native_window_dequeue_buffer_and_wait(nativeWindow, &anb); +        if (err != NO_ERROR) { +            ALOGE("error pushing blank frames: dequeueBuffer failed: %s (%d)", +                    strerror(-err), -err); +            break; +        } + +        sp<GraphicBuffer> buf(new GraphicBuffer(anb, false)); + +        // Fill the buffer with the a 1x1 checkerboard pattern ;) +        uint32_t *img = NULL; +        err = buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img)); +        if (err != NO_ERROR) { +            ALOGE("error pushing blank frames: lock failed: %s (%d)", strerror(-err), -err); +            break; +        } + +        *img = 0; + +        err = buf->unlock(); +        if (err != NO_ERROR) { +            ALOGE("error pushing blank frames: unlock failed: %s (%d)", strerror(-err), -err); +            break; +        } + +        err = nativeWindow->queueBuffer(nativeWindow, buf->getNativeBuffer(), -1); +        if (err != NO_ERROR) { +            ALOGE("error pushing blank frames: queueBuffer failed: %s (%d)", strerror(-err), -err); +            break; +        } + +        anb = NULL; +    } + +error: + +    if (anb != NULL) { +        nativeWindow->cancelBuffer(nativeWindow, anb, -1); +        anb = NULL; +    } + +    // Clean up after success or error. 
+    status_t err2 = native_window_api_disconnect(nativeWindow, NATIVE_WINDOW_API_CPU); +    if (err2 != NO_ERROR) { +        ALOGE("error pushing blank frames: api_disconnect failed: %s (%d)", strerror(-err2), -err2); +        if (err == NO_ERROR) { +            err = err2; +        } +    } + +    err2 = native_window_api_connect(nativeWindow, NATIVE_WINDOW_API_MEDIA); +    if (err2 != NO_ERROR) { +        ALOGE("error pushing blank frames: api_connect failed: %s (%d)", strerror(-err), -err); +        if (err == NO_ERROR) { +            err = err2; +        } +    } + +    return err; +} + +}  // namespace android + diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index b3a79a0..f0a7277 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -70,6 +70,23 @@ uint64_t hton64(uint64_t x) {      return ((uint64_t)htonl(x & 0xffffffff) << 32) | htonl(x >> 32);  } +static status_t copyNALUToABuffer(sp<ABuffer> *buffer, const uint8_t *ptr, size_t length) { +    if (((*buffer)->size() + 4 + length) > ((*buffer)->capacity() - (*buffer)->offset())) { +        sp<ABuffer> tmpBuffer = new (std::nothrow) ABuffer((*buffer)->size() + 4 + length + 1024); +        if (tmpBuffer.get() == NULL || tmpBuffer->base() == NULL) { +            return NO_MEMORY; +        } +        memcpy(tmpBuffer->data(), (*buffer)->data(), (*buffer)->size()); +        tmpBuffer->setRange(0, (*buffer)->size()); +        (*buffer) = tmpBuffer; +    } + +    memcpy((*buffer)->data() + (*buffer)->size(), "\x00\x00\x00\x01", 4); +    memcpy((*buffer)->data() + (*buffer)->size() + 4, ptr, length); +    (*buffer)->setRange((*buffer)->offset(), (*buffer)->size() + 4 + length); +    return OK; +} +  status_t convertMetaDataToMessage(          const sp<MetaData> &meta, sp<AMessage> *format) {      format->clear(); @@ -166,11 +183,26 @@ status_t convertMetaDataToMessage(          msg->setInt32("max-input-size", maxInputSize);      } +    int32_t maxWidth; +  
  if (meta->findInt32(kKeyMaxWidth, &maxWidth)) { +        msg->setInt32("max-width", maxWidth); +    } + +    int32_t maxHeight; +    if (meta->findInt32(kKeyMaxHeight, &maxHeight)) { +        msg->setInt32("max-height", maxHeight); +    } +      int32_t rotationDegrees;      if (meta->findInt32(kKeyRotation, &rotationDegrees)) {          msg->setInt32("rotation-degrees", rotationDegrees);      } +    int32_t fps; +    if (meta->findInt32(kKeyFrameRate, &fps)) { +        msg->setInt32("frame-rate", fps); +    } +      uint32_t type;      const void *data;      size_t size; @@ -199,7 +231,10 @@ status_t convertMetaDataToMessage(          ptr += 6;          size -= 6; -        sp<ABuffer> buffer = new ABuffer(1024); +        sp<ABuffer> buffer = new (std::nothrow) ABuffer(1024); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        }          buffer->setRange(0, 0);          for (size_t i = 0; i < numSeqParameterSets; ++i) { @@ -209,11 +244,13 @@ status_t convertMetaDataToMessage(              ptr += 2;              size -= 2; -            CHECK(size >= length); - -            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4); -            memcpy(buffer->data() + buffer->size() + 4, ptr, length); -            buffer->setRange(0, buffer->size() + 4 + length); +            if (size < length) { +                return BAD_VALUE; +            } +            status_t err = copyNALUToABuffer(&buffer, ptr, length); +            if (err != OK) { +                return err; +            }              ptr += length;              size -= length; @@ -224,7 +261,10 @@ status_t convertMetaDataToMessage(          msg->setBuffer("csd-0", buffer); -        buffer = new ABuffer(1024); +        buffer = new (std::nothrow) ABuffer(1024); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        }          buffer->setRange(0, 0);          CHECK(size >= 1); @@ -239,11 +279,13 @@ 
status_t convertMetaDataToMessage(              ptr += 2;              size -= 2; -            CHECK(size >= length); - -            memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4); -            memcpy(buffer->data() + buffer->size() + 4, ptr, length); -            buffer->setRange(0, buffer->size() + 4 + length); +            if (size < length) { +                return BAD_VALUE; +            } +            status_t err = copyNALUToABuffer(&buffer, ptr, length); +            if (err != OK) { +                return err; +            }              ptr += length;              size -= length; @@ -268,7 +310,10 @@ status_t convertMetaDataToMessage(          size -= 1;          size_t j = 0, i = 0; -        sp<ABuffer> buffer = new ABuffer(1024); +        sp<ABuffer> buffer = new (std::nothrow) ABuffer(1024); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        }          buffer->setRange(0, 0);          for (i = 0; i < numofArrays; i++) { @@ -288,11 +333,13 @@ status_t convertMetaDataToMessage(                  ptr += 2;                  size -= 2; -                CHECK(size >= length); - -                memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4); -                memcpy(buffer->data() + buffer->size() + 4, ptr, length); -                buffer->setRange(0, buffer->size() + 4 + length); +                if (size < length) { +                    return BAD_VALUE; +                } +                status_t err = copyNALUToABuffer(&buffer, ptr, length); +                if (err != OK) { +                    return err; +                }                  ptr += length;                  size -= length; @@ -311,7 +358,10 @@ status_t convertMetaDataToMessage(          esds.getCodecSpecificInfo(                  &codec_specific_data, &codec_specific_data_size); -        sp<ABuffer> buffer = new ABuffer(codec_specific_data_size); +        sp<ABuffer> buffer = new (std::nothrow) 
ABuffer(codec_specific_data_size); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        }          memcpy(buffer->data(), codec_specific_data,                 codec_specific_data_size); @@ -320,7 +370,10 @@ status_t convertMetaDataToMessage(          buffer->meta()->setInt64("timeUs", 0);          msg->setBuffer("csd-0", buffer);      } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) { -        sp<ABuffer> buffer = new ABuffer(size); +        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        }          memcpy(buffer->data(), data, size);          buffer->meta()->setInt32("csd", true); @@ -331,19 +384,53 @@ status_t convertMetaDataToMessage(              return -EINVAL;          } -        buffer = new ABuffer(size); +        buffer = new (std::nothrow) ABuffer(size); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        }          memcpy(buffer->data(), data, size);          buffer->meta()->setInt32("csd", true);          buffer->meta()->setInt64("timeUs", 0);          msg->setBuffer("csd-1", buffer);      } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) { -        sp<ABuffer> buffer = new ABuffer(size); +        sp<ABuffer> buffer = new (std::nothrow) ABuffer(size); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        }          memcpy(buffer->data(), data, size);          buffer->meta()->setInt32("csd", true);          buffer->meta()->setInt64("timeUs", 0);          msg->setBuffer("csd-0", buffer); + +        if (!meta->findData(kKeyOpusCodecDelay, &type, &data, &size)) { +            return -EINVAL; +        } + +        buffer = new (std::nothrow) ABuffer(size); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        } +        
memcpy(buffer->data(), data, size); + +        buffer->meta()->setInt32("csd", true); +        buffer->meta()->setInt64("timeUs", 0); +        msg->setBuffer("csd-1", buffer); + +        if (!meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size)) { +            return -EINVAL; +        } + +        buffer = new (std::nothrow) ABuffer(size); +        if (buffer.get() == NULL || buffer->base() == NULL) { +            return NO_MEMORY; +        } +        memcpy(buffer->data(), data, size); + +        buffer->meta()->setInt32("csd", true); +        buffer->meta()->setInt64("timeUs", 0); +        msg->setBuffer("csd-2", buffer);      }      *format = msg; @@ -546,19 +633,36 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {          meta->setInt32(kKeyMaxInputSize, maxInputSize);      } +    int32_t maxWidth; +    if (msg->findInt32("max-width", &maxWidth)) { +        meta->setInt32(kKeyMaxWidth, maxWidth); +    } + +    int32_t maxHeight; +    if (msg->findInt32("max-height", &maxHeight)) { +        meta->setInt32(kKeyMaxHeight, maxHeight); +    } + +    int32_t fps; +    if (msg->findInt32("frame-rate", &fps)) { +        meta->setInt32(kKeyFrameRate, fps); +    } +      // reassemble the csd data into its original form      sp<ABuffer> csd0;      if (msg->findBuffer("csd-0", &csd0)) { -        if (mime.startsWith("video/")) { // do we need to be stricter than this? +        if (mime == MEDIA_MIMETYPE_VIDEO_AVC) {              sp<ABuffer> csd1;              if (msg->findBuffer("csd-1", &csd1)) {                  char avcc[1024]; // that oughta be enough, right?                  
size_t outsize = reassembleAVCC(csd0, csd1, avcc);                  meta->setData(kKeyAVCC, kKeyAVCC, avcc, outsize);              } -        } else if (mime.startsWith("audio/")) { +        } else if (mime == MEDIA_MIMETYPE_AUDIO_AAC || mime == MEDIA_MIMETYPE_VIDEO_MPEG4) {              int csd0size = csd0->size();              char esds[csd0size + 31]; +            // The written ESDS is actually for an audio stream, but it's enough +            // for transporting the CSD to muxers.              reassembleESDS(csd0, esds);              meta->setData(kKeyESDS, kKeyESDS, esds, sizeof(esds));          } @@ -800,5 +904,88 @@ AString uriDebugString(const AString &uri, bool incognito) {      return AString("<no-scheme URI suppressed>");  } +HLSTime::HLSTime(const sp<AMessage>& meta) : +    mSeq(-1), +    mTimeUs(-1ll), +    mMeta(meta) { +    if (meta != NULL) { +        CHECK(meta->findInt32("discontinuitySeq", &mSeq)); +        CHECK(meta->findInt64("timeUs", &mTimeUs)); +    } +} + +int64_t HLSTime::getSegmentTimeUs() const { +    int64_t segmentStartTimeUs = -1ll; +    if (mMeta != NULL) { +        CHECK(mMeta->findInt64("segmentStartTimeUs", &segmentStartTimeUs)); + +        int64_t segmentFirstTimeUs; +        if (mMeta->findInt64("segmentFirstTimeUs", &segmentFirstTimeUs)) { +            segmentStartTimeUs += mTimeUs - segmentFirstTimeUs; +        } + +        // adjust segment time by playlist age (for live streaming) +        int64_t playlistTimeUs; +        if (mMeta->findInt64("playlistTimeUs", &playlistTimeUs)) { +            int64_t playlistAgeUs = ALooper::GetNowUs() - playlistTimeUs; + +            int64_t durationUs; +            CHECK(mMeta->findInt64("segmentDurationUs", &durationUs)); + +            // round to nearest whole segment +            playlistAgeUs = (playlistAgeUs + durationUs / 2) +                    / durationUs * durationUs; + +            segmentStartTimeUs -= playlistAgeUs; +            if (segmentStartTimeUs < 0) { +               
 segmentStartTimeUs = 0; +            } +        } +    } +    return segmentStartTimeUs; +} + +bool operator <(const HLSTime &t0, const HLSTime &t1) { +    // we can only compare discontinuity sequence and timestamp. +    // (mSegmentTimeUs is not reliable in live streaming case, it's the +    // time starting from beginning of playlist but playlist could change.) +    return t0.mSeq < t1.mSeq +            || (t0.mSeq == t1.mSeq && t0.mTimeUs < t1.mTimeUs); +} + +void writeToAMessage(sp<AMessage> msg, const AudioPlaybackRate &rate) { +    msg->setFloat("speed", rate.mSpeed); +    msg->setFloat("pitch", rate.mPitch); +    msg->setInt32("audio-fallback-mode", rate.mFallbackMode); +    msg->setInt32("audio-stretch-mode", rate.mStretchMode); +} + +void readFromAMessage(const sp<AMessage> &msg, AudioPlaybackRate *rate /* nonnull */) { +    *rate = AUDIO_PLAYBACK_RATE_DEFAULT; +    CHECK(msg->findFloat("speed", &rate->mSpeed)); +    CHECK(msg->findFloat("pitch", &rate->mPitch)); +    CHECK(msg->findInt32("audio-fallback-mode", (int32_t *)&rate->mFallbackMode)); +    CHECK(msg->findInt32("audio-stretch-mode", (int32_t *)&rate->mStretchMode)); +} + +void writeToAMessage(sp<AMessage> msg, const AVSyncSettings &sync, float videoFpsHint) { +    msg->setInt32("sync-source", sync.mSource); +    msg->setInt32("audio-adjust-mode", sync.mAudioAdjustMode); +    msg->setFloat("tolerance", sync.mTolerance); +    msg->setFloat("video-fps", videoFpsHint); +} + +void readFromAMessage( +        const sp<AMessage> &msg, +        AVSyncSettings *sync /* nonnull */, +        float *videoFps /* nonnull */) { +    AVSyncSettings settings; +    CHECK(msg->findInt32("sync-source", (int32_t *)&settings.mSource)); +    CHECK(msg->findInt32("audio-adjust-mode", (int32_t *)&settings.mAudioAdjustMode)); +    CHECK(msg->findFloat("tolerance", &settings.mTolerance)); +    CHECK(msg->findFloat("video-fps", videoFps)); +    *sync = settings; +} +  }  // namespace android diff --git 
a/media/libstagefright/VBRISeeker.cpp b/media/libstagefright/VBRISeeker.cpp index e988f6d..8a0fcac 100644 --- a/media/libstagefright/VBRISeeker.cpp +++ b/media/libstagefright/VBRISeeker.cpp @@ -122,7 +122,7 @@ sp<VBRISeeker> VBRISeeker::CreateFromSource(          seeker->mSegments.push(numBytes); -        ALOGV("entry #%zu: %u offset 0x%016llx", i, numBytes, offset); +        ALOGV("entry #%zu: %u offset %#016llx", i, numBytes, (long long)offset);          offset += numBytes;      } @@ -163,7 +163,7 @@ bool VBRISeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {          *pos += mSegments.itemAt(segmentIndex++);      } -    ALOGV("getOffsetForTime %" PRId64 " us => 0x%016llx", *timeUs, *pos); +    ALOGV("getOffsetForTime %lld us => 0x%016llx", (long long)*timeUs, (long long)*pos);      *timeUs = nowUs; diff --git a/media/libmediaplayerservice/VideoFrameScheduler.cpp b/media/libstagefright/VideoFrameScheduler.cpp index ce5f5fe..5fe9bf9 100644 --- a/media/libmediaplayerservice/VideoFrameScheduler.cpp +++ b/media/libstagefright/VideoFrameScheduler.cpp @@ -28,8 +28,7 @@  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AUtils.h> - -#include "VideoFrameScheduler.h" +#include <media/stagefright/VideoFrameScheduler.h>  namespace android { @@ -56,7 +55,7 @@ static const size_t kMinSamplesToEstimatePeriod = 3;  static const size_t kMaxSamplesToEstimatePeriod = VideoFrameScheduler::kHistorySize;  static const size_t kPrecision = 12; -static const size_t kErrorThreshold = (1 << (kPrecision * 2)) / 10; +static const int64_t kErrorThreshold = (1 << (kPrecision * 2)) / 10;  static const int64_t kMultiplesThresholdDiv = 4;            // 25%  static const int64_t kReFitThresholdDiv = 100;              // 1%  static const nsecs_t kMaxAllowedFrameSkip = kNanosIn1s;     // 1 sec @@ -258,7 +257,8 @@ void VideoFrameScheduler::PLL::prime(size_t numSamplesToUse) {              mPhase = firstTime;          }      } -    ALOGV("priming[%zu] 
phase:%lld period:%lld", numSamplesToUse, mPhase, mPeriod); +    ALOGV("priming[%zu] phase:%lld period:%lld", +            numSamplesToUse, (long long)mPhase, (long long)mPeriod);  }  nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) { @@ -316,6 +316,10 @@ nsecs_t VideoFrameScheduler::PLL::addSample(nsecs_t time) {      return mPeriod;  } +nsecs_t VideoFrameScheduler::PLL::getPeriod() const { +    return mPrimed ? mPeriod : 0; +} +  /* ======================================================================= */  /*                             Frame Scheduler                             */  /* ======================================================================= */ @@ -382,6 +386,14 @@ nsecs_t VideoFrameScheduler::getVsyncPeriod() {      return kDefaultVsyncPeriod;  } +float VideoFrameScheduler::getFrameRate() { +    nsecs_t videoPeriod = mPll.getPeriod(); +    if (videoPeriod > 0) { +        return 1e9 / videoPeriod; +    } +    return 0.f; +} +  nsecs_t VideoFrameScheduler::schedule(nsecs_t renderTime) {      nsecs_t origRenderTime = renderTime; diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp index 5ec3438..8ef2dca 100644 --- a/media/libstagefright/avc_utils.cpp +++ b/media/libstagefright/avc_utils.cpp @@ -26,6 +26,7 @@  #include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MediaErrors.h>  #include <media/stagefright/MetaData.h> +#include <utils/misc.h>  namespace android { @@ -186,17 +187,31 @@ void FindAVCDimensions(              if (aspect_ratio_idc == 255 /* extendedSAR */) {                  sar_width = br.getBits(16);                  sar_height = br.getBits(16); -            } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) { -                static const int32_t kFixedSARWidth[] = { -                    1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160 +            } else { +                static const struct { unsigned width, height; } kFixedSARs[] = { +                        {   0,  
0 }, // Invalid +                        {   1,  1 }, +                        {  12, 11 }, +                        {  10, 11 }, +                        {  16, 11 }, +                        {  40, 33 }, +                        {  24, 11 }, +                        {  20, 11 }, +                        {  32, 11 }, +                        {  80, 33 }, +                        {  18, 11 }, +                        {  15, 11 }, +                        {  64, 33 }, +                        { 160, 99 }, +                        {   4,  3 }, +                        {   3,  2 }, +                        {   2,  1 },                  }; -                static const int32_t kFixedSARHeight[] = { -                    1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99 -                }; - -                sar_width = kFixedSARWidth[aspect_ratio_idc - 1]; -                sar_height = kFixedSARHeight[aspect_ratio_idc - 1]; +                if (aspect_ratio_idc > 0 && aspect_ratio_idc < NELEM(kFixedSARs)) { +                    sar_width = kFixedSARs[aspect_ratio_idc].width; +                    sar_height = kFixedSARs[aspect_ratio_idc].height; +                }              }          } diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index 495bad0..965c55e 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -75,7 +75,7 @@ SoftAAC2::SoftAAC2(  SoftAAC2::~SoftAAC2() {      aacDecoder_Close(mAACDecoder); -    delete mOutputDelayRingBuffer; +    delete[] mOutputDelayRingBuffer;  }  void SoftAAC2::initPorts() { @@ -623,7 +623,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {                  } else {                      int64_t currentTime = mBufferTimestamps.top();                      currentTime += mStreamInfo->aacSamplesPerFrame * -                            1000000ll / mStreamInfo->sampleRate; +                            
1000000ll / mStreamInfo->aacSampleRate;                      mBufferTimestamps.add(currentTime);                  }              } else { @@ -874,7 +874,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {                          // adjust/interpolate next time stamp                          *currentBufLeft -= decodedSize;                          *nextTimeStamp += mStreamInfo->aacSamplesPerFrame * -                                1000000ll / mStreamInfo->sampleRate; +                                1000000ll / mStreamInfo->aacSampleRate;                          ALOGV("adjusted nextTimeStamp/size to %lld/%d",                                  (long long) *nextTimeStamp, *currentBufLeft);                      } else { @@ -975,6 +975,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {          mBufferSizes.clear();          mDecodedSizes.clear();          mLastInHeader = NULL; +        mEndOfInput = false;      } else {          int avail;          while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) { @@ -989,6 +990,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {              mOutputBufferCount++;          }          mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos; +        mEndOfOutput = false;      }  } diff --git a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp index d1b0f76..a9723ea 100644 --- a/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp +++ b/media/libstagefright/codecs/amrnb/dec/SoftAMR.cpp @@ -428,7 +428,15 @@ void SoftAMR::onQueueFilled(OMX_U32 /* portIndex */) {      }  } -void SoftAMR::onPortFlushCompleted(OMX_U32 /* portIndex */) { +void SoftAMR::onPortFlushCompleted(OMX_U32 portIndex) { +    ALOGV("onPortFlushCompleted portindex %d, resetting frame ", portIndex); +    if (portIndex == 0) { +        if (mMode == MODE_NARROW) { +           Speech_Decode_Frame_reset(mState); +        } else { +           pvDecoder_AmrWb_Reset(mState, 0 /* 
reset_all */); +        } +    }  }  void SoftAMR::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp index 8388472..e083315 100644 --- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp +++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp @@ -21,7 +21,6 @@  #include "ih264_typedefs.h"  #include "iv.h"  #include "ivd.h" -#include "ithread.h"  #include "ih264d.h"  #include "SoftAVCDec.h" @@ -115,13 +114,16 @@ SoftAVC::SoftAVC(              kProfileLevels, ARRAY_SIZE(kProfileLevels),              320 /* width */, 240 /* height */, callbacks,              appData, component), +      mCodecCtx(NULL),        mMemRecords(NULL),        mFlushOutBuffer(NULL),        mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),        mIvColorFormat(IV_YUV_420P),        mNewWidth(mWidth),        mNewHeight(mHeight), -      mChangingResolution(false) { +      mNewLevel(0), +      mChangingResolution(false), +      mSignalledError(false) {      initPorts(              kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE); @@ -131,7 +133,7 @@ SoftAVC::SoftAVC(      GENERATE_FILE_NAMES();      CREATE_DUMP_FILE(mInFile); -    CHECK_EQ(initDecoder(), (status_t)OK); +    CHECK_EQ(initDecoder(mWidth, mHeight), (status_t)OK);  }  SoftAVC::~SoftAVC() { @@ -191,7 +193,7 @@ status_t SoftAVC::setParams(size_t stride) {      s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);      s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t); -    ALOGV("Set the run-time (dynamic) parameters stride = %u", stride); +    ALOGV("Set the run-time (dynamic) parameters stride = %zu", stride);      status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);      if (status != IV_SUCCESS) { @@ -231,6 +233,7 @@ status_t SoftAVC::resetDecoder() {          ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);          return UNKNOWN_ERROR;      } +    mSignalledError = false;   
   /* Set the run-time (dynamic) parameters */      setParams(outputBufferWidth()); @@ -284,7 +287,7 @@ status_t SoftAVC::setFlushMode() {      return OK;  } -status_t SoftAVC::initDecoder() { +status_t SoftAVC::initDecoder(uint32_t width, uint32_t height) {      IV_API_CALL_STATUS_T status;      UWORD32 u4_num_reorder_frames; @@ -293,28 +296,33 @@ status_t SoftAVC::initDecoder() {      WORD32 i4_level;      mNumCores = GetCPUCoreCount(); +    mCodecCtx = NULL;      /* Initialize number of ref and reorder modes (for H264) */      u4_num_reorder_frames = 16;      u4_num_ref_frames = 16;      u4_share_disp_buf = 0; -    uint32_t displayStride = outputBufferWidth(); -    uint32_t displayHeight = outputBufferHeight(); +    uint32_t displayStride = mIsAdaptive ? mAdaptiveMaxWidth : width; +    uint32_t displayHeight = mIsAdaptive ? mAdaptiveMaxHeight : height;      uint32_t displaySizeY = displayStride * displayHeight; -    if (displaySizeY > (1920 * 1088)) { -        i4_level = 50; -    } else if (displaySizeY > (1280 * 720)) { -        i4_level = 40; -    } else if (displaySizeY > (720 * 576)) { -        i4_level = 31; -    } else if (displaySizeY > (624 * 320)) { -        i4_level = 30; -    } else if (displaySizeY > (352 * 288)) { -        i4_level = 21; +    if(mNewLevel == 0){ +        if (displaySizeY > (1920 * 1088)) { +            i4_level = 50; +        } else if (displaySizeY > (1280 * 720)) { +            i4_level = 40; +        } else if (displaySizeY > (720 * 576)) { +            i4_level = 31; +        } else if (displaySizeY > (624 * 320)) { +            i4_level = 30; +        } else if (displaySizeY > (352 * 288)) { +            i4_level = 21; +        } else { +            i4_level = 20; +        }      } else { -        i4_level = 20; +        i4_level = mNewLevel;      }      { @@ -430,6 +438,7 @@ status_t SoftAVC::initDecoder() {          status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op);          if (status != 
IV_SUCCESS) { +            mCodecCtx = NULL;              ALOGE("Error in init: 0x%x",                      s_init_op.s_ivd_init_op_t.u4_error_code);              return UNKNOWN_ERROR; @@ -452,7 +461,7 @@ status_t SoftAVC::initDecoder() {      uint32_t bufferSize = displaySizeY * 3 / 2;      mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);      if (NULL == mFlushOutBuffer) { -        ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize); +        ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);          return NO_MEMORY;      } @@ -489,12 +498,12 @@ status_t SoftAVC::deInitDecoder() {      return OK;  } -status_t SoftAVC::reInitDecoder() { +status_t SoftAVC::reInitDecoder(uint32_t width, uint32_t height) {      status_t ret;      deInitDecoder(); -    ret = initDecoder(); +    ret = initDecoder(width, height);      if (OK != ret) {          ALOGE("Create failure");          deInitDecoder(); @@ -506,6 +515,7 @@ status_t SoftAVC::reInitDecoder() {  void SoftAVC::onReset() {      SoftVideoDecoderOMXComponent::onReset(); +    mSignalledError = false;      resetDecoder();      resetPlugin();  } @@ -515,7 +525,14 @@ OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR p      const uint32_t oldHeight = mHeight;      OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params);      if (mWidth != oldWidth || mHeight != oldHeight) { -        reInitDecoder(); +        mNewWidth = mWidth; +        mNewHeight = mHeight; +        status_t err = reInitDecoder(mNewWidth, mNewHeight); +        if (err != OK) { +            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL); +            mSignalledError = true; +            return OMX_ErrorUnsupportedSetting; +        }      }      return ret;  } @@ -590,6 +607,9 @@ void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {  void SoftAVC::onQueueFilled(OMX_U32 portIndex) {      UNUSED(portIndex); +    if 
(mSignalledError) { +        return; +    }      if (mOutputPortSettingsChange != NONE) {          return;      } @@ -622,6 +642,11 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {              if (!inQueue.empty()) {                  inInfo = *inQueue.begin();                  inHeader = inInfo->mHeader; +                if (inHeader == NULL) { +                    inQueue.erase(inQueue.begin()); +                    inInfo->mOwnedByUs = false; +                    continue; +                }              } else {                  break;              } @@ -633,14 +658,21 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {          outHeader->nTimeStamp = 0;          outHeader->nOffset = 0; -        if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) { -            mReceivedEOS = true; +        if (inHeader != NULL) {              if (inHeader->nFilledLen == 0) {                  inQueue.erase(inQueue.begin());                  inInfo->mOwnedByUs = false;                  notifyEmptyBufferDone(inHeader); + +                if (!(inHeader->nFlags & OMX_BUFFERFLAG_EOS)) { +                    continue; +                } + +                mReceivedEOS = true;                  inHeader = NULL;                  setFlushMode(); +            } else if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { +                mReceivedEOS = true;              }          } @@ -648,9 +680,15 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {          // update output port's definition and reinitialize decoder.          
if (mInitNeeded && !mIsInFlush) {              bool portWillReset = false; -            handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight); -            CHECK_EQ(reInitDecoder(), (status_t)OK); +            status_t err = reInitDecoder(mNewWidth, mNewHeight); +            if (err != OK) { +                notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL); +                mSignalledError = true; +                return; +            } + +            handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);              return;          } @@ -691,6 +729,7 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {              bool unsupportedDimensions =                  (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));              bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF)); +            bool unsupportedLevel = (IH264D_UNSUPPORTED_LEVEL == (s_dec_op.u4_error_code & 0xFF));              GETTIME(&mTimeEnd, NULL);              /* Compute time taken for decode() */ @@ -708,13 +747,39 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {                  mTimeStampsValid[timeStampIx] = false;              } +              // This is needed to handle CTS DecoderTest testCodecResetsH264WithoutSurface,              // which is not sending SPS/PPS after port reconfiguration and flush to the codec.              
if (unsupportedDimensions && !mFlushNeeded) {                  bool portWillReset = false; -                handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht); +                mNewWidth = s_dec_op.u4_pic_wd; +                mNewHeight = s_dec_op.u4_pic_ht; -                CHECK_EQ(reInitDecoder(), (status_t)OK); +                status_t err = reInitDecoder(mNewWidth, mNewHeight); +                if (err != OK) { +                    notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL); +                    mSignalledError = true; +                    return; +                } + +                handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight); + +                setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx); + +                ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op); +                return; +            } + +            if (unsupportedLevel && !mFlushNeeded) { + +                mNewLevel = 51; + +                status_t err = reInitDecoder(mNewWidth, mNewHeight); +                if (err != OK) { +                    notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL); +                    mSignalledError = true; +                    return; +                }                  setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx); @@ -745,6 +810,17 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {                  continue;              } +            if (unsupportedLevel) { + +                if (mFlushNeeded) { +                    setFlushMode(); +                } + +                mNewLevel = 51; +                mInitNeeded = true; +                continue; +            } +              if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {                  uint32_t width = s_dec_op.u4_pic_wd;                  uint32_t height = s_dec_op.u4_pic_ht; @@ -758,7 +834,7 @@ void SoftAVC::onQueueFilled(OMX_U32 
portIndex) {              }              if (s_dec_op.u4_output_present) { -                outHeader->nFilledLen = (mWidth * mHeight * 3) / 2; +                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;                  outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];                  mTimeStampsValid[s_dec_op.u4_ts] = false; diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h index 191a71d..1ec8991 100644 --- a/media/libstagefright/codecs/avcdec/SoftAVCDec.h +++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h @@ -100,12 +100,14 @@ private:      bool mInitNeeded;      uint32_t mNewWidth;      uint32_t mNewHeight; +    uint32_t mNewLevel;      // The input stream has changed to a different resolution, which is still supported by the      // codec. So the codec is switching to decode the new resolution.      bool mChangingResolution;      bool mFlushNeeded; +    bool mSignalledError; -    status_t initDecoder(); +    status_t initDecoder(uint32_t width, uint32_t height);      status_t deInitDecoder();      status_t setFlushMode();      status_t setParams(size_t stride); @@ -113,7 +115,7 @@ private:      status_t setNumCores();      status_t resetDecoder();      status_t resetPlugin(); -    status_t reInitDecoder(); +    status_t reInitDecoder(uint32_t width, uint32_t height);      void setDecodeArgs(              ivd_video_decode_ip_t *ps_dec_ip, diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp index bf5e353..6e55034 100644 --- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp +++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp @@ -49,10 +49,10 @@ static void InitOMXParams(T *params) {      params->nVersion.s.nStep = 0;  } -typedef struct LevelConversion { +struct LevelConversion {      OMX_VIDEO_AVCLEVELTYPE omxLevel;      WORD32 avcLevel; -} LevelConcersion; +};  static LevelConversion 
ConversionTable[] = {      { OMX_VIDEO_AVCLevel1,  10 }, @@ -87,8 +87,21 @@ static const CodecProfileLevel kProfileLevels[] = {      { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 },      { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4  },      { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 }, -}; +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1  }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1b }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel11 }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel12 }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel13 }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel2  }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel21 }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel22 }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel3  }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel31 }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel32 }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel4  }, +    { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel41 }, +};  static size_t GetCPUCoreCount() {      long cpuCoreCount = 1; @@ -99,7 +112,7 @@ static size_t GetCPUCoreCount() {      cpuCoreCount = sysconf(_SC_NPROC_ONLN);  #endif      CHECK(cpuCoreCount >= 1); -    ALOGD("Number of CPU cores: %ld", cpuCoreCount); +    ALOGV("Number of CPU cores: %ld", cpuCoreCount);      return (size_t)cpuCoreCount;  } @@ -144,15 +157,15 @@ SoftAVC::SoftAVC(              kProfileLevels, NELEM(kProfileLevels),              176 /* width */, 144 /* height */,              callbacks, appData, component), +      mBitrateUpdated(false), +      mKeyFrameRequested(false),        mIvVideoColorFormat(IV_YUV_420P), -      mIDRFrameRefreshIntervalInSec(1),        mAVCEncProfile(IV_PROFILE_BASE), -      mAVCEncLevel(31), -      mPrevTimestampUs(-1), +      mAVCEncLevel(41),        mStarted(false),        mSawInputEOS(false), +      mSawOutputEOS(false),        mSignalledError(false), -      mConversionBuffer(NULL), 
       mCodecCtx(NULL) {      initPorts(kNumBuffers, kNumBuffers, ((mWidth * mHeight * 3) >> 1), @@ -162,6 +175,10 @@ SoftAVC::SoftAVC(      GENERATE_FILE_NAMES();      CREATE_DUMP_FILE(mInFile);      CREATE_DUMP_FILE(mOutFile); +    memset(mConversionBuffers, 0, sizeof(mConversionBuffers)); +    memset(mInputBufferInfo, 0, sizeof(mInputBufferInfo)); + +    initEncParams();  } @@ -173,7 +190,7 @@ SoftAVC::~SoftAVC() {      CHECK(inQueue.empty());  } -OMX_ERRORTYPE SoftAVC::initEncParams() { +void  SoftAVC::initEncParams() {      mCodecCtx = NULL;      mMemRecords = NULL;      mNumMemRecords = DEFAULT_MEM_REC_CNT; @@ -186,7 +203,6 @@ OMX_ERRORTYPE SoftAVC::initEncParams() {      mIInterval = DEFAULT_I_INTERVAL;      mIDRInterval = DEFAULT_IDR_INTERVAL;      mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL; -    mFrameRate = DEFAULT_SRC_FRAME_RATE;      mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;      mEnableAltRef = DEFAULT_ENABLE_ALT_REF;      mEncSpeed = DEFAULT_ENC_SPEED; @@ -195,11 +211,12 @@ OMX_ERRORTYPE SoftAVC::initEncParams() {      mAIRRefreshPeriod = DEFAULT_AIR_REFRESH_PERIOD;      mPSNREnable = DEFAULT_PSNR_ENABLE;      mReconEnable = DEFAULT_RECON_ENABLE; +    mEntropyMode = DEFAULT_ENTROPY_MODE; +    mBframes = DEFAULT_B_FRAMES;      gettimeofday(&mTimeStart, NULL);      gettimeofday(&mTimeEnd, NULL); -    return OMX_ErrorNone;  } @@ -212,7 +229,6 @@ OMX_ERRORTYPE SoftAVC::setDimensions() {      s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;      s_dimensions_ip.u4_ht = mHeight;      s_dimensions_ip.u4_wd = mWidth; -    s_dimensions_ip.u4_strd = mStride;      s_dimensions_ip.u4_timestamp_high = -1;      s_dimensions_ip.u4_timestamp_low = -1; @@ -260,8 +276,8 @@ OMX_ERRORTYPE SoftAVC::setFrameRate() {      s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL;      s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE; -    s_frame_rate_ip.u4_src_frame_rate = mFrameRate; -    s_frame_rate_ip.u4_tgt_frame_rate = mFrameRate; +    
s_frame_rate_ip.u4_src_frame_rate = mFramerate >> 16; +    s_frame_rate_ip.u4_tgt_frame_rate = mFramerate >> 16;      s_frame_rate_ip.u4_timestamp_high = -1;      s_frame_rate_ip.u4_timestamp_low = -1; @@ -332,7 +348,6 @@ OMX_ERRORTYPE SoftAVC::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {      ive_ctl_set_frame_type_ip_t s_frame_type_ip;      ive_ctl_set_frame_type_op_t s_frame_type_op;      IV_STATUS_T status; -      s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL;      s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE; @@ -503,7 +518,6 @@ OMX_ERRORTYPE SoftAVC::setGopParams() {      s_gop_params_ip.u4_i_frm_interval = mIInterval;      s_gop_params_ip.u4_idr_frm_interval = mIDRInterval; -    s_gop_params_ip.u4_num_b_frames = DEFAULT_B_FRAMES;      s_gop_params_ip.u4_timestamp_high = -1;      s_gop_params_ip.u4_timestamp_low = -1; @@ -529,7 +543,7 @@ OMX_ERRORTYPE SoftAVC::setProfileParams() {      s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;      s_profile_params_ip.e_profile = DEFAULT_EPROFILE; - +    s_profile_params_ip.u4_entropy_coding_mode = mEntropyMode;      s_profile_params_ip.u4_timestamp_high = -1;      s_profile_params_ip.u4_timestamp_low = -1; @@ -595,7 +609,6 @@ void SoftAVC::logVersion() {  OMX_ERRORTYPE SoftAVC::initEncoder() {      IV_STATUS_T status; -    size_t i;      WORD32 level;      uint32_t displaySizeY;      CHECK(!mStarted); @@ -618,27 +631,26 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {      }      mAVCEncLevel = MAX(level, mAVCEncLevel); -    if (OMX_ErrorNone != (errType = initEncParams())) { -        ALOGE("Failed to initialize encoder params"); -        mSignalledError = true; -        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); -        return errType; -    } - -    mStride = ALIGN16(mWidth); +    mStride = mWidth;      if (mInputDataIsMeta) { -        if (mConversionBuffer) { -            free(mConversionBuffer); -            mConversionBuffer = NULL; -        } +        for (size_t i = 0; i < 
MAX_CONVERSION_BUFFERS; i++) { +            if (mConversionBuffers[i] != NULL) { +                free(mConversionBuffers[i]); +            } + +            if (((uint64_t)mStride * mHeight) > ((uint64_t)INT32_MAX / 3)) { +                ALOGE("Buffer size is too big."); +                return OMX_ErrorUndefined; +            } +            mConversionBuffers[i] = (uint8_t *)malloc(mStride * mHeight * 3 / 2); -        if (mConversionBuffer == NULL) { -            mConversionBuffer = (uint8_t *)malloc(mStride * mHeight * 3 / 2); -            if (mConversionBuffer == NULL) { +            if (mConversionBuffers[i] == NULL) {                  ALOGE("Allocating conversion buffer failed.");                  return OMX_ErrorUndefined;              } + +            mConversionBuffersFree[i] = 1;          }      } @@ -654,7 +666,7 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {              break;      } -    ALOGV("Params width %d height %d level %d colorFormat %d", mWidth, +    ALOGD("Params width %d height %d level %d colorFormat %d", mWidth,              mHeight, mAVCEncLevel, mIvVideoColorFormat);      /* Getting Number of MemRecords */ @@ -679,9 +691,13 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {      }      /* Allocate array to hold memory records */ +    if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) { +        ALOGE("requested memory size is too big."); +        return OMX_ErrorUndefined; +    }      mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));      if (NULL == mMemRecords) { -        ALOGE("Unable to allocate memory for hold memory records: Size %d", +        ALOGE("Unable to allocate memory for hold memory records: Size %zu",                  mNumMemRecords * sizeof(iv_mem_rec_t));          mSignalledError = true;          notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); @@ -691,7 +707,7 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {      {          iv_mem_rec_t *ps_mem_rec;          ps_mem_rec = mMemRecords; -        for (i = 0; i < 
mNumMemRecords; i++) { +        for (size_t i = 0; i < mNumMemRecords; i++) {              ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);              ps_mem_rec->pv_base = NULL;              ps_mem_rec->u4_mem_size = 0; @@ -738,13 +754,13 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {          WORD32 total_size;          iv_mem_rec_t *ps_mem_rec;          total_size = 0; -          ps_mem_rec = mMemRecords; -        for (i = 0; i < mNumMemRecords; i++) { + +        for (size_t i = 0; i < mNumMemRecords; i++) {              ps_mem_rec->pv_base = ive_aligned_malloc(                      ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);              if (ps_mem_rec->pv_base == NULL) { -                ALOGE("Allocation failure for mem record id %d size %d\n", i, +                ALOGE("Allocation failure for mem record id %zu size %u\n", i,                          ps_mem_rec->u4_mem_size);                  mSignalledError = true;                  notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); @@ -755,7 +771,6 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {              ps_mem_rec++;          } -        printf("\nTotal memory for codec %d\n", total_size);      }      /* Codec Instance Creation */ @@ -789,7 +804,7 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {          s_init_ip.e_rc_mode = DEFAULT_RC_MODE;          s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE;          s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE; -        s_init_ip.u4_max_num_bframes = DEFAULT_B_FRAMES; +        s_init_ip.u4_num_bframes = mBframes;          s_init_ip.e_content_type = IV_PROGRESSIVE;          s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;          s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y; @@ -864,7 +879,6 @@ OMX_ERRORTYPE SoftAVC::releaseEncoder() {      iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;      iv_retrieve_mem_rec_op_t s_retrieve_mem_op;      iv_mem_rec_t *ps_mem_rec; -    UWORD32 i;      if (!mStarted) {          return OMX_ErrorNone; @@ -885,16 +899,18 
@@ OMX_ERRORTYPE SoftAVC::releaseEncoder() {      /* Free memory records */      ps_mem_rec = mMemRecords; -    for (i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) { +    for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {          ive_aligned_free(ps_mem_rec->pv_base);          ps_mem_rec++;      }      free(mMemRecords); -    if (mConversionBuffer != NULL) { -        free(mConversionBuffer); -        mConversionBuffer = NULL; +    for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; i++) { +        if (mConversionBuffers[i]) { +            free(mConversionBuffers[i]); +            mConversionBuffers[i] = NULL; +        }      }      mStarted = false; @@ -926,23 +942,21 @@ OMX_ERRORTYPE SoftAVC::internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params)                  return OMX_ErrorUndefined;              } -            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline; -            OMX_VIDEO_AVCLEVELTYPE omxLevel = OMX_VIDEO_AVCLevel31; +            OMX_VIDEO_AVCLEVELTYPE omxLevel = OMX_VIDEO_AVCLevel41;              if (OMX_ErrorNone                      != ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) {                  return OMX_ErrorUndefined;              } +            avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline;              avcParams->eLevel = omxLevel;              avcParams->nRefFrames = 1; -            avcParams->nBFrames = 0;              avcParams->bUseHadamard = OMX_TRUE;              avcParams->nAllowedPictureTypes = (OMX_VIDEO_PictureTypeI -                    | OMX_VIDEO_PictureTypeP); +                    | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB);              avcParams->nRefIdx10ActiveMinus1 = 0;              avcParams->nRefIdx11ActiveMinus1 = 0;              avcParams->bWeightedPPrediction = OMX_FALSE; -            avcParams->bEntropyCodingCABAC = OMX_FALSE;              avcParams->bconstIpred = OMX_FALSE;              avcParams->bDirect8x8Inference = OMX_FALSE;              
avcParams->bDirectSpatialTemporal = OMX_FALSE; @@ -973,14 +987,26 @@ OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR p                  return OMX_ErrorUndefined;              } -            if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline -                    || avcType->nRefFrames != 1 || avcType->nBFrames != 0 +            mEntropyMode = 0; + +            if (OMX_TRUE == avcType->bEntropyCodingCABAC) +                mEntropyMode = 1; + +            if ((avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) && +                    avcType->nPFrames) { +                mBframes = avcType->nBFrames / avcType->nPFrames; +            } + +            mIInterval = avcType->nPFrames + avcType->nBFrames; + +            if (OMX_VIDEO_AVCLoopFilterDisable == avcType->eLoopFilterMode) +                mDisableDeblkLevel = 4; + +            if (avcType->nRefFrames != 1                      || avcType->bUseHadamard != OMX_TRUE -                    || (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0                      || avcType->nRefIdx10ActiveMinus1 != 0                      || avcType->nRefIdx11ActiveMinus1 != 0                      || avcType->bWeightedPPrediction != OMX_FALSE -                    || avcType->bEntropyCodingCABAC != OMX_FALSE                      || avcType->bconstIpred != OMX_FALSE                      || avcType->bDirect8x8Inference != OMX_FALSE                      || avcType->bDirectSpatialTemporal != OMX_FALSE @@ -1075,14 +1101,27 @@ OMX_ERRORTYPE SoftAVC::setEncodeArgs(      /* Initialize color formats */      ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat; -      source = NULL; -    if (inputBufferHeader) { +    if ((inputBufferHeader != NULL) && inputBufferHeader->nFilledLen) {          source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset;          if (mInputDataIsMeta) { +            uint8_t *conversionBuffer = NULL; +            for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; 
i++) { +                if (mConversionBuffersFree[i]) { +                    mConversionBuffersFree[i] = 0; +                    conversionBuffer = mConversionBuffers[i]; +                    break; +                } +            } + +            if (NULL == conversionBuffer) { +                ALOGE("No free buffers to hold conversion data"); +                return OMX_ErrorUndefined; +            } +              source = extractGraphicBuffer( -                    mConversionBuffer, (mWidth * mHeight * 3 / 2), source, +                    conversionBuffer, (mWidth * mHeight * 3 / 2), source,                      inputBufferHeader->nFilledLen, mWidth, mHeight);              if (source == NULL) { @@ -1091,6 +1130,18 @@ OMX_ERRORTYPE SoftAVC::setEncodeArgs(                  return OMX_ErrorUndefined;              }          } +        ps_encode_ip->u4_is_last = 0; +        ps_encode_ip->u4_timestamp_high = (inputBufferHeader->nTimeStamp) >> 32; +        ps_encode_ip->u4_timestamp_low = (inputBufferHeader->nTimeStamp) & 0xFFFFFFFF; +    } +    else { +        if (mSawInputEOS){ +            ps_encode_ip->u4_is_last = 1; +        } +        memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t)); +        ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat; +        ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t); +        return OMX_ErrorNone;      }      pu1_buf = (UWORD8 *)source; @@ -1145,14 +1196,6 @@ OMX_ERRORTYPE SoftAVC::setEncodeArgs(              break;          }      } - -    ps_encode_ip->u4_is_last = 0; - -    if (inputBufferHeader) { -        ps_encode_ip->u4_timestamp_high = (inputBufferHeader->nTimeStamp) >> 32; -        ps_encode_ip->u4_timestamp_low = (inputBufferHeader->nTimeStamp) & 0xFFFFFFFF; -    } -      return OMX_ErrorNone;  } @@ -1170,35 +1213,31 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {              return;          }      } -    if (mSignalledError || mSawInputEOS) { +    if (mSignalledError) {          return;      }      
List<BufferInfo *> &inQueue = getPortQueue(0);      List<BufferInfo *> &outQueue = getPortQueue(1); -    while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) { +    while (!mSawOutputEOS && !outQueue.empty()) { +          OMX_ERRORTYPE error;          ive_video_encode_ip_t s_encode_ip;          ive_video_encode_op_t s_encode_op; - -        BufferInfo *inputBufferInfo = *inQueue.begin(); -        OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader; -          BufferInfo *outputBufferInfo = *outQueue.begin();          OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader; -        if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { -            inQueue.erase(inQueue.begin()); -            inputBufferInfo->mOwnedByUs = false; -            notifyEmptyBufferDone(inputBufferHeader); +        BufferInfo *inputBufferInfo; +        OMX_BUFFERHEADERTYPE *inputBufferHeader; -            outputBufferHeader->nFilledLen = 0; -            outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS; - -            outQueue.erase(outQueue.begin()); -            outputBufferInfo->mOwnedByUs = false; -            notifyFillBufferDone(outputBufferHeader); +        if (mSawInputEOS) { +            inputBufferHeader = NULL; +            inputBufferInfo = NULL; +        } else if (!inQueue.empty()) { +            inputBufferInfo = *inQueue.begin(); +            inputBufferHeader = inputBufferInfo->mHeader; +        } else {              return;          } @@ -1208,6 +1247,10 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {          outputBufferHeader->nFilledLen = 0;          outputBufferHeader->nOffset = 0; +        if (inputBufferHeader != NULL) { +            outputBufferHeader->nFlags = inputBufferHeader->nFlags; +        } +          uint8_t *outPtr = (uint8_t *)outputBufferHeader->pBuffer;          if (!mSpsPpsHeaderReceived) { @@ -1231,10 +1274,13 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {              outputBufferHeader->nFlags = 
OMX_BUFFERFLAG_CODECCONFIG;              outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes; -            outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp; +            if (inputBufferHeader != NULL) { +                outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp; +            }              outQueue.erase(outQueue.begin());              outputBufferInfo->mOwnedByUs = false; +              DUMP_TO_FILE(                      mOutFile, outputBufferHeader->pBuffer,                      outputBufferHeader->nFilledLen); @@ -1252,14 +1298,24 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {              setFrameType(IV_IDR_FRAME);          } -        mPrevTimestampUs = inputBufferHeader->nTimeStamp; - -        if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { +        if ((inputBufferHeader != NULL) +                && (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS)) {              mSawInputEOS = true;          } +        /* In normal mode, store inputBufferInfo and this will be returned +           when encoder consumes this input */ +        if (!mInputDataIsMeta && (inputBufferInfo != NULL)) { +            for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) { +                if (NULL == mInputBufferInfo[i]) { +                    mInputBufferInfo[i] = inputBufferInfo; +                    break; +                } +            } +        }          error = setEncodeArgs(                  &s_encode_ip, &s_encode_op, inputBufferHeader, outputBufferHeader); +          if (error != OMX_ErrorNone) {              mSignalledError = true;              notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); @@ -1269,14 +1325,11 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {          DUMP_TO_FILE(                  mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],                  (mHeight * mStride * 3 / 2)); -        //DUMP_TO_FILE(mInFile, inputBufferHeader->pBuffer + inputBufferHeader->nOffset, -        //    
inputBufferHeader->nFilledLen);          GETTIME(&mTimeStart, NULL);          /* Compute time elapsed between end of previous decode()           * to start of current decode() */          TIME_DIFF(mTimeEnd, mTimeStart, timeDelay); -          status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op);          if (IV_SUCCESS != status) { @@ -1294,38 +1347,77 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {          ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,                  s_encode_op.s_out_buf.u4_bytes); +        /* In encoder frees up an input buffer, mark it as free */ +        if (s_encode_op.s_inp_buf.apv_bufs[0] != NULL) { +            if (mInputDataIsMeta) { +                for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; i++) { +                    if (mConversionBuffers[i] == s_encode_op.s_inp_buf.apv_bufs[0]) { +                        mConversionBuffersFree[i] = 1; +                        break; +                    } +                } +            } else { +                /* In normal mode, call EBD on inBuffeHeader that is freed by the codec */ +                for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) { +                    uint8_t *buf = NULL; +                    OMX_BUFFERHEADERTYPE *bufHdr = NULL; +                    if (mInputBufferInfo[i] != NULL) { +                        bufHdr = mInputBufferInfo[i]->mHeader; +                        buf = bufHdr->pBuffer + bufHdr->nOffset; +                    } +                    if (s_encode_op.s_inp_buf.apv_bufs[0] == buf) { +                        mInputBufferInfo[i]->mOwnedByUs = false; +                        notifyEmptyBufferDone(bufHdr); +                        mInputBufferInfo[i] = NULL; +                        break; +                    } +                } +            } +        } -        outputBufferHeader->nFlags = inputBufferHeader->nFlags;          outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes; -        
outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp; -        if (IV_IDR_FRAME -                == s_encode_op.u4_encoded_frame_type) { +        if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) {              outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;          } -        inQueue.erase(inQueue.begin()); -        inputBufferInfo->mOwnedByUs = false; +        if (inputBufferHeader != NULL) { +            inQueue.erase(inQueue.begin()); -        notifyEmptyBufferDone(inputBufferHeader); +            /* If in meta data, call EBD on input */ +            /* In case of normal mode, EBD will be done once encoder +            releases the input buffer */ +            if (mInputDataIsMeta) { +                inputBufferInfo->mOwnedByUs = false; +                notifyEmptyBufferDone(inputBufferHeader); +            } +        } -        if (mSawInputEOS) { +        if (s_encode_op.u4_is_last) {              outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS; +            mSawOutputEOS = true; +        } else { +            outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS;          } -        outputBufferInfo->mOwnedByUs = false; -        outQueue.erase(outQueue.begin()); - -        DUMP_TO_FILE( -                mOutFile, outputBufferHeader->pBuffer, -                outputBufferHeader->nFilledLen); -        notifyFillBufferDone(outputBufferHeader); +        if (outputBufferHeader->nFilledLen || s_encode_op.u4_is_last) { +            outputBufferHeader->nTimeStamp = s_encode_op.u4_timestamp_high; +            outputBufferHeader->nTimeStamp <<= 32; +            outputBufferHeader->nTimeStamp |= s_encode_op.u4_timestamp_low; +            outputBufferInfo->mOwnedByUs = false; +            outQueue.erase(outQueue.begin()); +            DUMP_TO_FILE(mOutFile, outputBufferHeader->pBuffer, +                    outputBufferHeader->nFilledLen); +            notifyFillBufferDone(outputBufferHeader); +        } +        if (s_encode_op.u4_is_last == 1) { +  
          return; +        }      }      return;  } -  }  // namespace android  android::SoftOMXComponent *createSoftOMXComponent( diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h index c4e26a9..4418a7f 100644 --- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h +++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h @@ -17,7 +17,7 @@  #ifndef __SOFT_AVC_ENC_H__  #define __SOFT_AVC_ENC_H__ -#include <media/stagefright/MediaBuffer.h> +  #include <media/stagefright/foundation/ABase.h>  #include <utils/Vector.h> @@ -25,14 +25,14 @@  namespace android { -struct MediaBuffer; - +#define MAX_INPUT_BUFFER_HEADERS 4 +#define MAX_CONVERSION_BUFFERS   4  #define CODEC_MAX_CORES          4  #define LEN_STATUS_BUFFER        (10  * 1024)  #define MAX_VBV_BUFF_SIZE        (120 * 16384)  #define MAX_NUM_IO_BUFS           3 -#define DEFAULT_MAX_REF_FRM         1 +#define DEFAULT_MAX_REF_FRM         2  #define DEFAULT_MAX_REORDER_FRM     0  #define DEFAULT_QP_MIN              10  #define DEFAULT_QP_MAX              40 @@ -57,7 +57,7 @@ struct MediaBuffer;  #define DEFAULT_TGT_FRAME_RATE      30  #define DEFAULT_MAX_WD              1920  #define DEFAULT_MAX_HT              1920 -#define DEFAULT_MAX_LEVEL           40 +#define DEFAULT_MAX_LEVEL           41  #define DEFAULT_STRIDE              0  #define DEFAULT_WD                  1280  #define DEFAULT_HT                  720 @@ -88,6 +88,7 @@ struct MediaBuffer;  #define DEFAULT_QPEL                1  #define DEFAULT_I4                  1  #define DEFAULT_EPROFILE            IV_PROFILE_BASE +#define DEFAULT_ENTROPY_MODE        0  #define DEFAULT_SLICE_MODE          IVE_SLICE_MODE_NONE  #define DEFAULT_SLICE_PARAM         256  #define DEFAULT_ARCH                ARCH_ARM_A9Q @@ -149,8 +150,6 @@ private:      int32_t  mStride; -    uint32_t mFrameRate; -      struct timeval mTimeStart;   // Time at the start of decode()      struct timeval mTimeEnd;     // Time at the end of 
decode() @@ -167,32 +166,32 @@ private:      IV_COLOR_FORMAT_T mIvVideoColorFormat; -    int32_t  mIDRFrameRefreshIntervalInSec;      IV_PROFILE_T mAVCEncProfile;      WORD32   mAVCEncLevel; -    int64_t  mNumInputFrames; -    int64_t  mPrevTimestampUs;      bool     mStarted;      bool     mSpsPpsHeaderReceived;      bool     mSawInputEOS; +    bool     mSawOutputEOS;      bool     mSignalledError;      bool     mIntra4x4;      bool     mEnableFastSad;      bool     mEnableAltRef; -    bool    mReconEnable; -    bool    mPSNREnable; +    bool     mReconEnable; +    bool     mPSNREnable; +    bool     mEntropyMode;      IVE_SPEED_CONFIG     mEncSpeed; -    uint8_t *mConversionBuffer; - +    uint8_t *mConversionBuffers[MAX_CONVERSION_BUFFERS]; +    bool     mConversionBuffersFree[MAX_CONVERSION_BUFFERS]; +    BufferInfo *mInputBufferInfo[MAX_INPUT_BUFFER_HEADERS];      iv_obj_t *mCodecCtx;         // Codec context      iv_mem_rec_t *mMemRecords;   // Memory records requested by the codec      size_t mNumMemRecords;       // Number of memory records requested by codec      size_t mNumCores;            // Number of cores used by the codec      UWORD32 mHeaderGenerated; - +    UWORD32 mBframes;      IV_ARCH_T mArch;      IVE_SLICE_MODE_T mSliceMode;      UWORD32 mSliceParam; @@ -203,7 +202,7 @@ private:      IVE_AIR_MODE_T mAIRMode;      UWORD32 mAIRRefreshPeriod; -    OMX_ERRORTYPE initEncParams(); +    void initEncParams();      OMX_ERRORTYPE initEncoder();      OMX_ERRORTYPE releaseEncoder(); @@ -292,6 +291,8 @@ private:          fclose(fp);                                     \      } else {                                            \          ALOGD("Could not write to file %s", m_filename);\ +        if (fp != NULL)                                 \ +            fclose(fp);                                 \      }                                                   \  }  #else /* FILE_DUMP_ENABLE */ diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp 
b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp index cddd176..4b2ec1c 100644 --- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp +++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp @@ -21,7 +21,6 @@  #include "ihevc_typedefs.h"  #include "iv.h"  #include "ivd.h" -#include "ithread.h"  #include "ihevcd_cxa.h"  #include "SoftHEVC.h" @@ -82,7 +81,10 @@ SoftHEVC::SoftHEVC(      initPorts(              kNumBuffers, max(kMaxOutputBufferSize / kMinCompressionRatio, (size_t)INPUT_BUF_SIZE),              kNumBuffers, CODEC_MIME_TYPE, kMinCompressionRatio); -    CHECK_EQ(initDecoder(), (status_t)OK); +} + +status_t SoftHEVC::init() { +    return initDecoder();  }  SoftHEVC::~SoftHEVC() { @@ -143,7 +145,7 @@ status_t SoftHEVC::setParams(size_t stride) {      s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);      s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t); -    ALOGV("Set the run-time (dynamic) parameters stride = %u", stride); +    ALOGV("Set the run-time (dynamic) parameters stride = %zu", stride);      status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,              (void *)&s_ctl_op); @@ -408,7 +410,7 @@ status_t SoftHEVC::initDecoder() {      uint32_t bufferSize = displaySizeY * 3 / 2;      mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);      if (NULL == mFlushOutBuffer) { -        ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize); +        ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);          return NO_MEMORY;      } @@ -766,5 +768,10 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {  android::SoftOMXComponent *createSoftOMXComponent(const char *name,          const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,          OMX_COMPONENTTYPE **component) { -    return new android::SoftHEVC(name, callbacks, appData, component); +    android::SoftHEVC *codec = new android::SoftHEVC(name, callbacks, appData, component); +    if (codec->init() != android::OK) { +        
android::sp<android::SoftOMXComponent> release = codec; +        return NULL; +    } +    return codec;  } diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h index a91f528..c6344cf 100644 --- a/media/libstagefright/codecs/hevcdec/SoftHEVC.h +++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h @@ -56,6 +56,8 @@ struct SoftHEVC: public SoftVideoDecoderOMXComponent {      SoftHEVC(const char *name, const OMX_CALLBACKTYPE *callbacks,              OMX_PTR appData, OMX_COMPONENTTYPE **component); +    status_t init(); +  protected:      virtual ~SoftHEVC(); diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp index ede645c..0c1a149 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp @@ -103,34 +103,41 @@ void SoftMPEG4::onQueueFilled(OMX_U32 /* portIndex */) {      while (!inQueue.empty() && outQueue.size() == kNumOutputBuffers) {          BufferInfo *inInfo = *inQueue.begin();          OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; +        if (inHeader == NULL) { +            inQueue.erase(inQueue.begin()); +            inInfo->mOwnedByUs = false; +            continue; +        }          PortInfo *port = editPortInfo(1);          OMX_BUFFERHEADERTYPE *outHeader =              port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader; -        if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) { +        if (inHeader->nFilledLen == 0) {              inQueue.erase(inQueue.begin());              inInfo->mOwnedByUs = false;              notifyEmptyBufferDone(inHeader);              ++mInputBufferCount; -            outHeader->nFilledLen = 0; -            outHeader->nFlags = OMX_BUFFERFLAG_EOS; +            if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { +                outHeader->nFilledLen = 0; +                outHeader->nFlags = 
OMX_BUFFERFLAG_EOS; -            List<BufferInfo *>::iterator it = outQueue.begin(); -            while ((*it)->mHeader != outHeader) { -                ++it; -            } +                List<BufferInfo *>::iterator it = outQueue.begin(); +                while ((*it)->mHeader != outHeader) { +                    ++it; +                } -            BufferInfo *outInfo = *it; -            outInfo->mOwnedByUs = false; -            outQueue.erase(it); -            outInfo = NULL; +                BufferInfo *outInfo = *it; +                outInfo->mOwnedByUs = false; +                outQueue.erase(it); +                outInfo = NULL; -            notifyFillBufferDone(outHeader); -            outHeader = NULL; +                notifyFillBufferDone(outHeader); +                outHeader = NULL; +            }              return;          } diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp index 5396022..f743b1c 100644 --- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp +++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp @@ -283,6 +283,11 @@ void SoftMP3::onQueueFilled(OMX_U32 /* portIndex */) {              } else {                  // This is recoverable, just ignore the current frame and                  // play silence instead. + +                // TODO: should we skip silence (and consume input data) +                // if mIsFirst is true as we may not have a valid +                // mConfig->samplingRate and mConfig->num_channels? 
+                ALOGV_IF(mIsFirst, "insufficient data for first frame, sending silence");                  memset(outHeader->pBuffer,                         0,                         mConfig->outputFrameSize * sizeof(int16_t)); @@ -317,8 +322,7 @@ void SoftMP3::onQueueFilled(OMX_U32 /* portIndex */) {          }          outHeader->nTimeStamp = -            mAnchorTimeUs -                + (mNumFramesOutput * 1000000ll) / mConfig->samplingRate; +            mAnchorTimeUs + (mNumFramesOutput * 1000000ll) / mSamplingRate;          if (inHeader) {              CHECK_GE(inHeader->nFilledLen, mConfig->inputBufferUsedLength); diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp index 7e98928..32e5da7 100644 --- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp +++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp @@ -21,7 +21,6 @@  #include "iv_datatypedef.h"  #include "iv.h"  #include "ivd.h" -#include "ithread.h"  #include "impeg2d.h"  #include "SoftMPEG2.h" @@ -156,7 +155,7 @@ status_t SoftMPEG2::setParams(size_t stride) {      s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);      s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t); -    ALOGV("Set the run-time (dynamic) parameters stride = %u", stride); +    ALOGV("Set the run-time (dynamic) parameters stride = %zu", stride);      status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op);      if (status != IV_SUCCESS) { @@ -396,7 +395,7 @@ status_t SoftMPEG2::initDecoder() {      uint32_t bufferSize = displaySizeY * 3 / 2;      mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);      if (NULL == mFlushOutBuffer) { -        ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize); +        ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);          return NO_MEMORY;      } diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp 
b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index 8a95643..e161fb8 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -38,7 +38,10 @@ SoftVPX::SoftVPX(              NULL /* profileLevels */, 0 /* numProfileLevels */,              320 /* width */, 240 /* height */, callbacks, appData, component),        mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9), +      mEOSStatus(INPUT_DATA_AVAILABLE),        mCtx(NULL), +      mFrameParallelMode(false), +      mTimeStampIdx(0),        mImg(NULL) {      // arbitrary from avc/hevc as vpx does not specify a min compression ratio      const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4; @@ -51,9 +54,7 @@ SoftVPX::SoftVPX(  }  SoftVPX::~SoftVPX() { -    vpx_codec_destroy((vpx_codec_ctx_t *)mCtx); -    delete (vpx_codec_ctx_t *)mCtx; -    mCtx = NULL; +    destroyDecoder();  }  static int GetCPUCoreCount() { @@ -73,12 +74,19 @@ status_t SoftVPX::initDecoder() {      mCtx = new vpx_codec_ctx_t;      vpx_codec_err_t vpx_err;      vpx_codec_dec_cfg_t cfg; +    vpx_codec_flags_t flags;      memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t)); +    memset(&flags, 0, sizeof(vpx_codec_flags_t));      cfg.threads = GetCPUCoreCount(); + +    if (mFrameParallelMode) { +        flags |= VPX_CODEC_USE_FRAME_THREADING; +    } +      if ((vpx_err = vpx_codec_dec_init(                  (vpx_codec_ctx_t *)mCtx,                   mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo, -                 &cfg, 0))) { +                 &cfg, flags))) {          ALOGE("on2 decoder failed to initialize. 
(%d)", vpx_err);          return UNKNOWN_ERROR;      } @@ -86,86 +94,155 @@ status_t SoftVPX::initDecoder() {      return OK;  } +status_t SoftVPX::destroyDecoder() { +    vpx_codec_destroy((vpx_codec_ctx_t *)mCtx); +    delete (vpx_codec_ctx_t *)mCtx; +    mCtx = NULL; +    return OK; +} + +bool SoftVPX::outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset) { +    List<BufferInfo *> &inQueue = getPortQueue(0); +    List<BufferInfo *> &outQueue = getPortQueue(1); +    BufferInfo *outInfo = NULL; +    OMX_BUFFERHEADERTYPE *outHeader = NULL; +    vpx_codec_iter_t iter = NULL; + +    if (flushDecoder && mFrameParallelMode) { +        // Flush decoder by passing NULL data ptr and 0 size. +        // Ideally, this should never fail. +        if (vpx_codec_decode((vpx_codec_ctx_t *)mCtx, NULL, 0, NULL, 0)) { +            ALOGE("Failed to flush on2 decoder."); +            return false; +        } +    } + +    if (!display) { +        if (!flushDecoder) { +            ALOGE("Invalid operation."); +            return false; +        } +        // Drop all the decoded frames in decoder. 
+        while ((mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter))) { +        } +        return true; +    } + +    while (!outQueue.empty()) { +        if (mImg == NULL) { +            mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter); +            if (mImg == NULL) { +                break; +            } +        } +        uint32_t width = mImg->d_w; +        uint32_t height = mImg->d_h; +        outInfo = *outQueue.begin(); +        outHeader = outInfo->mHeader; +        CHECK_EQ(mImg->fmt, VPX_IMG_FMT_I420); +        handlePortSettingsChange(portWillReset, width, height); +        if (*portWillReset) { +            return true; +        } + +        outHeader->nOffset = 0; +        outHeader->nFlags = 0; +        outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2; +        outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv; + +        uint8_t *dst = outHeader->pBuffer; +        const uint8_t *srcY = (const uint8_t *)mImg->planes[VPX_PLANE_Y]; +        const uint8_t *srcU = (const uint8_t *)mImg->planes[VPX_PLANE_U]; +        const uint8_t *srcV = (const uint8_t *)mImg->planes[VPX_PLANE_V]; +        size_t srcYStride = mImg->stride[VPX_PLANE_Y]; +        size_t srcUStride = mImg->stride[VPX_PLANE_U]; +        size_t srcVStride = mImg->stride[VPX_PLANE_V]; +        copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride); + +        mImg = NULL; +        outInfo->mOwnedByUs = false; +        outQueue.erase(outQueue.begin()); +        outInfo = NULL; +        notifyFillBufferDone(outHeader); +        outHeader = NULL; +    } + +    if (!eos) { +        return true; +    } + +    if (!outQueue.empty()) { +        outInfo = *outQueue.begin(); +        outQueue.erase(outQueue.begin()); +        outHeader = outInfo->mHeader; +        outHeader->nTimeStamp = 0; +        outHeader->nFilledLen = 0; +        outHeader->nFlags = OMX_BUFFERFLAG_EOS; +        outInfo->mOwnedByUs = false; +       
 notifyFillBufferDone(outHeader); +        mEOSStatus = OUTPUT_FRAMES_FLUSHED; +    } +    return true; +} +  void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) { -    if (mOutputPortSettingsChange != NONE) { +    if (mOutputPortSettingsChange != NONE || mEOSStatus == OUTPUT_FRAMES_FLUSHED) {          return;      }      List<BufferInfo *> &inQueue = getPortQueue(0);      List<BufferInfo *> &outQueue = getPortQueue(1);      bool EOSseen = false; +    vpx_codec_err_t err; +    bool portWillReset = false; + +    while ((mEOSStatus == INPUT_EOS_SEEN || !inQueue.empty()) +            && !outQueue.empty()) { +        // Output the pending frames that left from last port reset or decoder flush. +        if (mEOSStatus == INPUT_EOS_SEEN || mImg != NULL) { +            if (!outputBuffers( +                     mEOSStatus == INPUT_EOS_SEEN, true /* display */, +                     mEOSStatus == INPUT_EOS_SEEN, &portWillReset)) { +                ALOGE("on2 decoder failed to output frame."); +                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); +                return; +            } +            if (portWillReset || mEOSStatus == OUTPUT_FRAMES_FLUSHED || +                    mEOSStatus == INPUT_EOS_SEEN) { +                return; +            } +        } -    while (!inQueue.empty() && !outQueue.empty()) {          BufferInfo *inInfo = *inQueue.begin();          OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; +        mTimeStamps[mTimeStampIdx] = inHeader->nTimeStamp;          BufferInfo *outInfo = *outQueue.begin();          OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; -          if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { +            mEOSStatus = INPUT_EOS_SEEN;              EOSseen = true; -            if (inHeader->nFilledLen == 0) { -                inQueue.erase(inQueue.begin()); -                inInfo->mOwnedByUs = false; -                notifyEmptyBufferDone(inHeader); - -                outHeader->nFilledLen = 0; -                
outHeader->nFlags = OMX_BUFFERFLAG_EOS; - -                outQueue.erase(outQueue.begin()); -                outInfo->mOwnedByUs = false; -                notifyFillBufferDone(outHeader); -                return; -            }          } -        if (mImg == NULL) { -            if (vpx_codec_decode( -                        (vpx_codec_ctx_t *)mCtx, -                        inHeader->pBuffer + inHeader->nOffset, -                        inHeader->nFilledLen, -                        NULL, -                        0)) { -                ALOGE("on2 decoder failed to decode frame."); - -                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); -                return; -            } -            vpx_codec_iter_t iter = NULL; -            mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter); +        if (inHeader->nFilledLen > 0 && +            vpx_codec_decode((vpx_codec_ctx_t *)mCtx, +                              inHeader->pBuffer + inHeader->nOffset, +                              inHeader->nFilledLen, +                              &mTimeStamps[mTimeStampIdx], 0)) { +            ALOGE("on2 decoder failed to decode frame."); +            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); +            return;          } +        mTimeStampIdx = (mTimeStampIdx + 1) % kNumBuffers; -        if (mImg != NULL) { -            CHECK_EQ(mImg->fmt, IMG_FMT_I420); - -            uint32_t width = mImg->d_w; -            uint32_t height = mImg->d_h; -            bool portWillReset = false; -            handlePortSettingsChange(&portWillReset, width, height); -            if (portWillReset) { -                return; -            } - -            outHeader->nOffset = 0; -            outHeader->nFilledLen = (width * height * 3) / 2; -            outHeader->nFlags = EOSseen ? 
OMX_BUFFERFLAG_EOS : 0; -            outHeader->nTimeStamp = inHeader->nTimeStamp; - -            uint8_t *dst = outHeader->pBuffer; -            const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y]; -            const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U]; -            const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V]; -            size_t srcYStride = mImg->stride[PLANE_Y]; -            size_t srcUStride = mImg->stride[PLANE_U]; -            size_t srcVStride = mImg->stride[PLANE_V]; -            copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride); - -            mImg = NULL; -            outInfo->mOwnedByUs = false; -            outQueue.erase(outQueue.begin()); -            outInfo = NULL; -            notifyFillBufferDone(outHeader); -            outHeader = NULL; +        if (!outputBuffers( +                 EOSseen /* flushDecoder */, true /* display */, EOSseen, &portWillReset)) { +            ALOGE("on2 decoder failed to output frame."); +            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); +            return; +        } +        if (portWillReset) { +            return;          }          inInfo->mOwnedByUs = false; @@ -176,6 +253,30 @@ void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {      }  } +void SoftVPX::onPortFlushCompleted(OMX_U32 portIndex) { +    if (portIndex == kInputPortIndex) { +        bool portWillReset = false; +        if (!outputBuffers( +                 true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) { +            ALOGE("Failed to flush decoder."); +            notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); +            return; +        } +        mEOSStatus = INPUT_DATA_AVAILABLE; +    } +} + +void SoftVPX::onReset() { +    bool portWillReset = false; +    if (!outputBuffers( +             true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) { +        ALOGW("Failed to flush 
decoder. Try to hard reset decoder"); +        destroyDecoder(); +        initDecoder(); +    } +    mEOSStatus = INPUT_DATA_AVAILABLE; +} +  }  // namespace android  android::SoftOMXComponent *createSoftOMXComponent( diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h index 8f68693..8ccbae2 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.h +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h @@ -38,6 +38,8 @@ protected:      virtual ~SoftVPX();      virtual void onQueueFilled(OMX_U32 portIndex); +    virtual void onPortFlushCompleted(OMX_U32 portIndex); +    virtual void onReset();  private:      enum { @@ -49,11 +51,21 @@ private:          MODE_VP9      } mMode; -    void *mCtx; +    enum { +        INPUT_DATA_AVAILABLE,  // VPX component is ready to decode data. +        INPUT_EOS_SEEN,        // VPX component saw EOS and is flushing On2 decoder. +        OUTPUT_FRAMES_FLUSHED  // VPX component finished flushing On2 decoder. +    } mEOSStatus; +    void *mCtx; +    bool mFrameParallelMode;  // Frame parallel is only supported by VP9 decoder. 
+    OMX_TICKS mTimeStamps[kNumBuffers]; +    uint8_t mTimeStampIdx;      vpx_image_t *mImg;      status_t initDecoder(); +    status_t destroyDecoder(); +    bool outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset);      DISALLOW_EVIL_CONSTRUCTORS(SoftVPX);  }; diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp index 970acf3..e654843 100644 --- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp +++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp @@ -661,7 +661,8 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 /* portIndex */) {          BufferInfo *outputBufferInfo = *outputBufferInfoQueue.begin();          OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader; -        if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { +        if ((inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) && +                inputBufferHeader->nFilledLen == 0) {              inputBufferInfoQueue.erase(inputBufferInfoQueue.begin());              inputBufferInfo->mOwnedByUs = false;              notifyEmptyBufferDone(inputBufferHeader); @@ -762,6 +763,9 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 /* portIndex */) {                         encoded_packet->data.frame.sz);                  outputBufferInfo->mOwnedByUs = false;                  outputBufferInfoQueue.erase(outputBufferInfoQueue.begin()); +                if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { +                    outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS; +                }                  notifyFillBufferDone(outputBufferHeader);              }          } diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp index ed52a37..cb10bce 100644 --- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp +++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp @@ -345,9 +345,15 @@ void SoftOpus::onQueueFilled(OMX_U32 portIndex) 
{              }              uint8_t channel_mapping[kMaxChannels] = {0}; -            memcpy(&channel_mapping, -                   kDefaultOpusChannelLayout, -                   kMaxChannelsWithDefaultLayout); +            if (mHeader->channels <= kMaxChannelsWithDefaultLayout) { +                memcpy(&channel_mapping, +                       kDefaultOpusChannelLayout, +                       kMaxChannelsWithDefaultLayout); +            } else { +                memcpy(&channel_mapping, +                       mHeader->stream_map, +                       mHeader->channels); +            }              int status = OPUS_INVALID_STATE;              mDecoder = opus_multistream_decoder_create(kRate, @@ -397,6 +403,14 @@ void SoftOpus::onQueueFilled(OMX_U32 portIndex) {          BufferInfo *inInfo = *inQueue.begin();          OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; +        // Ignore CSD re-submissions. +        if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) { +            inQueue.erase(inQueue.begin()); +            inInfo->mOwnedByUs = false; +            notifyEmptyBufferDone(inHeader); +            return; +        } +          BufferInfo *outInfo = *outQueue.begin();          OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk index 59a64ba..4f7c48f 100644 --- a/media/libstagefright/colorconversion/Android.mk +++ b/media/libstagefright/colorconversion/Android.mk @@ -9,6 +9,9 @@ LOCAL_C_INCLUDES := \          $(TOP)/frameworks/native/include/media/openmax \          $(TOP)/hardware/msm7k +LOCAL_CFLAGS += -Werror +LOCAL_CLANG := true +  LOCAL_MODULE:= libstagefright_color_conversion  include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp index 4e75250..e92c192 100644 --- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp +++ 
b/media/libstagefright/colorconversion/SoftwareRenderer.cpp @@ -38,7 +38,8 @@ static int ALIGN(int x, int y) {      return (x + y - 1) & ~(y - 1);  } -SoftwareRenderer::SoftwareRenderer(const sp<ANativeWindow> &nativeWindow) +SoftwareRenderer::SoftwareRenderer( +        const sp<ANativeWindow> &nativeWindow, int32_t rotation)      : mColorFormat(OMX_COLOR_FormatUnused),        mConverter(NULL),        mYUVMode(None), @@ -50,7 +51,8 @@ SoftwareRenderer::SoftwareRenderer(const sp<ANativeWindow> &nativeWindow)        mCropRight(0),        mCropBottom(0),        mCropWidth(0), -      mCropHeight(0) { +      mCropHeight(0), +      mRotationDegrees(rotation) {  }  SoftwareRenderer::~SoftwareRenderer() { @@ -98,33 +100,49 @@ void SoftwareRenderer::resetFormatIfChanged(const sp<AMessage> &format) {      mCropWidth = mCropRight - mCropLeft + 1;      mCropHeight = mCropBottom - mCropTop + 1; -    int halFormat; -    size_t bufWidth, bufHeight; - -    switch (mColorFormat) { -        case OMX_COLOR_FormatYUV420Planar: -        case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: -        case OMX_COLOR_FormatYUV420SemiPlanar: -        { -            if (!runningInEmulator()) { +    // by default convert everything to RGB565 +    int halFormat = HAL_PIXEL_FORMAT_RGB_565; +    size_t bufWidth = mCropWidth; +    size_t bufHeight = mCropHeight; + +    // hardware has YUV12 and RGBA8888 support, so convert known formats +    if (!runningInEmulator()) { +        switch (mColorFormat) { +            case OMX_COLOR_FormatYUV420Planar: +            case OMX_COLOR_FormatYUV420SemiPlanar: +            case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: +            {                  halFormat = HAL_PIXEL_FORMAT_YV12;                  bufWidth = (mCropWidth + 1) & ~1;                  bufHeight = (mCropHeight + 1) & ~1;                  break;              } - -            // fall through. 
+            case OMX_COLOR_Format24bitRGB888: +            { +                halFormat = HAL_PIXEL_FORMAT_RGB_888; +                bufWidth = (mCropWidth + 1) & ~1; +                bufHeight = (mCropHeight + 1) & ~1; +                break; +            } +            case OMX_COLOR_Format32bitARGB8888: +            case OMX_COLOR_Format32BitRGBA8888: +            { +                halFormat = HAL_PIXEL_FORMAT_RGBA_8888; +                bufWidth = (mCropWidth + 1) & ~1; +                bufHeight = (mCropHeight + 1) & ~1; +                break; +            } +            default: +            { +                break; +            }          } +    } -        default: -            halFormat = HAL_PIXEL_FORMAT_RGB_565; -            bufWidth = mCropWidth; -            bufHeight = mCropHeight; - -            mConverter = new ColorConverter( -                    mColorFormat, OMX_COLOR_Format16bitRGB565); -            CHECK(mConverter->isValid()); -            break; +    if (halFormat == HAL_PIXEL_FORMAT_RGB_565) { +        mConverter = new ColorConverter( +                mColorFormat, OMX_COLOR_Format16bitRGB565); +        CHECK(mConverter->isValid());      }      CHECK(mNativeWindow != NULL); @@ -165,7 +183,7 @@ void SoftwareRenderer::resetFormatIfChanged(const sp<AMessage> &format) {      int32_t rotationDegrees;      if (!format->findInt32("rotation-degrees", &rotationDegrees)) { -        rotationDegrees = 0; +        rotationDegrees = mRotationDegrees;      }      uint32_t transform;      switch (rotationDegrees) { @@ -180,17 +198,29 @@ void SoftwareRenderer::resetFormatIfChanged(const sp<AMessage> &format) {                  mNativeWindow.get(), transform));  } -void SoftwareRenderer::render( -        const void *data, size_t /*size*/, int64_t timestampNs, +void SoftwareRenderer::clearTracker() { +    mRenderTracker.clear(-1 /* lastRenderTimeNs */); +} + +std::list<FrameRenderTracker::Info> SoftwareRenderer::render( +        const void *data, size_t 
size, int64_t mediaTimeUs, nsecs_t renderTimeNs,          void* /*platformPrivate*/, const sp<AMessage>& format) {      resetFormatIfChanged(format); +    FrameRenderTracker::Info *info = NULL;      ANativeWindowBuffer *buf; -    int err; -    if ((err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), -            &buf)) != 0) { +    int fenceFd = -1; +    int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); +    if (err == 0 && fenceFd >= 0) { +        info = mRenderTracker.updateInfoForDequeuedBuffer(buf, fenceFd, 0); +        sp<Fence> fence = new Fence(fenceFd); +        err = fence->waitForever("SoftwareRenderer::render"); +    } +    if (err != 0) {          ALOGW("Surface::dequeueBuffer returned error %d", err); -        return; +        // complete (drop) dequeued frame if fence wait failed; otherwise, +        // this returns an empty list as no frames should have rendered and not yet returned. +        return mRenderTracker.checkFencesAndGetRenderedFrames(info, false /* dropIncomplete */);      }      GraphicBufferMapper &mapper = GraphicBufferMapper::get(); @@ -201,6 +231,8 @@ void SoftwareRenderer::render(      CHECK_EQ(0, mapper.lock(                  buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst)); +    // TODO move the other conversions also into ColorConverter, and +    // fix cropping issues (when mCropLeft/Top != 0 or mWidth != mCropWidth)      if (mConverter) {          mConverter->convert(                  data, @@ -210,8 +242,12 @@ void SoftwareRenderer::render(                  buf->stride, buf->height,                  0, 0, mCropWidth - 1, mCropHeight - 1);      } else if (mColorFormat == OMX_COLOR_FormatYUV420Planar) { +        if ((size_t)mWidth * mHeight * 3 / 2 > size) { +            goto skip_copying; +        }          const uint8_t *src_y = (const uint8_t *)data; -        const uint8_t *src_u = (const uint8_t *)data + mWidth * mHeight; +        const uint8_t *src_u = +                
(const uint8_t *)data + mWidth * mHeight;          const uint8_t *src_v = src_u + (mWidth / 2 * mHeight / 2);          uint8_t *dst_y = (uint8_t *)dst; @@ -239,11 +275,12 @@ void SoftwareRenderer::render(          }      } else if (mColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar              || mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { -        const uint8_t *src_y = -            (const uint8_t *)data; - -        const uint8_t *src_uv = -            (const uint8_t *)data + mWidth * (mHeight - mCropTop / 2); +        if ((size_t)mWidth * mHeight * 3 / 2 > size) { +            goto skip_copying; +        } +        const uint8_t *src_y = (const uint8_t *)data; +        const uint8_t *src_uv = (const uint8_t *)data +                + mWidth * (mHeight - mCropTop / 2);          uint8_t *dst_y = (uint8_t *)dst; @@ -271,22 +308,69 @@ void SoftwareRenderer::render(              dst_u += dst_c_stride;              dst_v += dst_c_stride;          } +    } else if (mColorFormat == OMX_COLOR_Format24bitRGB888) { +        if ((size_t)mWidth * mHeight * 3 > size) { +            goto skip_copying; +        } +        uint8_t* srcPtr = (uint8_t*)data; +        uint8_t* dstPtr = (uint8_t*)dst; + +        for (size_t y = 0; y < (size_t)mCropHeight; ++y) { +            memcpy(dstPtr, srcPtr, mCropWidth * 3); +            srcPtr += mWidth * 3; +            dstPtr += buf->stride * 3; +        } +    } else if (mColorFormat == OMX_COLOR_Format32bitARGB8888) { +        if ((size_t)mWidth * mHeight * 4 > size) { +            goto skip_copying; +        } +        uint8_t *srcPtr, *dstPtr; + +        for (size_t y = 0; y < (size_t)mCropHeight; ++y) { +            srcPtr = (uint8_t*)data + mWidth * 4 * y; +            dstPtr = (uint8_t*)dst + buf->stride * 4 * y; +            for (size_t x = 0; x < (size_t)mCropWidth; ++x) { +                uint8_t a = *srcPtr++; +                for (size_t i = 0; i < 3; ++i) {   // copy RGB +                    *dstPtr++ = *srcPtr++; 
+                } +                *dstPtr++ = a;  // alpha last (ARGB to RGBA) +            } +        } +    } else if (mColorFormat == OMX_COLOR_Format32BitRGBA8888) { +        if ((size_t)mWidth * mHeight * 4 > size) { +            goto skip_copying; +        } +        uint8_t* srcPtr = (uint8_t*)data; +        uint8_t* dstPtr = (uint8_t*)dst; + +        for (size_t y = 0; y < (size_t)mCropHeight; ++y) { +            memcpy(dstPtr, srcPtr, mCropWidth * 4); +            srcPtr += mWidth * 4; +            dstPtr += buf->stride * 4; +        }      } else {          LOG_ALWAYS_FATAL("bad color format %#x", mColorFormat);      } +skip_copying:      CHECK_EQ(0, mapper.unlock(buf->handle)); -    if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(), -            timestampNs)) != 0) { -        ALOGW("Surface::set_buffers_timestamp returned error %d", err); +    if (renderTimeNs >= 0) { +        if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(), +                renderTimeNs)) != 0) { +            ALOGW("Surface::set_buffers_timestamp returned error %d", err); +        }      } -    if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf, -            -1)) != 0) { +    if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf, -1)) != 0) {          ALOGW("Surface::queueBuffer returned error %d", err); +    } else { +        mRenderTracker.onFrameQueued(mediaTimeUs, (GraphicBuffer *)buf, Fence::NO_FENCE);      } +      buf = NULL; +    return mRenderTracker.checkFencesAndGetRenderedFrames(info, info != NULL /* dropIncomplete */);  }  }  // namespace android diff --git a/media/libstagefright/filters/Android.mk b/media/libstagefright/filters/Android.mk new file mode 100644 index 0000000..179f054 --- /dev/null +++ b/media/libstagefright/filters/Android.mk @@ -0,0 +1,28 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ +        ColorConvert.cpp          \ +        GraphicBufferListener.cpp \ +        
IntrinsicBlurFilter.cpp   \ +        MediaFilter.cpp           \ +        RSFilter.cpp              \ +        SaturationFilter.cpp      \ +        saturationARGB.rs         \ +        SimpleFilter.cpp          \ +        ZeroFilter.cpp + +LOCAL_C_INCLUDES := \ +        $(TOP)/frameworks/native/include/media/openmax \ +        $(TOP)/frameworks/rs/cpp \ +        $(TOP)/frameworks/rs \ + +intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,) +LOCAL_C_INCLUDES += $(intermediates) + +LOCAL_CFLAGS += -Wno-multichar -Werror -Wall +LOCAL_CLANG := true + +LOCAL_MODULE:= libstagefright_mediafilter + +include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/filters/ColorConvert.cpp b/media/libstagefright/filters/ColorConvert.cpp new file mode 100644 index 0000000..a8d5dd2 --- /dev/null +++ b/media/libstagefright/filters/ColorConvert.cpp @@ -0,0 +1,111 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ColorConvert.h" + +#ifndef max +#define max(a,b) ((a) > (b) ? (a) : (b)) +#endif +#ifndef min +#define min(a,b) ((a) < (b) ? 
(a) : (b)) +#endif + +namespace android { + +void YUVToRGB( +        int32_t y, int32_t u, int32_t v, +        int32_t* r, int32_t* g, int32_t* b) { +    y -= 16; +    u -= 128; +    v -= 128; + +    *b = 1192 * y + 2066 * u; +    *g = 1192 * y - 833 * v - 400 * u; +    *r = 1192 * y + 1634 * v; + +    *r = min(262143, max(0, *r)); +    *g = min(262143, max(0, *g)); +    *b = min(262143, max(0, *b)); + +    *r >>= 10; +    *g >>= 10; +    *b >>= 10; +} + +void convertYUV420spToARGB( +        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, +        uint8_t *dest) { +    const int32_t bytes_per_pixel = 2; + +    for (int32_t i = 0; i < height; i++) { +        for (int32_t j = 0; j < width; j++) { +            int32_t y = *(pY + i * width + j); +            int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2)); +            int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1); + +            int32_t r, g, b; +            YUVToRGB(y, u, v, &r, &g, &b); + +            *dest++ = 0xFF; +            *dest++ = r; +            *dest++ = g; +            *dest++ = b; +        } +    } +} + +void convertYUV420spToRGB888( +        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, +        uint8_t *dest) { +    const int32_t bytes_per_pixel = 2; + +    for (int32_t i = 0; i < height; i++) { +        for (int32_t j = 0; j < width; j++) { +            int32_t y = *(pY + i * width + j); +            int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2)); +            int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1); + +            int32_t r, g, b; +            YUVToRGB(y, u, v, &r, &g, &b); + +            *dest++ = r; +            *dest++ = g; +            *dest++ = b; +        } +    } +} + +// HACK - not even slightly optimized +// TODO: remove when RGBA support is added to SoftwareRenderer +void convertRGBAToARGB( +        uint8_t *src, int32_t width, int32_t height, uint32_t stride, +        uint8_t *dest) { +    for 
(int32_t i = 0; i < height; ++i) { +        for (int32_t j = 0; j < width; ++j) { +            uint8_t r = *src++; +            uint8_t g = *src++; +            uint8_t b = *src++; +            uint8_t a = *src++; +            *dest++ = a; +            *dest++ = r; +            *dest++ = g; +            *dest++ = b; +        } +        src += (stride - width) * 4; +    } +} + +}   // namespace android diff --git a/media/libstagefright/filters/ColorConvert.h b/media/libstagefright/filters/ColorConvert.h new file mode 100644 index 0000000..13faa02 --- /dev/null +++ b/media/libstagefright/filters/ColorConvert.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef COLOR_CONVERT_H_ +#define COLOR_CONVERT_H_ + +#include <inttypes.h> + +namespace android { + +void YUVToRGB( +        int32_t y, int32_t u, int32_t v, +        int32_t* r, int32_t* g, int32_t* b); + +void convertYUV420spToARGB( +        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, +        uint8_t *dest); + +void convertYUV420spToRGB888( +        uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, +        uint8_t *dest); + +// TODO: remove when RGBA support is added to SoftwareRenderer +void convertRGBAToARGB( +        uint8_t *src, int32_t width, int32_t height, uint32_t stride, +        uint8_t *dest); + +}   // namespace android + +#endif  // COLOR_CONVERT_H_ diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp new file mode 100644 index 0000000..a606315 --- /dev/null +++ b/media/libstagefright/filters/GraphicBufferListener.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "GraphicBufferListener" + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaErrors.h> + +#include <gui/BufferItem.h> + +#include "GraphicBufferListener.h" + +namespace android { + +status_t GraphicBufferListener::init( +        const sp<AMessage> ¬ify, +        size_t bufferWidth, size_t bufferHeight, size_t bufferCount) { +    mNotify = notify; + +    String8 name("GraphicBufferListener"); +    BufferQueue::createBufferQueue(&mProducer, &mConsumer); +    mConsumer->setConsumerName(name); +    mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight); +    mConsumer->setConsumerUsageBits(GRALLOC_USAGE_SW_READ_OFTEN); + +    status_t err = mConsumer->setMaxAcquiredBufferCount(bufferCount); +    if (err != NO_ERROR) { +        ALOGE("Unable to set BQ max acquired buffer count to %zu: %d", +                bufferCount, err); +        return err; +    } + +    wp<BufferQueue::ConsumerListener> listener = +        static_cast<BufferQueue::ConsumerListener*>(this); +    sp<BufferQueue::ProxyConsumerListener> proxy = +        new BufferQueue::ProxyConsumerListener(listener); + +    err = mConsumer->consumerConnect(proxy, false); +    if (err != NO_ERROR) { +        ALOGE("Error connecting to BufferQueue: %s (%d)", +                strerror(-err), err); +        return err; +    } + +    ALOGV("init() successful."); + +    return OK; +} + +void GraphicBufferListener::onFrameAvailable(const BufferItem& /* item */) { +    ALOGV("onFrameAvailable() called"); + +    { +        Mutex::Autolock autoLock(mMutex); +        mNumFramesAvailable++; +    } + +    sp<AMessage> notify = mNotify->dup(); +    mNotify->setWhat(kWhatFrameAvailable); +    mNotify->post(); +} + +void GraphicBufferListener::onBuffersReleased() { +    ALOGV("onBuffersReleased() called"); +    // nothing to do +} + +void GraphicBufferListener::onSidebandStreamChanged() { +    
ALOGW("GraphicBufferListener cannot consume sideband streams."); +    // nothing to do +} + +BufferItem GraphicBufferListener::getBufferItem() { +    BufferItem item; + +    { +        Mutex::Autolock autoLock(mMutex); +        if (mNumFramesAvailable <= 0) { +            ALOGE("getBuffer() called with no frames available"); +            return item; +        } +        mNumFramesAvailable--; +    } + +    status_t err = mConsumer->acquireBuffer(&item, 0); +    if (err == BufferQueue::NO_BUFFER_AVAILABLE) { +        // shouldn't happen, since we track num frames available +        ALOGE("frame was not available"); +        item.mBuf = -1; +        return item; +    } else if (err != OK) { +        ALOGE("acquireBuffer returned err=%d", err); +        item.mBuf = -1; +        return item; +    } + +    // Wait for it to become available. +    err = item.mFence->waitForever("GraphicBufferListener::getBufferItem"); +    if (err != OK) { +        ALOGW("failed to wait for buffer fence: %d", err); +        // keep going +    } + +    // If this is the first time we're seeing this buffer, add it to our +    // slot table. 
+    if (item.mGraphicBuffer != NULL) { +        ALOGV("setting mBufferSlot %d", item.mBuf); +        mBufferSlot[item.mBuf] = item.mGraphicBuffer; +    } + +    return item; +} + +sp<GraphicBuffer> GraphicBufferListener::getBuffer(BufferItem item) { +    sp<GraphicBuffer> buf; +    if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) { +        ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf); +        return buf; +    } + +    buf = mBufferSlot[item.mBuf]; +    CHECK(buf.get() != NULL); + +    return buf; +} + +status_t GraphicBufferListener::releaseBuffer(BufferItem item) { +    if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) { +        ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf); +        return ERROR_OUT_OF_RANGE; +    } + +    mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, +            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); + +    return OK; +} + +}   // namespace android diff --git a/media/libstagefright/filters/GraphicBufferListener.h b/media/libstagefright/filters/GraphicBufferListener.h new file mode 100644 index 0000000..586bf65 --- /dev/null +++ b/media/libstagefright/filters/GraphicBufferListener.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GRAPHIC_BUFFER_LISTENER_H_ +#define GRAPHIC_BUFFER_LISTENER_H_ + +#include <gui/BufferQueue.h> + +namespace android { + +struct AMessage; + +struct GraphicBufferListener : public BufferQueue::ConsumerListener { +public: +    GraphicBufferListener() {}; + +    status_t init( +            const sp<AMessage> ¬ify, +            size_t bufferWidth, size_t bufferHeight, size_t bufferCount); + +    virtual void onFrameAvailable(const BufferItem& item); +    virtual void onBuffersReleased(); +    virtual void onSidebandStreamChanged(); + +    // Returns the handle to the producer side of the BufferQueue.  Buffers +    // queued on this will be received by GraphicBufferListener. +    sp<IGraphicBufferProducer> getIGraphicBufferProducer() const { +        return mProducer; +    } + +    BufferItem getBufferItem(); +    sp<GraphicBuffer> getBuffer(BufferItem item); +    status_t releaseBuffer(BufferItem item); + +    enum { +        kWhatFrameAvailable = 'frav', +    }; + +private: +    sp<AMessage> mNotify; +    size_t mNumFramesAvailable; + +    mutable Mutex mMutex; + +    // Our BufferQueue interfaces. mProducer is passed to the producer through +    // getIGraphicBufferProducer, and mConsumer is used internally to retrieve +    // the buffers queued by the producer. +    sp<IGraphicBufferProducer> mProducer; +    sp<IGraphicBufferConsumer> mConsumer; + +    // Cache of GraphicBuffers from the buffer queue. 
+    sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS]; +}; + +}   // namespace android + +#endif  // GRAPHIC_BUFFER_LISTENER_H diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.cpp b/media/libstagefright/filters/IntrinsicBlurFilter.cpp new file mode 100644 index 0000000..cbcf699 --- /dev/null +++ b/media/libstagefright/filters/IntrinsicBlurFilter.cpp @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IntrinsicBlurFilter" + +#include <utils/Log.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "IntrinsicBlurFilter.h" + +namespace android { + +status_t IntrinsicBlurFilter::configure(const sp<AMessage> &msg) { +    status_t err = SimpleFilter::configure(msg); +    if (err != OK) { +        return err; +    } + +    if (!msg->findString("cacheDir", &mCacheDir)) { +        ALOGE("Failed to find cache directory in config message."); +        return NAME_NOT_FOUND; +    } + +    return OK; +} + +status_t IntrinsicBlurFilter::start() { +    // TODO: use a single RS context object for entire application +    mRS = new RSC::RS(); + +    if (!mRS->init(mCacheDir.c_str())) { +        ALOGE("Failed to initialize RenderScript context."); +        return NO_INIT; +    } + +    // 32-bit elements for ARGB8888 +    RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS); + +    RSC::Type::Builder tb(mRS, e); +    tb.setX(mWidth); +    tb.setY(mHeight); +    RSC::sp<const RSC::Type> t = tb.create(); + +    mAllocIn = RSC::Allocation::createTyped(mRS, t); +    mAllocOut = RSC::Allocation::createTyped(mRS, t); + +    mBlur = RSC::ScriptIntrinsicBlur::create(mRS, e); +    mBlur->setRadius(mBlurRadius); +    mBlur->setInput(mAllocIn); + +    return OK; +} + +void IntrinsicBlurFilter::reset() { +    mBlur.clear(); +    mAllocOut.clear(); +    mAllocIn.clear(); +    mRS.clear(); +} + +status_t IntrinsicBlurFilter::setParameters(const sp<AMessage> &msg) { +    sp<AMessage> params; +    CHECK(msg->findMessage("params", ¶ms)); + +    float blurRadius; +    if (params->findFloat("blur-radius", &blurRadius)) { +        mBlurRadius = blurRadius; +    } + +    return OK; +} + +status_t IntrinsicBlurFilter::processBuffers( +        const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { +    mAllocIn->copy1DRangeFrom(0, 
mWidth * mHeight, srcBuffer->data()); +    mBlur->forEach(mAllocOut); +    mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data()); + +    return OK; +} + +}   // namespace android diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/IntrinsicBlurFilter.h new file mode 100644 index 0000000..4707ab7 --- /dev/null +++ b/media/libstagefright/filters/IntrinsicBlurFilter.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INTRINSIC_BLUR_FILTER_H_ +#define INTRINSIC_BLUR_FILTER_H_ + +#include "RenderScript.h" +#include "SimpleFilter.h" + +namespace android { + +struct IntrinsicBlurFilter : public SimpleFilter { +public: +    IntrinsicBlurFilter() : mBlurRadius(1.f) {}; + +    virtual status_t configure(const sp<AMessage> &msg); +    virtual status_t start(); +    virtual void reset(); +    virtual status_t setParameters(const sp<AMessage> &msg); +    virtual status_t processBuffers( +            const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: +    virtual ~IntrinsicBlurFilter() {}; + +private: +    AString mCacheDir; +    RSC::sp<RSC::RS> mRS; +    RSC::sp<RSC::Allocation> mAllocIn; +    RSC::sp<RSC::Allocation> mAllocOut; +    RSC::sp<RSC::ScriptIntrinsicBlur> mBlur; +    float mBlurRadius; +}; + +}   // namespace android + +#endif  // INTRINSIC_BLUR_FILTER_H_ diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp new file mode 100644 index 0000000..0cf6b06 --- /dev/null +++ b/media/libstagefright/filters/MediaFilter.cpp @@ -0,0 +1,823 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaFilter" + +#include <inttypes.h> +#include <utils/Trace.h> + +#include <binder/MemoryDealer.h> + +#include <media/stagefright/BufferProducerWrapper.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaFilter.h> + +#include <gui/BufferItem.h> + +#include "ColorConvert.h" +#include "GraphicBufferListener.h" +#include "IntrinsicBlurFilter.h" +#include "RSFilter.h" +#include "SaturationFilter.h" +#include "ZeroFilter.h" + +namespace android { + +// parameter: number of input and output buffers +static const size_t kBufferCountActual = 4; + +MediaFilter::MediaFilter() +    : mState(UNINITIALIZED), +      mGeneration(0), +      mGraphicBufferListener(NULL) { +} + +MediaFilter::~MediaFilter() { +} + +//////////////////// PUBLIC FUNCTIONS ////////////////////////////////////////// + +void MediaFilter::setNotificationMessage(const sp<AMessage> &msg) { +    mNotify = msg; +} + +void MediaFilter::initiateAllocateComponent(const sp<AMessage> &msg) { +    msg->setWhat(kWhatAllocateComponent); +    msg->setTarget(this); +    msg->post(); +} + +void MediaFilter::initiateConfigureComponent(const sp<AMessage> &msg) { +    msg->setWhat(kWhatConfigureComponent); +    msg->setTarget(this); +    msg->post(); +} + +void MediaFilter::initiateCreateInputSurface() { +    (new AMessage(kWhatCreateInputSurface, this))->post(); +} + +void MediaFilter::initiateSetInputSurface( +        const sp<PersistentSurface> & /* surface */) { +    ALOGW("initiateSetInputSurface() unsupported"); +} + +void MediaFilter::initiateStart() { +    (new AMessage(kWhatStart, this))->post(); +} + +void MediaFilter::initiateShutdown(bool keepComponentAllocated) { +    sp<AMessage> msg = new AMessage(kWhatShutdown, this); +    
msg->setInt32("keepComponentAllocated", keepComponentAllocated); +    msg->post(); +} + +void MediaFilter::signalFlush() { +    (new AMessage(kWhatFlush, this))->post(); +} + +void MediaFilter::signalResume() { +    (new AMessage(kWhatResume, this))->post(); +} + +// nothing to do +void MediaFilter::signalRequestIDRFrame() { +    return; +} + +void MediaFilter::signalSetParameters(const sp<AMessage> ¶ms) { +    sp<AMessage> msg = new AMessage(kWhatSetParameters, this); +    msg->setMessage("params", params); +    msg->post(); +} + +void MediaFilter::signalEndOfInputStream() { +    (new AMessage(kWhatSignalEndOfInputStream, this))->post(); +} + +void MediaFilter::onMessageReceived(const sp<AMessage> &msg) { +    switch (msg->what()) { +        case kWhatAllocateComponent: +        { +            onAllocateComponent(msg); +            break; +        } +        case kWhatConfigureComponent: +        { +            onConfigureComponent(msg); +            break; +        } +        case kWhatStart: +        { +            onStart(); +            break; +        } +        case kWhatProcessBuffers: +        { +            processBuffers(); +            break; +        } +        case kWhatInputBufferFilled: +        { +            onInputBufferFilled(msg); +            break; +        } +        case kWhatOutputBufferDrained: +        { +            onOutputBufferDrained(msg); +            break; +        } +        case kWhatShutdown: +        { +            onShutdown(msg); +            break; +        } +        case kWhatFlush: +        { +            onFlush(); +            break; +        } +        case kWhatResume: +        { +            // nothing to do +            break; +        } +        case kWhatSetParameters: +        { +            onSetParameters(msg); +            break; +        } +        case kWhatCreateInputSurface: +        { +            onCreateInputSurface(); +            break; +        } +        case 
GraphicBufferListener::kWhatFrameAvailable: +        { +            onInputFrameAvailable(); +            break; +        } +        case kWhatSignalEndOfInputStream: +        { +            onSignalEndOfInputStream(); +            break; +        } +        default: +        { +            ALOGE("Message not handled:\n%s", msg->debugString().c_str()); +            break; +        } +    } +} + +//////////////////// PORT DESCRIPTION ////////////////////////////////////////// + +MediaFilter::PortDescription::PortDescription() { +} + +void MediaFilter::PortDescription::addBuffer( +        IOMX::buffer_id id, const sp<ABuffer> &buffer) { +    mBufferIDs.push_back(id); +    mBuffers.push_back(buffer); +} + +size_t MediaFilter::PortDescription::countBuffers() { +    return mBufferIDs.size(); +} + +IOMX::buffer_id MediaFilter::PortDescription::bufferIDAt(size_t index) const { +    return mBufferIDs.itemAt(index); +} + +sp<ABuffer> MediaFilter::PortDescription::bufferAt(size_t index) const { +    return mBuffers.itemAt(index); +} + +//////////////////// HELPER FUNCTIONS ////////////////////////////////////////// + +void MediaFilter::signalProcessBuffers() { +    (new AMessage(kWhatProcessBuffers, this))->post(); +} + +void MediaFilter::signalError(status_t error) { +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatError); +    notify->setInt32("err", error); +    notify->post(); +} + +status_t MediaFilter::allocateBuffersOnPort(OMX_U32 portIndex) { +    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); +    const bool isInput = portIndex == kPortIndexInput; +    const size_t bufferSize = isInput ? mMaxInputSize : mMaxOutputSize; + +    CHECK(mDealer[portIndex] == NULL); +    CHECK(mBuffers[portIndex].isEmpty()); + +    ALOGV("Allocating %zu buffers of size %zu on %s port", +            kBufferCountActual, bufferSize, +            isInput ? 
"input" : "output"); + +    size_t totalSize = kBufferCountActual * bufferSize; + +    mDealer[portIndex] = new MemoryDealer(totalSize, "MediaFilter"); + +    for (size_t i = 0; i < kBufferCountActual; ++i) { +        sp<IMemory> mem = mDealer[portIndex]->allocate(bufferSize); +        CHECK(mem.get() != NULL); + +        BufferInfo info; +        info.mStatus = BufferInfo::OWNED_BY_US; +        info.mBufferID = i; +        info.mGeneration = mGeneration; +        info.mOutputFlags = 0; +        info.mData = new ABuffer(mem->pointer(), bufferSize); +        info.mData->meta()->setInt64("timeUs", 0); + +        mBuffers[portIndex].push_back(info); + +        if (!isInput) { +            mAvailableOutputBuffers.push( +                    &mBuffers[portIndex].editItemAt(i)); +        } +    } + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatBuffersAllocated); + +    notify->setInt32("portIndex", portIndex); + +    sp<PortDescription> desc = new PortDescription; + +    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { +        const BufferInfo &info = mBuffers[portIndex][i]; + +        desc->addBuffer(info.mBufferID, info.mData); +    } + +    notify->setObject("portDesc", desc); +    notify->post(); + +    return OK; +} + +MediaFilter::BufferInfo* MediaFilter::findBufferByID( +        uint32_t portIndex, IOMX::buffer_id bufferID, +        ssize_t *index) { +    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { +        BufferInfo *info = &mBuffers[portIndex].editItemAt(i); + +        if (info->mBufferID == bufferID) { +            if (index != NULL) { +                *index = i; +            } +            return info; +        } +    } + +    TRESPASS(); + +    return NULL; +} + +void MediaFilter::postFillThisBuffer(BufferInfo *info) { +    ALOGV("postFillThisBuffer on buffer %d", info->mBufferID); +    if (mPortEOS[kPortIndexInput]) { +        return; +    } + +    CHECK_EQ((int)info->mStatus, 
(int)BufferInfo::OWNED_BY_US); + +    info->mGeneration = mGeneration; + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatFillThisBuffer); +    notify->setInt32("buffer-id", info->mBufferID); + +    info->mData->meta()->clear(); +    notify->setBuffer("buffer", info->mData); + +    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, this); +    reply->setInt32("buffer-id", info->mBufferID); + +    notify->setMessage("reply", reply); + +    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; +    notify->post(); +} + +void MediaFilter::postDrainThisBuffer(BufferInfo *info) { +    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); + +    info->mGeneration = mGeneration; + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); +    notify->setInt32("buffer-id", info->mBufferID); +    notify->setInt32("flags", info->mOutputFlags); +    notify->setBuffer("buffer", info->mData); + +    sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, this); +    reply->setInt32("buffer-id", info->mBufferID); + +    notify->setMessage("reply", reply); + +    notify->post(); + +    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; +} + +void MediaFilter::postEOS() { +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatEOS); +    notify->setInt32("err", ERROR_END_OF_STREAM); +    notify->post(); + +    ALOGV("Sent kWhatEOS."); +} + +void MediaFilter::sendFormatChange() { +    sp<AMessage> notify = mNotify->dup(); + +    notify->setInt32("what", kWhatOutputFormatChanged); + +    AString mime; +    CHECK(mOutputFormat->findString("mime", &mime)); +    notify->setString("mime", mime.c_str()); + +    notify->setInt32("stride", mStride); +    notify->setInt32("slice-height", mSliceHeight); +    notify->setInt32("color-format", mColorFormatOut); +    notify->setRect("crop", 0, 0, mStride - 1, mSliceHeight - 1); +    notify->setInt32("width", mWidth); +  
  notify->setInt32("height", mHeight); + +    notify->post(); +} + +void MediaFilter::requestFillEmptyInput() { +    if (mPortEOS[kPortIndexInput]) { +        return; +    } + +    for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) { +        BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i); + +        if (info->mStatus == BufferInfo::OWNED_BY_US) { +            postFillThisBuffer(info); +        } +    } +} + +void MediaFilter::processBuffers() { +    if (mAvailableInputBuffers.empty() || mAvailableOutputBuffers.empty()) { +        ALOGV("Skipping process (buffers unavailable)"); +        return; +    } + +    if (mPortEOS[kPortIndexOutput]) { +        // TODO notify caller of queueInput error when it is supported +        // in MediaCodec +        ALOGW("Tried to process a buffer after EOS."); +        return; +    } + +    BufferInfo *inputInfo = mAvailableInputBuffers[0]; +    mAvailableInputBuffers.removeAt(0); +    BufferInfo *outputInfo = mAvailableOutputBuffers[0]; +    mAvailableOutputBuffers.removeAt(0); + +    status_t err; +    err = mFilter->processBuffers(inputInfo->mData, outputInfo->mData); +    if (err != (status_t)OK) { +        outputInfo->mData->meta()->setInt32("err", err); +    } + +    int64_t timeUs; +    CHECK(inputInfo->mData->meta()->findInt64("timeUs", &timeUs)); +    outputInfo->mData->meta()->setInt64("timeUs", timeUs); +    outputInfo->mOutputFlags = 0; +    int32_t eos = 0; +    if (inputInfo->mData->meta()->findInt32("eos", &eos) && eos != 0) { +        outputInfo->mOutputFlags |= OMX_BUFFERFLAG_EOS; +        mPortEOS[kPortIndexOutput] = true; +        outputInfo->mData->meta()->setInt32("eos", eos); +        postEOS(); +        ALOGV("Output stream saw EOS."); +    } + +    ALOGV("Processed input buffer %u [%zu], output buffer %u [%zu]", +                inputInfo->mBufferID, inputInfo->mData->size(), +                outputInfo->mBufferID, outputInfo->mData->size()); + +    if (mGraphicBufferListener != NULL) 
{ +        delete inputInfo; +    } else { +        postFillThisBuffer(inputInfo); +    } +    postDrainThisBuffer(outputInfo); + +    // prevent any corner case where buffers could get stuck in queue +    signalProcessBuffers(); +} + +void MediaFilter::onAllocateComponent(const sp<AMessage> &msg) { +    CHECK_EQ(mState, UNINITIALIZED); + +    CHECK(msg->findString("componentName", &mComponentName)); +    const char* name = mComponentName.c_str(); +    if (!strcasecmp(name, "android.filter.zerofilter")) { +        mFilter = new ZeroFilter; +    } else if (!strcasecmp(name, "android.filter.saturation")) { +        mFilter = new SaturationFilter; +    } else if (!strcasecmp(name, "android.filter.intrinsicblur")) { +        mFilter = new IntrinsicBlurFilter; +    } else if (!strcasecmp(name, "android.filter.RenderScript")) { +        mFilter = new RSFilter; +    } else { +        ALOGE("Unrecognized filter name: %s", name); +        signalError(NAME_NOT_FOUND); +        return; +    } + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", kWhatComponentAllocated); +    // HACK - need "OMX.google" to use MediaCodec's software renderer +    notify->setString("componentName", "OMX.google.MediaFilter"); +    notify->post(); +    mState = INITIALIZED; +    ALOGV("Handled kWhatAllocateComponent."); +} + +void MediaFilter::onConfigureComponent(const sp<AMessage> &msg) { +    // TODO: generalize to allow audio filters as well as video + +    CHECK_EQ(mState, INITIALIZED); + +    // get params - at least mime, width & height +    AString mime; +    CHECK(msg->findString("mime", &mime)); +    if (strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) { +        ALOGE("Bad mime: %s", mime.c_str()); +        signalError(BAD_VALUE); +        return; +    } + +    CHECK(msg->findInt32("width", &mWidth)); +    CHECK(msg->findInt32("height", &mHeight)); +    if (!msg->findInt32("stride", &mStride)) { +        mStride = mWidth; +    } +    if 
(!msg->findInt32("slice-height", &mSliceHeight)) { +        mSliceHeight = mHeight; +    } + +    mMaxInputSize = mWidth * mHeight * 4;   // room for ARGB8888 +    int32_t maxInputSize; +    if (msg->findInt32("max-input-size", &maxInputSize) +            && (size_t)maxInputSize > mMaxInputSize) { +        mMaxInputSize = maxInputSize; +    } + +    if (!msg->findInt32("color-format", &mColorFormatIn)) { +        // default to OMX_COLOR_Format32bitARGB8888 +        mColorFormatIn = OMX_COLOR_Format32bitARGB8888; +        msg->setInt32("color-format", mColorFormatIn); +    } +    mColorFormatOut = mColorFormatIn; + +    mMaxOutputSize = mWidth * mHeight * 4;  // room for ARGB8888 + +    AString cacheDir; +    if (!msg->findString("cacheDir", &cacheDir)) { +        ALOGE("Failed to find cache directory in config message."); +        signalError(NAME_NOT_FOUND); +        return; +    } + +    status_t err; +    err = mFilter->configure(msg); +    if (err != (status_t)OK) { +        ALOGE("Failed to configure filter component, err %d", err); +        signalError(err); +        return; +    } + +    mInputFormat = new AMessage(); +    mInputFormat->setString("mime", mime.c_str()); +    mInputFormat->setInt32("stride", mStride); +    mInputFormat->setInt32("slice-height", mSliceHeight); +    mInputFormat->setInt32("color-format", mColorFormatIn); +    mInputFormat->setRect("crop", 0, 0, mStride, mSliceHeight); +    mInputFormat->setInt32("width", mWidth); +    mInputFormat->setInt32("height", mHeight); + +    mOutputFormat = new AMessage(); +    mOutputFormat->setString("mime", mime.c_str()); +    mOutputFormat->setInt32("stride", mStride); +    mOutputFormat->setInt32("slice-height", mSliceHeight); +    mOutputFormat->setInt32("color-format", mColorFormatOut); +    mOutputFormat->setRect("crop", 0, 0, mStride, mSliceHeight); +    mOutputFormat->setInt32("width", mWidth); +    mOutputFormat->setInt32("height", mHeight); + +    sp<AMessage> notify = mNotify->dup(); +    
notify->setInt32("what", kWhatComponentConfigured); +    notify->setString("componentName", "MediaFilter"); +    notify->setMessage("input-format", mInputFormat); +    notify->setMessage("output-format", mOutputFormat); +    notify->post(); +    mState = CONFIGURED; +    ALOGV("Handled kWhatConfigureComponent."); + +    sendFormatChange(); +} + +void MediaFilter::onStart() { +    CHECK_EQ(mState, CONFIGURED); + +    allocateBuffersOnPort(kPortIndexInput); + +    allocateBuffersOnPort(kPortIndexOutput); + +    status_t err = mFilter->start(); +    if (err != (status_t)OK) { +        ALOGE("Failed to start filter component, err %d", err); +        signalError(err); +        return; +    } + +    mPortEOS[kPortIndexInput] = false; +    mPortEOS[kPortIndexOutput] = false; +    mInputEOSResult = OK; +    mState = STARTED; + +    requestFillEmptyInput(); +    ALOGV("Handled kWhatStart."); +} + +void MediaFilter::onInputBufferFilled(const sp<AMessage> &msg) { +    IOMX::buffer_id bufferID; +    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); +    BufferInfo *info = findBufferByID(kPortIndexInput, bufferID); + +    if (mState != STARTED) { +        // we're not running, so we'll just keep that buffer... 
+        info->mStatus = BufferInfo::OWNED_BY_US; +        return; +    } + +    if (info->mGeneration != mGeneration) { +        ALOGV("Caught a stale input buffer [ID %d]", bufferID); +        // buffer is stale (taken before a flush/shutdown) - repost it +        CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US); +        postFillThisBuffer(info); +        return; +    } + +    CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM); +    info->mStatus = BufferInfo::OWNED_BY_US; + +    sp<ABuffer> buffer; +    int32_t err = OK; +    bool eos = false; + +    if (!msg->findBuffer("buffer", &buffer)) { +        // these are unfilled buffers returned by client +        CHECK(msg->findInt32("err", &err)); + +        if (err == OK) { +            // buffers with no errors are returned on MediaCodec.flush +            ALOGV("saw unfilled buffer (MediaCodec.flush)"); +            postFillThisBuffer(info); +            return; +        } else { +            ALOGV("saw error %d instead of an input buffer", err); +            eos = true; +        } + +        buffer.clear(); +    } + +    int32_t isCSD; +    if (buffer != NULL && buffer->meta()->findInt32("csd", &isCSD) +            && isCSD != 0) { +        // ignore codec-specific data buffers +        ALOGW("MediaFilter received a codec-specific data buffer"); +        postFillThisBuffer(info); +        return; +    } + +    int32_t tmp; +    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { +        eos = true; +        err = ERROR_END_OF_STREAM; +    } + +    mAvailableInputBuffers.push_back(info); +    processBuffers(); + +    if (eos) { +        mPortEOS[kPortIndexInput] = true; +        mInputEOSResult = err; +    } + +    ALOGV("Handled kWhatInputBufferFilled. 
[ID %u]", bufferID); +} + +void MediaFilter::onOutputBufferDrained(const sp<AMessage> &msg) { +    IOMX::buffer_id bufferID; +    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); +    BufferInfo *info = findBufferByID(kPortIndexOutput, bufferID); + +    if (mState != STARTED) { +        // we're not running, so we'll just keep that buffer... +        info->mStatus = BufferInfo::OWNED_BY_US; +        return; +    } + +    if (info->mGeneration != mGeneration) { +        ALOGV("Caught a stale output buffer [ID %d]", bufferID); +        // buffer is stale (taken before a flush/shutdown) - keep it +        CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US); +        return; +    } + +    CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM); +    info->mStatus = BufferInfo::OWNED_BY_US; + +    mAvailableOutputBuffers.push_back(info); + +    processBuffers(); + +    ALOGV("Handled kWhatOutputBufferDrained. [ID %u]", +            bufferID); +} + +void MediaFilter::onShutdown(const sp<AMessage> &msg) { +    mGeneration++; + +    if (mState != UNINITIALIZED) { +        mFilter->reset(); +    } + +    int32_t keepComponentAllocated; +    CHECK(msg->findInt32("keepComponentAllocated", &keepComponentAllocated)); +    if (!keepComponentAllocated || mState == UNINITIALIZED) { +        mState = UNINITIALIZED; +    } else { +        mState = INITIALIZED; +    } + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatShutdownCompleted); +    notify->post(); +} + +void MediaFilter::onFlush() { +    mGeneration++; + +    mAvailableInputBuffers.clear(); +    for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) { +        BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i); +        info->mStatus = BufferInfo::OWNED_BY_US; +    } +    mAvailableOutputBuffers.clear(); +    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { +        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); +        
info->mStatus = BufferInfo::OWNED_BY_US; +        mAvailableOutputBuffers.push_back(info); +    } + +    mPortEOS[kPortIndexInput] = false; +    mPortEOS[kPortIndexOutput] = false; +    mInputEOSResult = OK; + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatFlushCompleted); +    notify->post(); +    ALOGV("Posted kWhatFlushCompleted"); + +    // MediaCodec returns all input buffers after flush, so in +    // onInputBufferFilled we call postFillThisBuffer on them +} + +void MediaFilter::onSetParameters(const sp<AMessage> &msg) { +    CHECK(mState != STARTED); + +    status_t err = mFilter->setParameters(msg); +    if (err != (status_t)OK) { +        ALOGE("setParameters returned err %d", err); +    } +} + +void MediaFilter::onCreateInputSurface() { +    CHECK(mState == CONFIGURED); + +    mGraphicBufferListener = new GraphicBufferListener; + +    sp<AMessage> notify = new AMessage(); +    notify->setTarget(this); +    status_t err = mGraphicBufferListener->init( +            notify, mStride, mSliceHeight, kBufferCountActual); + +    if (err != OK) { +        ALOGE("Failed to init mGraphicBufferListener: %d", err); +        signalError(err); +        return; +    } + +    sp<AMessage> reply = mNotify->dup(); +    reply->setInt32("what", CodecBase::kWhatInputSurfaceCreated); +    reply->setObject( +            "input-surface", +            new BufferProducerWrapper( +                    mGraphicBufferListener->getIGraphicBufferProducer())); +    reply->post(); +} + +void MediaFilter::onInputFrameAvailable() { +    BufferItem item = mGraphicBufferListener->getBufferItem(); +    sp<GraphicBuffer> buf = mGraphicBufferListener->getBuffer(item); + +    // get pointer to graphic buffer +    void* bufPtr; +    buf->lock(GraphicBuffer::USAGE_SW_READ_OFTEN, &bufPtr); + +    // HACK - there is no OMX_COLOR_FORMATTYPE value for RGBA, so the format +    // conversion is hardcoded until we add this. 
+    // TODO: check input format and convert only if necessary +    // copy RGBA graphic buffer into temporary ARGB input buffer +    BufferInfo *inputInfo = new BufferInfo; +    inputInfo->mData = new ABuffer(buf->getWidth() * buf->getHeight() * 4); +    ALOGV("Copying surface data into temp buffer."); +    convertRGBAToARGB( +            (uint8_t*)bufPtr, buf->getWidth(), buf->getHeight(), +            buf->getStride(), inputInfo->mData->data()); +    inputInfo->mBufferID = item.mBuf; +    inputInfo->mGeneration = mGeneration; +    inputInfo->mOutputFlags = 0; +    inputInfo->mStatus = BufferInfo::OWNED_BY_US; +    inputInfo->mData->meta()->setInt64("timeUs", item.mTimestamp / 1000); + +    mAvailableInputBuffers.push_back(inputInfo); + +    mGraphicBufferListener->releaseBuffer(item); + +    signalProcessBuffers(); +} + +void MediaFilter::onSignalEndOfInputStream() { +    // if using input surface, need to send an EOS output buffer +    if (mGraphicBufferListener != NULL) { +        Vector<BufferInfo> *outputBufs = &mBuffers[kPortIndexOutput]; +        BufferInfo* eosBuf; +        bool foundBuf = false; +        for (size_t i = 0; i < kBufferCountActual; i++) { +            eosBuf = &outputBufs->editItemAt(i); +            if (eosBuf->mStatus == BufferInfo::OWNED_BY_US) { +                foundBuf = true; +                break; +            } +        } + +        if (!foundBuf) { +            ALOGE("onSignalEndOfInputStream failed to find an output buffer"); +            return; +        } + +        eosBuf->mOutputFlags = OMX_BUFFERFLAG_EOS; +        eosBuf->mGeneration = mGeneration; +        eosBuf->mData->setRange(0, 0); +        postDrainThisBuffer(eosBuf); +        ALOGV("Posted EOS on output buffer %u", eosBuf->mBufferID); +    } + +    mPortEOS[kPortIndexOutput] = true; +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); +    notify->post(); + +    ALOGV("Output stream saw EOS."); +} + +}   // 
namespace android diff --git a/media/libstagefright/filters/RSFilter.cpp b/media/libstagefright/filters/RSFilter.cpp new file mode 100644 index 0000000..b569945 --- /dev/null +++ b/media/libstagefright/filters/RSFilter.cpp @@ -0,0 +1,96 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RSFilter" + +#include <utils/Log.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "RSFilter.h" + +namespace android { + +RSFilter::RSFilter() { + +} + +RSFilter::~RSFilter() { + +} + +status_t RSFilter::configure(const sp<AMessage> &msg) { +    status_t err = SimpleFilter::configure(msg); +    if (err != OK) { +        return err; +    } + +    if (!msg->findString("cacheDir", &mCacheDir)) { +        ALOGE("Failed to find cache directory in config message."); +        return NAME_NOT_FOUND; +    } + +    sp<RenderScriptWrapper> wrapper; +    if (!msg->findObject("rs-wrapper", (sp<RefBase>*)&wrapper)) { +        ALOGE("Failed to find RenderScriptWrapper in config message."); +        return NAME_NOT_FOUND; +    } + +    mRS = wrapper->mContext; +    mCallback = wrapper->mCallback; + +    return OK; +} + +status_t RSFilter::start() { +    // 32-bit elements for ARGB8888 +    RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS); + +    
RSC::Type::Builder tb(mRS, e); +    tb.setX(mWidth); +    tb.setY(mHeight); +    RSC::sp<const RSC::Type> t = tb.create(); + +    mAllocIn = RSC::Allocation::createTyped(mRS, t); +    mAllocOut = RSC::Allocation::createTyped(mRS, t); + +    return OK; +} + +void RSFilter::reset() { +    mCallback.clear(); +    mAllocOut.clear(); +    mAllocIn.clear(); +    mRS.clear(); +} + +status_t RSFilter::setParameters(const sp<AMessage> &msg) { +    return mCallback->handleSetParameters(msg); +} + +status_t RSFilter::processBuffers( +        const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { +    mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data()); +    mCallback->processBuffers(mAllocIn.get(), mAllocOut.get()); +    mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data()); + +    return OK; +} + +}   // namespace android diff --git a/media/libstagefright/filters/RSFilter.h b/media/libstagefright/filters/RSFilter.h new file mode 100644 index 0000000..c5b5074 --- /dev/null +++ b/media/libstagefright/filters/RSFilter.h @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef RS_FILTER_H_ +#define RS_FILTER_H_ + +#include <media/stagefright/RenderScriptWrapper.h> +#include <RenderScript.h> + +#include "SimpleFilter.h" + +namespace android { + +struct AString; + +struct RSFilter : public SimpleFilter { +public: +    RSFilter(); + +    virtual status_t configure(const sp<AMessage> &msg); +    virtual status_t start(); +    virtual void reset(); +    virtual status_t setParameters(const sp<AMessage> &msg); +    virtual status_t processBuffers( +            const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: +    virtual ~RSFilter(); + +private: +    AString mCacheDir; +    sp<RenderScriptWrapper::RSFilterCallback> mCallback; +    RSC::sp<RSC::RS> mRS; +    RSC::sp<RSC::Allocation> mAllocIn; +    RSC::sp<RSC::Allocation> mAllocOut; +}; + +}   // namespace android + +#endif  // RS_FILTER_H_ diff --git a/media/libstagefright/filters/SaturationFilter.cpp b/media/libstagefright/filters/SaturationFilter.cpp new file mode 100644 index 0000000..ba5f75a --- /dev/null +++ b/media/libstagefright/filters/SaturationFilter.cpp @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SaturationFilter" + +#include <utils/Log.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "SaturationFilter.h" + +namespace android { + +status_t SaturationFilter::configure(const sp<AMessage> &msg) { +    status_t err = SimpleFilter::configure(msg); +    if (err != OK) { +        return err; +    } + +    if (!msg->findString("cacheDir", &mCacheDir)) { +        ALOGE("Failed to find cache directory in config message."); +        return NAME_NOT_FOUND; +    } + +    return OK; +} + +status_t SaturationFilter::start() { +    // TODO: use a single RS context object for entire application +    mRS = new RSC::RS(); + +    if (!mRS->init(mCacheDir.c_str())) { +        ALOGE("Failed to initialize RenderScript context."); +        return NO_INIT; +    } + +    // 32-bit elements for ARGB8888 +    RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS); + +    RSC::Type::Builder tb(mRS, e); +    tb.setX(mWidth); +    tb.setY(mHeight); +    RSC::sp<const RSC::Type> t = tb.create(); + +    mAllocIn = RSC::Allocation::createTyped(mRS, t); +    mAllocOut = RSC::Allocation::createTyped(mRS, t); + +    mScript = new ScriptC_saturationARGB(mRS); + +    mScript->set_gSaturation(mSaturation); + +    return OK; +} + +void SaturationFilter::reset() { +    mScript.clear(); +    mAllocOut.clear(); +    mAllocIn.clear(); +    mRS.clear(); +} + +status_t SaturationFilter::setParameters(const sp<AMessage> &msg) { +    sp<AMessage> params; +    CHECK(msg->findMessage("params", ¶ms)); + +    float saturation; +    if (params->findFloat("saturation", &saturation)) { +        mSaturation = saturation; +    } + +    return OK; +} + +status_t SaturationFilter::processBuffers( +        const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { +    mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data()); +    
mScript->forEach_root(mAllocIn, mAllocOut); +    mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data()); + +    return OK; +} + +}   // namespace android diff --git a/media/libstagefright/filters/SaturationFilter.h b/media/libstagefright/filters/SaturationFilter.h new file mode 100644 index 0000000..0545021 --- /dev/null +++ b/media/libstagefright/filters/SaturationFilter.h @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SATURATION_FILTER_H_ +#define SATURATION_FILTER_H_ + +#include <RenderScript.h> + +#include "ScriptC_saturationARGB.h" +#include "SimpleFilter.h" + +namespace android { + +struct SaturationFilter : public SimpleFilter { +public: +    SaturationFilter() : mSaturation(1.f) {}; + +    virtual status_t configure(const sp<AMessage> &msg); +    virtual status_t start(); +    virtual void reset(); +    virtual status_t setParameters(const sp<AMessage> &msg); +    virtual status_t processBuffers( +            const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: +    virtual ~SaturationFilter() {}; + +private: +    AString mCacheDir; +    RSC::sp<RSC::RS> mRS; +    RSC::sp<RSC::Allocation> mAllocIn; +    RSC::sp<RSC::Allocation> mAllocOut; +    RSC::sp<ScriptC_saturationARGB> mScript; +    float mSaturation; +}; + +}   // namespace android + +#endif  // SATURATION_FILTER_H_ diff --git a/media/libstagefright/filters/SimpleFilter.cpp b/media/libstagefright/filters/SimpleFilter.cpp new file mode 100644 index 0000000..6c1ca2c --- /dev/null +++ b/media/libstagefright/filters/SimpleFilter.cpp @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "SimpleFilter.h" + +namespace android { + +status_t SimpleFilter::configure(const sp<AMessage> &msg) { +    CHECK(msg->findInt32("width", &mWidth)); +    CHECK(msg->findInt32("height", &mHeight)); +    if (!msg->findInt32("stride", &mStride)) { +        mStride = mWidth; +    } +    if (!msg->findInt32("slice-height", &mSliceHeight)) { +        mSliceHeight = mHeight; +    } +    CHECK(msg->findInt32("color-format", &mColorFormatIn)); +    mColorFormatOut = mColorFormatIn; + +    return OK; +} + +}   // namespace android diff --git a/media/libstagefright/filters/SimpleFilter.h b/media/libstagefright/filters/SimpleFilter.h new file mode 100644 index 0000000..4cd37ef --- /dev/null +++ b/media/libstagefright/filters/SimpleFilter.h @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SIMPLE_FILTER_H_ +#define SIMPLE_FILTER_H_ + +#include <stdint.h> +#include <utils/Errors.h> +#include <utils/RefBase.h> + +struct ABuffer; +struct AMessage; + +namespace android { + +struct SimpleFilter : public RefBase { +public: +    SimpleFilter() : mWidth(0), mHeight(0), mStride(0), mSliceHeight(0), +            mColorFormatIn(0), mColorFormatOut(0) {}; + +    virtual status_t configure(const sp<AMessage> &msg); + +    virtual status_t start() = 0; +    virtual void reset() = 0; +    virtual status_t setParameters(const sp<AMessage> &msg) = 0; +    virtual status_t processBuffers( +            const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) = 0; + +protected: +    int32_t mWidth, mHeight; +    int32_t mStride, mSliceHeight; +    int32_t mColorFormatIn, mColorFormatOut; + +    virtual ~SimpleFilter() {}; +}; + +}   // namespace android + +#endif  // SIMPLE_FILTER_H_ diff --git a/media/libstagefright/filters/ZeroFilter.cpp b/media/libstagefright/filters/ZeroFilter.cpp new file mode 100644 index 0000000..3f1243c --- /dev/null +++ b/media/libstagefright/filters/ZeroFilter.cpp @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ZeroFilter" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "ZeroFilter.h" + +namespace android { + +status_t ZeroFilter::setParameters(const sp<AMessage> &msg) { +    sp<AMessage> params; +    CHECK(msg->findMessage("params", ¶ms)); + +    int32_t invert; +    if (params->findInt32("invert", &invert)) { +        mInvertData = (invert != 0); +    } + +    return OK; +} + +status_t ZeroFilter::processBuffers( +        const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { +    // assuming identical input & output buffers, since we're a copy filter +    if (mInvertData) { +        uint32_t* src = (uint32_t*)srcBuffer->data(); +        uint32_t* dest = (uint32_t*)outBuffer->data(); +        for (size_t i = 0; i < srcBuffer->size() / 4; ++i) { +            *(dest++) = *(src++) ^ 0xFFFFFFFF; +        } +    } else { +        memcpy(outBuffer->data(), srcBuffer->data(), srcBuffer->size()); +    } +    outBuffer->setRange(0, srcBuffer->size()); + +    return OK; +} + +}   // namespace android diff --git a/media/libstagefright/filters/ZeroFilter.h b/media/libstagefright/filters/ZeroFilter.h new file mode 100644 index 0000000..bd34dfb --- /dev/null +++ b/media/libstagefright/filters/ZeroFilter.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ZERO_FILTER_H_ +#define ZERO_FILTER_H_ + +#include "SimpleFilter.h" + +namespace android { + +struct ZeroFilter : public SimpleFilter { +public: +    ZeroFilter() : mInvertData(false) {}; + +    virtual status_t start() { return OK; }; +    virtual void reset() {}; +    virtual status_t setParameters(const sp<AMessage> &msg); +    virtual status_t processBuffers( +            const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: +    virtual ~ZeroFilter() {}; + +private: +    bool mInvertData; +}; + +}   // namespace android + +#endif  // ZERO_FILTER_H_ diff --git a/media/libstagefright/filters/saturation.rs b/media/libstagefright/filters/saturation.rs new file mode 100644 index 0000000..2c867ac --- /dev/null +++ b/media/libstagefright/filters/saturation.rs @@ -0,0 +1,40 @@ +// Sample script for RGB888 support (compare to saturationARGB.rs) +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma version(1) +#pragma rs java_package_name(com.android.rs.cppbasic) +#pragma rs_fp_relaxed + +const static float3 gMonoMult = {0.299f, 0.587f, 0.114f}; + +// global variables (parameters accessible to application code) +float gSaturation = 1.0f; + +void root(const uchar3 *v_in, uchar3 *v_out) { +    // scale 0-255 uchar to 0-1.0 float +    float3 in = {v_in->r * 0.003921569f, v_in->g * 0.003921569f, +            v_in->b * 0.003921569f}; + +    // apply saturation filter +    float3 result = dot(in, gMonoMult); +    result = mix(result, in, gSaturation); + +    // convert to uchar, copied from rsPackColorTo8888 +    v_out->x = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f); +    v_out->y = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f); +    v_out->z = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f); +} diff --git a/media/libstagefright/filters/saturationARGB.rs b/media/libstagefright/filters/saturationARGB.rs new file mode 100644 index 0000000..1de9dd8 --- /dev/null +++ b/media/libstagefright/filters/saturationARGB.rs @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma version(1) +#pragma rs java_package_name(com.android.rs.cppbasic) +#pragma rs_fp_relaxed + +const static float3 gMonoMult = {0.299f, 0.587f, 0.114f}; + +// global variables (parameters accessible to application code) +float gSaturation = 1.0f; + +void root(const uchar4 *v_in, uchar4 *v_out) { +    v_out->x = v_in->x; // don't modify A + +    // get RGB, scale 0-255 uchar to 0-1.0 float +    float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f, +            v_in->w * 0.003921569f}; + +    // apply saturation filter +    float3 result = dot(rgb, gMonoMult); +    result = mix(result, rgb, gSaturation); + +    v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f); +    v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f); +    v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f); +} diff --git a/media/libstagefright/foundation/ABuffer.cpp b/media/libstagefright/foundation/ABuffer.cpp index 4913fd4..a5b81a8 100644 --- a/media/libstagefright/foundation/ABuffer.cpp +++ b/media/libstagefright/foundation/ABuffer.cpp @@ -52,6 +52,9 @@ ABuffer::ABuffer(void *data, size_t capacity)  sp<ABuffer> ABuffer::CreateAsCopy(const void *data, size_t capacity)  {      sp<ABuffer> res = new ABuffer(capacity); +    if (res->base() == NULL) { +        return NULL; +    }      memcpy(res->data(), data, capacity);      return res;  } diff --git a/media/libstagefright/foundation/ADebug.cpp b/media/libstagefright/foundation/ADebug.cpp index ec4a960..2c5f544 100644 --- a/media/libstagefright/foundation/ADebug.cpp +++ b/media/libstagefright/foundation/ADebug.cpp @@ -19,6 +19,7 @@  #include <ctype.h>  #define LOG_TAG "ADebug" +#include <cutils/atomic.h>  #include <utils/Log.h>  #include <utils/misc.h> @@ -28,14 +29,15 @@  #include <AStringUtils.h>  #include <AUtils.h> +#define UNUSED(x) ((void)(x)) +  namespace android {  //static -ADebug::Level ADebug::GetDebugLevelFromString( -        const char *name, const char *value, ADebug::Level def) { 
+long ADebug::GetLevelFromSettingsString( +        const char *name, const char *value, long def) {      // split on ,      const char *next = value, *current; -    const unsigned long maxLevel = (unsigned long)kDebugMax;      while (next != NULL) {          current = next;          next = strchr(current, ','); @@ -51,8 +53,8 @@ ADebug::Level ADebug::GetDebugLevelFromString(          // get level          char *end; -        errno = 0;  // strtoul does not clear errno, but it can be set for any return value -        unsigned long level = strtoul(current, &end, 10); +        errno = 0;  // strtol does not clear errno, but it can be set for any return value +        long level = strtol(current, &end, 10);          while (isspace(*end)) {              ++end;          } @@ -76,23 +78,30 @@ ADebug::Level ADebug::GetDebugLevelFromString(              }          } -        // update debug level -        def = (Level)min(level, maxLevel); +        // update value +        def = level;      }      return def;  }  //static -ADebug::Level ADebug::GetDebugLevelFromProperty( -        const char *name, const char *propertyName, ADebug::Level def) { +long ADebug::GetLevelFromProperty( +        const char *name, const char *propertyName, long def) {      char value[PROPERTY_VALUE_MAX];      if (property_get(propertyName, value, NULL)) { -        return GetDebugLevelFromString(name, value, def); +        def = GetLevelFromSettingsString(name, value, def);      }      return def;  }  //static +ADebug::Level ADebug::GetDebugLevelFromProperty( +        const char *name, const char *propertyName, ADebug::Level def) { +    long level = GetLevelFromProperty(name, propertyName, (long)def); +    return (Level)min(max(level, (long)kDebugNone), (long)kDebugMax); +} + +//static  char *ADebug::GetDebugName(const char *name) {      char *debugName = strdup(name);      const char *terms[] = { "omx", "video", "audio" }; @@ -113,5 +122,69 @@ char *ADebug::GetDebugName(const char *name) {      
return debugName;  } +//static +bool ADebug::getExperimentFlag( +        bool allow, const char *name, uint64_t modulo, +        uint64_t limit, uint64_t plus, uint64_t timeDivisor) { +    // see if this experiment should be disabled/enabled based on properties. +    // default to 2 to allow 0/1 specification +    const int undefined = 2; +    long level = GetLevelFromProperty(name, "debug.stagefright.experiments", undefined); +    if (level != undefined) { +        ALOGI("experiment '%s': %s from property", name, level ? "ENABLED" : "disabled"); +        return allow && (level != 0); +    } + +#ifndef ENABLE_STAGEFRIGHT_AUTO_EXPERIMENTS +    UNUSED(modulo); +    UNUSED(limit); +    UNUSED(plus); +    UNUSED(timeDivisor); +    return false; +#else +    // Disable automatic experiments in "non-experimental" builds (that is, _all_ builds +    // as there is no "experimental" build). +    // TODO: change build type to enable automatic experiments in the future for some builds +    char value[PROPERTY_VALUE_MAX]; +    if (property_get("ro.build.type", value, NULL)) { +        if (strcmp(value, "experimental")) { +            return false; +        } +    } + +    static volatile int32_t haveSerial = 0; +    static uint64_t serialNum; +    if (!android_atomic_acquire_load(&haveSerial)) { +        // calculate initial counter value based on serial number +        static char serial[PROPERTY_VALUE_MAX]; +        property_get("ro.serialno", serial, "0"); +        uint64_t num = 0; // it is okay for this number to overflow +        for (size_t i = 0; i < NELEM(serial) && serial[i] != '\0'; ++i) { +            const char &c = serial[i]; +            // try to use most letters of serialno +            if (isdigit(c)) { +                num = num * 10 + (c - '0'); +            } else if (islower(c)) { +                num = num * 26 + (c - 'a'); +            } else if (isupper(c)) { +                num = num * 26 + (c - 'A'); +            } else { +                num = num * 
256 + c; +            } +        } +        serialNum = num; +        android_atomic_release_store(1, &haveSerial); +    } +    ALOGD("serial: %llu, time: %lld", (long long unsigned)serialNum, (long long)time(NULL)); +    // MINOR: use modulo for counter and time, so that their sum does not +    // roll over, and mess up the correlation between related experiments. +    // e.g. keep (a mod 2N) = 0 impl (a mod N) = 0 +    time_t counter = (time(NULL) / timeDivisor) % modulo + plus + serialNum % modulo; +    bool enable = allow && (counter % modulo < limit); +    ALOGI("experiment '%s': %s", name, enable ? "ENABLED" : "disabled"); +    return enable; +#endif +} +  }  // namespace android diff --git a/media/libstagefright/foundation/AHandler.cpp b/media/libstagefright/foundation/AHandler.cpp index bd5f7e9..7dbbe54 100644 --- a/media/libstagefright/foundation/AHandler.cpp +++ b/media/libstagefright/foundation/AHandler.cpp @@ -19,15 +19,23 @@  #include <utils/Log.h>  #include <media/stagefright/foundation/AHandler.h> - -#include <media/stagefright/foundation/ALooperRoster.h> +#include <media/stagefright/foundation/AMessage.h>  namespace android { -sp<ALooper> AHandler::looper() { -    extern ALooperRoster gLooperRoster; +void AHandler::deliverMessage(const sp<AMessage> &msg) { +    onMessageReceived(msg); +    mMessageCounter++; -    return gLooperRoster.findLooper(id()); +    if (mVerboseStats) { +        uint32_t what = msg->what(); +        ssize_t idx = mMessages.indexOfKey(what); +        if (idx < 0) { +            mMessages.add(what, 1); +        } else { +            mMessages.editValueAt(idx)++; +        } +    }  }  }  // namespace android diff --git a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp index 5f7c70d..b837f66 100644 --- a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp +++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp @@ -109,7 +109,8 @@ 
void AHierarchicalStateMachine::changeState(const sp<AState> &state) {          A.editItemAt(i)->stateExited();      } -    for (size_t i = B.size(); i-- > 0;) { +    for (size_t i = B.size(); i > 0;) { +        i--;          B.editItemAt(i)->stateEntered();      }  } diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp index 88b1c92..90b5f68 100644 --- a/media/libstagefright/foundation/ALooper.cpp +++ b/media/libstagefright/foundation/ALooper.cpp @@ -16,6 +16,9 @@  //#define LOG_NDEBUG 0  #define LOG_TAG "ALooper" + +#include <media/stagefright/foundation/ADebug.h> +  #include <utils/Log.h>  #include <sys/time.h> @@ -210,7 +213,7 @@ bool ALooper::loop() {          mEventQueue.erase(mEventQueue.begin());      } -    gLooperRoster.deliverMessage(event.mMessage); +    event.mMessage->deliver();      // NOTE: It's important to note that at this point our "ALooper" object      // may no longer exist (its final reference may have gone away while @@ -220,4 +223,29 @@ bool ALooper::loop() {      return true;  } +// to be called by AMessage::postAndAwaitResponse only +sp<AReplyToken> ALooper::createReplyToken() { +    return new AReplyToken(this); +} + +// to be called by AMessage::postAndAwaitResponse only +status_t ALooper::awaitResponse(const sp<AReplyToken> &replyToken, sp<AMessage> *response) { +    // return status in case we want to handle an interrupted wait +    Mutex::Autolock autoLock(mRepliesLock); +    CHECK(replyToken != NULL); +    while (!replyToken->retrieveReply(response)) { +        mRepliesCondition.wait(mRepliesLock); +    } +    return OK; +} + +status_t ALooper::postReply(const sp<AReplyToken> &replyToken, const sp<AMessage> &reply) { +    Mutex::Autolock autoLock(mRepliesLock); +    status_t err = replyToken->setReply(reply); +    if (err == OK) { +        mRepliesCondition.broadcast(); +    } +    return err; +} +  }  // namespace android diff --git 
a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp index 2d57aee..9ed53e7 100644 --- a/media/libstagefright/foundation/ALooperRoster.cpp +++ b/media/libstagefright/foundation/ALooperRoster.cpp @@ -30,8 +30,7 @@ namespace android {  static bool verboseStats = false;  ALooperRoster::ALooperRoster() -    : mNextHandlerID(1), -      mNextReplyID(1) { +    : mNextHandlerID(1) {  }  ALooper::handler_id ALooperRoster::registerHandler( @@ -49,7 +48,7 @@ ALooper::handler_id ALooperRoster::registerHandler(      ALooper::handler_id handlerID = mNextHandlerID++;      mHandlers.add(handlerID, info); -    handler->setID(handlerID); +    handler->setID(handlerID, looper);      return handlerID;  } @@ -68,7 +67,7 @@ void ALooperRoster::unregisterHandler(ALooper::handler_id handlerID) {      sp<AHandler> handler = info.mHandler.promote();      if (handler != NULL) { -        handler->setID(0); +        handler->setID(0, NULL);      }      mHandlers.removeItemsAt(index); @@ -80,7 +79,8 @@ void ALooperRoster::unregisterStaleHandlers() {      {          Mutex::Autolock autoLock(mLock); -        for (size_t i = mHandlers.size(); i-- > 0;) { +        for (size_t i = mHandlers.size(); i > 0;) { +            i--;              const HandlerInfo &info = mHandlers.valueAt(i);              sp<ALooper> looper = info.mLooper.promote(); @@ -100,116 +100,6 @@ void ALooperRoster::unregisterStaleHandlers() {      }  } -status_t ALooperRoster::postMessage( -        const sp<AMessage> &msg, int64_t delayUs) { - -    sp<ALooper> looper = findLooper(msg->target()); - -    if (looper == NULL) { -        return -ENOENT; -    } -    looper->post(msg, delayUs); -    return OK; -} - -void ALooperRoster::deliverMessage(const sp<AMessage> &msg) { -    sp<AHandler> handler; - -    { -        Mutex::Autolock autoLock(mLock); - -        ssize_t index = mHandlers.indexOfKey(msg->target()); - -        if (index < 0) { -            ALOGW("failed to deliver 
message. Target handler not registered."); -            return; -        } - -        const HandlerInfo &info = mHandlers.valueAt(index); -        handler = info.mHandler.promote(); - -        if (handler == NULL) { -            ALOGW("failed to deliver message. " -                 "Target handler %d registered, but object gone.", -                 msg->target()); - -            mHandlers.removeItemsAt(index); -            return; -        } -    } - -    handler->onMessageReceived(msg); -    handler->mMessageCounter++; - -    if (verboseStats) { -        uint32_t what = msg->what(); -        ssize_t idx = handler->mMessages.indexOfKey(what); -        if (idx < 0) { -            handler->mMessages.add(what, 1); -        } else { -            handler->mMessages.editValueAt(idx)++; -        } -    } -} - -sp<ALooper> ALooperRoster::findLooper(ALooper::handler_id handlerID) { -    Mutex::Autolock autoLock(mLock); - -    ssize_t index = mHandlers.indexOfKey(handlerID); - -    if (index < 0) { -        return NULL; -    } - -    sp<ALooper> looper = mHandlers.valueAt(index).mLooper.promote(); - -    if (looper == NULL) { -        mHandlers.removeItemsAt(index); -        return NULL; -    } - -    return looper; -} - -status_t ALooperRoster::postAndAwaitResponse( -        const sp<AMessage> &msg, sp<AMessage> *response) { -    sp<ALooper> looper = findLooper(msg->target()); - -    if (looper == NULL) { -        ALOGW("failed to post message. 
" -                "Target handler %d still registered, but object gone.", -                msg->target()); -        response->clear(); -        return -ENOENT; -    } - -    Mutex::Autolock autoLock(mLock); - -    uint32_t replyID = mNextReplyID++; - -    msg->setInt32("replyID", replyID); - -    looper->post(msg, 0 /* delayUs */); - -    ssize_t index; -    while ((index = mReplies.indexOfKey(replyID)) < 0) { -        mRepliesCondition.wait(mLock); -    } - -    *response = mReplies.valueAt(index); -    mReplies.removeItemsAt(index); - -    return OK; -} - -void ALooperRoster::postReply(uint32_t replyID, const sp<AMessage> &reply) { -    Mutex::Autolock autoLock(mLock); - -    CHECK(mReplies.indexOfKey(replyID) < 0); -    mReplies.add(replyID, reply); -    mRepliesCondition.broadcast(); -} -  static void makeFourCC(uint32_t fourcc, char *s) {      s[0] = (fourcc >> 24) & 0xff;      if (s[0]) { @@ -225,7 +115,7 @@ static void makeFourCC(uint32_t fourcc, char *s) {  void ALooperRoster::dump(int fd, const Vector<String16>& args) {      bool clear = false;      bool oldVerbose = verboseStats; -    for (size_t i = 0;i < args.size(); i++) { +    for (size_t i = 0; i < args.size(); i++) {          if (args[i] == String16("-c")) {              clear = true;          } else if (args[i] == String16("-von")) { @@ -241,22 +131,23 @@ void ALooperRoster::dump(int fd, const Vector<String16>& args) {      Mutex::Autolock autoLock(mLock);      size_t n = mHandlers.size(); -    s.appendFormat(" %zd registered handlers:\n", n); +    s.appendFormat(" %zu registered handlers:\n", n);      for (size_t i = 0; i < n; i++) { -        s.appendFormat("  %zd: ", i); +        s.appendFormat("  %d: ", mHandlers.keyAt(i));          HandlerInfo &info = mHandlers.editValueAt(i);          sp<ALooper> looper = info.mLooper.promote();          if (looper != NULL) { -            s.append(looper->mName.c_str()); +            s.append(looper->getName());              sp<AHandler> handler = 
info.mHandler.promote();              if (handler != NULL) { +                handler->mVerboseStats = verboseStats;                  s.appendFormat(": %u messages processed", handler->mMessageCounter);                  if (verboseStats) {                      for (size_t j = 0; j < handler->mMessages.size(); j++) {                          char fourcc[15];                          makeFourCC(handler->mMessages.keyAt(j), fourcc); -                        s.appendFormat("\n    %s: %d", +                        s.appendFormat("\n    %s: %u",                                  fourcc,                                  handler->mMessages.valueAt(j));                      } diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp index 1f46bc9..e549ff6 100644 --- a/media/libstagefright/foundation/AMessage.cpp +++ b/media/libstagefright/foundation/AMessage.cpp @@ -27,6 +27,7 @@  #include "ABuffer.h"  #include "ADebug.h"  #include "ALooperRoster.h" +#include "AHandler.h"  #include "AString.h"  #include <binder/Parcel.h> @@ -36,10 +37,27 @@ namespace android {  extern ALooperRoster gLooperRoster; -AMessage::AMessage(uint32_t what, ALooper::handler_id target) +status_t AReplyToken::setReply(const sp<AMessage> &reply) { +    if (mReplied) { +        ALOGE("trying to post a duplicate reply"); +        return -EBUSY; +    } +    CHECK(mReply == NULL); +    mReply = reply; +    mReplied = true; +    return OK; +} + +AMessage::AMessage(void) +    : mWhat(0), +      mTarget(0), +      mNumItems(0) { +} + +AMessage::AMessage(uint32_t what, const sp<const AHandler> &handler)      : mWhat(what), -      mTarget(target),        mNumItems(0) { +    setTarget(handler);  }  AMessage::~AMessage() { @@ -54,12 +72,16 @@ uint32_t AMessage::what() const {      return mWhat;  } -void AMessage::setTarget(ALooper::handler_id handlerID) { -    mTarget = handlerID; -} - -ALooper::handler_id AMessage::target() const { -    return mTarget; +void 
AMessage::setTarget(const sp<const AHandler> &handler) { +    if (handler == NULL) { +        mTarget = 0; +        mHandler.clear(); +        mLooper.clear(); +    } else { +        mTarget = handler->id(); +        mHandler = handler->getHandler(); +        mLooper = handler->getLooper(); +    }  }  void AMessage::clear() { @@ -322,33 +344,76 @@ bool AMessage::findRect(      return true;  } -void AMessage::post(int64_t delayUs) { -    gLooperRoster.postMessage(this, delayUs); +void AMessage::deliver() { +    sp<AHandler> handler = mHandler.promote(); +    if (handler == NULL) { +        ALOGW("failed to deliver message as target handler %d is gone.", mTarget); +        return; +    } + +    handler->deliverMessage(this); +} + +status_t AMessage::post(int64_t delayUs) { +    sp<ALooper> looper = mLooper.promote(); +    if (looper == NULL) { +        ALOGW("failed to post message as target looper for handler %d is gone.", mTarget); +        return -ENOENT; +    } + +    looper->post(this, delayUs); +    return OK;  }  status_t AMessage::postAndAwaitResponse(sp<AMessage> *response) { -    return gLooperRoster.postAndAwaitResponse(this, response); +    sp<ALooper> looper = mLooper.promote(); +    if (looper == NULL) { +        ALOGW("failed to post message as target looper for handler %d is gone.", mTarget); +        return -ENOENT; +    } + +    sp<AReplyToken> token = looper->createReplyToken(); +    if (token == NULL) { +        ALOGE("failed to create reply token"); +        return -ENOMEM; +    } +    setObject("replyID", token); + +    looper->post(this, 0 /* delayUs */); +    return looper->awaitResponse(token, response);  } -void AMessage::postReply(uint32_t replyID) { -    gLooperRoster.postReply(replyID, this); +status_t AMessage::postReply(const sp<AReplyToken> &replyToken) { +    if (replyToken == NULL) { +        ALOGW("failed to post reply to a NULL token"); +        return -ENOENT; +    } +    sp<ALooper> looper = replyToken->getLooper(); +    if 
(looper == NULL) { +        ALOGW("failed to post reply as target looper is gone."); +        return -ENOENT; +    } +    return looper->postReply(replyToken, this);  } -bool AMessage::senderAwaitsResponse(uint32_t *replyID) const { -    int32_t tmp; -    bool found = findInt32("replyID", &tmp); +bool AMessage::senderAwaitsResponse(sp<AReplyToken> *replyToken) { +    sp<RefBase> tmp; +    bool found = findObject("replyID", &tmp);      if (!found) {          return false;      } -    *replyID = static_cast<uint32_t>(tmp); +    *replyToken = static_cast<AReplyToken *>(tmp.get()); +    tmp.clear(); +    setObject("replyID", tmp); +    // TODO: delete Object instead of setting it to NULL -    return true; +    return *replyToken != NULL;  }  sp<AMessage> AMessage::dup() const { -    sp<AMessage> msg = new AMessage(mWhat, mTarget); +    sp<AMessage> msg = new AMessage(mWhat, mHandler.promote());      msg->mNumItems = mNumItems;  #ifdef DUMP_STATS @@ -532,7 +597,8 @@ AString AMessage::debugString(int32_t indent) const {  // static  sp<AMessage> AMessage::FromParcel(const Parcel &parcel) {      int32_t what = parcel.readInt32(); -    sp<AMessage> msg = new AMessage(what); +    sp<AMessage> msg = new AMessage(); +    msg->setWhat(what);      msg->mNumItems = static_cast<size_t>(parcel.readInt32());      for (size_t i = 0; i < msg->mNumItems; ++i) { diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk index 08355c7..c68264c 100644 --- a/media/libstagefright/foundation/Android.mk +++ b/media/libstagefright/foundation/Android.mk @@ -29,7 +29,8 @@ LOCAL_SHARED_LIBRARIES := \          liblog            \          libpowermanager -LOCAL_CFLAGS += -Wno-multichar -Werror +LOCAL_CFLAGS += -Wno-multichar -Werror -Wall +LOCAL_CLANG := true  LOCAL_MODULE:= libstagefright_foundation diff --git a/media/libstagefright/http/Android.mk b/media/libstagefright/http/Android.mk index 7f3307d..5fb51c1 100644 --- 
a/media/libstagefright/http/Android.mk +++ b/media/libstagefright/http/Android.mk @@ -21,7 +21,8 @@ LOCAL_MODULE:= libstagefright_http_support  LOCAL_CFLAGS += -Wno-multichar -LOCAL_CFLAGS += -Werror +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp index bb89567..801ff26 100644 --- a/media/libstagefright/http/MediaHTTP.cpp +++ b/media/libstagefright/http/MediaHTTP.cpp @@ -30,12 +30,11 @@  namespace android {  MediaHTTP::MediaHTTP(const sp<IMediaHTTPConnection> &conn) -    : mInitCheck(NO_INIT), +    : mInitCheck((conn != NULL) ? OK : NO_INIT),        mHTTPConnection(conn),        mCachedSizeValid(false),        mCachedSize(0ll),        mDrmManagerClient(NULL) { -    mInitCheck = OK;  }  MediaHTTP::~MediaHTTP() { @@ -54,7 +53,10 @@ status_t MediaHTTP::connect(      if (headers != NULL) {          extHeaders = *headers;      } -    extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str())); + +    if (extHeaders.indexOfKey(String8("User-Agent")) < 0) { +        extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str())); +    }      bool success = mHTTPConnection->connect(uri, &extHeaders); @@ -171,6 +173,10 @@ void MediaHTTP::getDrmInfo(  }  String8 MediaHTTP::getUri() { +    if (mInitCheck != OK) { +        return String8::empty(); +    } +      String8 uri;      if (OK == mHTTPConnection->getUri(&uri)) {          return uri; diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk index 93b7935..fc85835 100644 --- a/media/libstagefright/httplive/Android.mk +++ b/media/libstagefright/httplive/Android.mk @@ -3,6 +3,7 @@ LOCAL_PATH:= $(call my-dir)  include $(CLEAR_VARS)  LOCAL_SRC_FILES:=               \ +        HTTPDownloader.cpp      \          LiveDataSource.cpp      \          LiveSession.cpp         \          M3UParser.cpp           \ @@ -12,7 +13,8 @@ 
LOCAL_C_INCLUDES:= \  	$(TOP)/frameworks/av/media/libstagefright \  	$(TOP)/frameworks/native/include/media/openmax -LOCAL_CFLAGS += -Werror +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  LOCAL_SHARED_LIBRARIES := \          libbinder \ diff --git a/media/libstagefright/httplive/HTTPDownloader.cpp b/media/libstagefright/httplive/HTTPDownloader.cpp new file mode 100644 index 0000000..3b44bae --- /dev/null +++ b/media/libstagefright/httplive/HTTPDownloader.cpp @@ -0,0 +1,273 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "HTTPDownloader" +#include <utils/Log.h> + +#include "HTTPDownloader.h" +#include "M3UParser.h" + +#include <media/IMediaHTTPConnection.h> +#include <media/IMediaHTTPService.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaHTTP.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/FileSource.h> +#include <openssl/aes.h> +#include <openssl/md5.h> +#include <utils/Mutex.h> + +namespace android { + +HTTPDownloader::HTTPDownloader( +        const sp<IMediaHTTPService> &httpService, +        const KeyedVector<String8, String8> &headers) : +    mHTTPDataSource(new MediaHTTP(httpService->makeHTTPConnection())), +    mExtraHeaders(headers), +    mDisconnecting(false) { +} + +void HTTPDownloader::reconnect() { +    AutoMutex _l(mLock); +    mDisconnecting = false; +} + +void HTTPDownloader::disconnect() { +    { +        AutoMutex _l(mLock); +        mDisconnecting = true; +    } +    mHTTPDataSource->disconnect(); +} + +bool HTTPDownloader::isDisconnecting() { +    AutoMutex _l(mLock); +    return mDisconnecting; +} + +/* + * Illustration of parameters: + * + * 0      `range_offset` + * +------------+-------------------------------------------------------+--+--+ + * |            |                                 | next block to fetch |  |  | + * |            | `source` handle => `out` buffer |                     |  |  | + * | `url` file |<--------- buffer size --------->|<--- `block_size` -->|  |  | + * |            |<----------- `range_length` / buffer capacity ----------->|  | + * |<------------------------------ file_size ------------------------------->| + * + * Special parameter values: + * - range_length == -1 means entire file + * - block_size == 0 means entire range + * + */ +ssize_t HTTPDownloader::fetchBlock( +        const char *url, sp<ABuffer> *out, +        int64_t range_offset, int64_t range_length, +  
      uint32_t block_size, /* download block size */ +        String8 *actualUrl, +        bool reconnect /* force connect HTTP when resuing source */) { +    if (isDisconnecting()) { +        return ERROR_NOT_CONNECTED; +    } + +    off64_t size; + +    if (reconnect) { +        if (!strncasecmp(url, "file://", 7)) { +            mDataSource = new FileSource(url + 7); +        } else if (strncasecmp(url, "http://", 7) +                && strncasecmp(url, "https://", 8)) { +            return ERROR_UNSUPPORTED; +        } else { +            KeyedVector<String8, String8> headers = mExtraHeaders; +            if (range_offset > 0 || range_length >= 0) { +                headers.add( +                        String8("Range"), +                        String8( +                            AStringPrintf( +                                "bytes=%lld-%s", +                                range_offset, +                                range_length < 0 +                                    ? "" : AStringPrintf("%lld", +                                            range_offset + range_length - 1).c_str()).c_str())); +            } + +            status_t err = mHTTPDataSource->connect(url, &headers); + +            if (isDisconnecting()) { +                return ERROR_NOT_CONNECTED; +            } + +            if (err != OK) { +                return err; +            } + +            mDataSource = mHTTPDataSource; +        } +    } + +    status_t getSizeErr = mDataSource->getSize(&size); + +    if (isDisconnecting()) { +        return ERROR_NOT_CONNECTED; +    } + +    if (getSizeErr != OK) { +        size = 65536; +    } + +    sp<ABuffer> buffer = *out != NULL ? 
*out : new ABuffer(size); +    if (*out == NULL) { +        buffer->setRange(0, 0); +    } + +    ssize_t bytesRead = 0; +    // adjust range_length if only reading partial block +    if (block_size > 0 && (range_length == -1 || (int64_t)(buffer->size() + block_size) < range_length)) { +        range_length = buffer->size() + block_size; +    } +    for (;;) { +        // Only resize when we don't know the size. +        size_t bufferRemaining = buffer->capacity() - buffer->size(); +        if (bufferRemaining == 0 && getSizeErr != OK) { +            size_t bufferIncrement = buffer->size() / 2; +            if (bufferIncrement < 32768) { +                bufferIncrement = 32768; +            } +            bufferRemaining = bufferIncrement; + +            ALOGV("increasing download buffer to %zu bytes", +                 buffer->size() + bufferRemaining); + +            sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining); +            memcpy(copy->data(), buffer->data(), buffer->size()); +            copy->setRange(0, buffer->size()); + +            buffer = copy; +        } + +        size_t maxBytesToRead = bufferRemaining; +        if (range_length >= 0) { +            int64_t bytesLeftInRange = range_length - buffer->size(); +            if (bytesLeftInRange < (int64_t)maxBytesToRead) { +                maxBytesToRead = bytesLeftInRange; + +                if (bytesLeftInRange == 0) { +                    break; +                } +            } +        } + +        // The DataSource is responsible for informing us of error (n < 0) or eof (n == 0) +        // to help us break out of the loop. 
+        ssize_t n = mDataSource->readAt( +                buffer->size(), buffer->data() + buffer->size(), +                maxBytesToRead); + +        if (isDisconnecting()) { +            return ERROR_NOT_CONNECTED; +        } + +        if (n < 0) { +            return n; +        } + +        if (n == 0) { +            break; +        } + +        buffer->setRange(0, buffer->size() + (size_t)n); +        bytesRead += n; +    } + +    *out = buffer; +    if (actualUrl != NULL) { +        *actualUrl = mDataSource->getUri(); +        if (actualUrl->isEmpty()) { +            *actualUrl = url; +        } +    } + +    return bytesRead; +} + +ssize_t HTTPDownloader::fetchFile( +        const char *url, sp<ABuffer> *out, String8 *actualUrl) { +    ssize_t err = fetchBlock(url, out, 0, -1, 0, actualUrl, true /* reconnect */); + +    // close off the connection after use +    mHTTPDataSource->disconnect(); + +    return err; +} + +sp<M3UParser> HTTPDownloader::fetchPlaylist( +        const char *url, uint8_t *curPlaylistHash, bool *unchanged) { +    ALOGV("fetchPlaylist '%s'", url); + +    *unchanged = false; + +    sp<ABuffer> buffer; +    String8 actualUrl; +    ssize_t err = fetchFile(url, &buffer, &actualUrl); + +    // close off the connection after use +    mHTTPDataSource->disconnect(); + +    if (err <= 0) { +        return NULL; +    } + +    // MD5 functionality is not available on the simulator, treat all +    // playlists as changed. 
+ +#if defined(HAVE_ANDROID_OS) +    uint8_t hash[16]; + +    MD5_CTX m; +    MD5_Init(&m); +    MD5_Update(&m, buffer->data(), buffer->size()); + +    MD5_Final(hash, &m); + +    if (curPlaylistHash != NULL && !memcmp(hash, curPlaylistHash, 16)) { +        // playlist unchanged +        *unchanged = true; + +        return NULL; +    } + +    if (curPlaylistHash != NULL) { +        memcpy(curPlaylistHash, hash, sizeof(hash)); +    } +#endif + +    sp<M3UParser> playlist = +        new M3UParser(actualUrl.string(), buffer->data(), buffer->size()); + +    if (playlist->initCheck() != OK) { +        ALOGE("failed to parse .m3u8 playlist"); + +        return NULL; +    } + +    return playlist; +} + +}  // namespace android diff --git a/media/libstagefright/httplive/HTTPDownloader.h b/media/libstagefright/httplive/HTTPDownloader.h new file mode 100644 index 0000000..1db4a48 --- /dev/null +++ b/media/libstagefright/httplive/HTTPDownloader.h @@ -0,0 +1,86 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef HTTP_DOWNLOADER_H_ + +#define HTTP_DOWNLOADER_H_ + +#include <media/stagefright/foundation/ADebug.h> +#include <utils/KeyedVector.h> +#include <utils/Mutex.h> +#include <utils/RefBase.h> + +namespace android { + +struct ABuffer; +class DataSource; +struct HTTPBase; +struct IMediaHTTPService; +struct M3UParser; + +struct HTTPDownloader : public RefBase { +    HTTPDownloader( +            const sp<IMediaHTTPService> &httpService, +            const KeyedVector<String8, String8> &headers); + +    void reconnect(); +    void disconnect(); +    bool isDisconnecting(); +    // If given a non-zero block_size (default 0), it is used to cap the number of +    // bytes read in from the DataSource. If given a non-NULL buffer, new content +    // is read into the end. +    // +    // The DataSource we read from is responsible for signaling error or EOF to help us +    // break out of the read loop. The DataSource can be returned to the caller, so +    // that the caller can reuse it for subsequent fetches (within the initially +    // requested range). +    // +    // For reused HTTP sources, the caller must download a file sequentially without +    // any overlaps or gaps to prevent reconnection. 
+    ssize_t fetchBlock( +            const char *url, +            sp<ABuffer> *out, +            int64_t range_offset, /* open file at range_offset */ +            int64_t range_length, /* open file for range_length (-1: entire file) */ +            uint32_t block_size,  /* download block size (0: entire range) */ +            String8 *actualUrl,   /* returns actual URL */ +            bool reconnect        /* force connect http */ +            ); + +    // simplified version to fetch a single file +    ssize_t fetchFile( +            const char *url, +            sp<ABuffer> *out, +            String8 *actualUrl = NULL); + +    // fetch a playlist file +    sp<M3UParser> fetchPlaylist( +            const char *url, uint8_t *curPlaylistHash, bool *unchanged); + +private: +    sp<HTTPBase> mHTTPDataSource; +    sp<DataSource> mDataSource; +    KeyedVector<String8, String8> mExtraHeaders; + +    Mutex mLock; +    bool mDisconnecting; + +    DISALLOW_EVIL_CONSTRUCTORS(HTTPDownloader); +}; + +}  // namespace android + +#endif  // HTTP_DOWNLOADER_H_ diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index d0f3bc2..1557401 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -19,24 +19,19 @@  #include <utils/Log.h>  #include "LiveSession.h" - +#include "HTTPDownloader.h"  #include "M3UParser.h"  #include "PlaylistFetcher.h" -#include "include/HTTPBase.h"  #include "mpeg2ts/AnotherPacketSource.h"  #include <cutils/properties.h> -#include <media/IMediaHTTPConnection.h>  #include <media/IMediaHTTPService.h> -#include <media/stagefright/foundation/hexdump.h>  #include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AMessage.h> -#include <media/stagefright/DataSource.h> -#include <media/stagefright/FileSource.h> -#include <media/stagefright/MediaErrors.h> -#include 
<media/stagefright/MediaHTTP.h> +#include <media/stagefright/foundation/AUtils.h> +#include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MetaData.h>  #include <media/stagefright/Utils.h> @@ -44,13 +39,239 @@  #include <ctype.h>  #include <inttypes.h> -#include <openssl/aes.h> -#include <openssl/md5.h>  namespace android { -// Number of recently-read bytes to use for bandwidth estimation -const size_t LiveSession::kBandwidthHistoryBytes = 200 * 1024; +// static +// Bandwidth Switch Mark Defaults +const int64_t LiveSession::kUpSwitchMarkUs = 15000000ll; +const int64_t LiveSession::kDownSwitchMarkUs = 20000000ll; +const int64_t LiveSession::kUpSwitchMarginUs = 5000000ll; +const int64_t LiveSession::kResumeThresholdUs = 100000ll; + +// Buffer Prepare/Ready/Underflow Marks +const int64_t LiveSession::kReadyMarkUs = 5000000ll; +const int64_t LiveSession::kPrepareMarkUs = 1500000ll; +const int64_t LiveSession::kUnderflowMarkUs = 1000000ll; + +struct LiveSession::BandwidthEstimator : public RefBase { +    BandwidthEstimator(); + +    void addBandwidthMeasurement(size_t numBytes, int64_t delayUs); +    bool estimateBandwidth( +            int32_t *bandwidth, +            bool *isStable = NULL, +            int32_t *shortTermBps = NULL); + +private: +    // Bandwidth estimation parameters +    static const int32_t kShortTermBandwidthItems = 3; +    static const int32_t kMinBandwidthHistoryItems = 20; +    static const int64_t kMinBandwidthHistoryWindowUs = 5000000ll; // 5 sec +    static const int64_t kMaxBandwidthHistoryWindowUs = 30000000ll; // 30 sec +    static const int64_t kMaxBandwidthHistoryAgeUs = 60000000ll; // 60 sec + +    struct BandwidthEntry { +        int64_t mTimestampUs; +        int64_t mDelayUs; +        size_t mNumBytes; +    }; + +    Mutex mLock; +    List<BandwidthEntry> mBandwidthHistory; +    List<int32_t> mPrevEstimates; +    int32_t mShortTermEstimate; +    bool mHasNewSample; +    bool mIsStable; +    int64_t mTotalTransferTimeUs; 
+    size_t mTotalTransferBytes; + +    DISALLOW_EVIL_CONSTRUCTORS(BandwidthEstimator); +}; + +LiveSession::BandwidthEstimator::BandwidthEstimator() : +    mShortTermEstimate(0), +    mHasNewSample(false), +    mIsStable(true), +    mTotalTransferTimeUs(0), +    mTotalTransferBytes(0) { +} + +void LiveSession::BandwidthEstimator::addBandwidthMeasurement( +        size_t numBytes, int64_t delayUs) { +    AutoMutex autoLock(mLock); + +    int64_t nowUs = ALooper::GetNowUs(); +    BandwidthEntry entry; +    entry.mTimestampUs = nowUs; +    entry.mDelayUs = delayUs; +    entry.mNumBytes = numBytes; +    mTotalTransferTimeUs += delayUs; +    mTotalTransferBytes += numBytes; +    mBandwidthHistory.push_back(entry); +    mHasNewSample = true; + +    // Remove no more than 10% of total transfer time at a time +    // to avoid sudden jump on bandwidth estimation. There might +    // be long blocking reads that takes up signification time, +    // we have to keep a longer window in that case. +    int64_t bandwidthHistoryWindowUs = mTotalTransferTimeUs * 9 / 10; +    if (bandwidthHistoryWindowUs < kMinBandwidthHistoryWindowUs) { +        bandwidthHistoryWindowUs = kMinBandwidthHistoryWindowUs; +    } else if (bandwidthHistoryWindowUs > kMaxBandwidthHistoryWindowUs) { +        bandwidthHistoryWindowUs = kMaxBandwidthHistoryWindowUs; +    } +    // trim old samples, keeping at least kMaxBandwidthHistoryItems samples, +    // and total transfer time at least kMaxBandwidthHistoryWindowUs. 
+    while (mBandwidthHistory.size() > kMinBandwidthHistoryItems) { +        List<BandwidthEntry>::iterator it = mBandwidthHistory.begin(); +        // remove sample if either absolute age or total transfer time is +        // over kMaxBandwidthHistoryWindowUs +        if (nowUs - it->mTimestampUs < kMaxBandwidthHistoryAgeUs && +                mTotalTransferTimeUs - it->mDelayUs < bandwidthHistoryWindowUs) { +            break; +        } +        mTotalTransferTimeUs -= it->mDelayUs; +        mTotalTransferBytes -= it->mNumBytes; +        mBandwidthHistory.erase(mBandwidthHistory.begin()); +    } +} + +bool LiveSession::BandwidthEstimator::estimateBandwidth( +        int32_t *bandwidthBps, bool *isStable, int32_t *shortTermBps) { +    AutoMutex autoLock(mLock); + +    if (mBandwidthHistory.size() < 2) { +        return false; +    } + +    if (!mHasNewSample) { +        *bandwidthBps = *(--mPrevEstimates.end()); +        if (isStable) { +            *isStable = mIsStable; +        } +        if (shortTermBps) { +            *shortTermBps = mShortTermEstimate; +        } +        return true; +    } + +    *bandwidthBps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs); +    mPrevEstimates.push_back(*bandwidthBps); +    while (mPrevEstimates.size() > 3) { +        mPrevEstimates.erase(mPrevEstimates.begin()); +    } +    mHasNewSample = false; + +    int64_t totalTimeUs = 0; +    size_t totalBytes = 0; +    if (mBandwidthHistory.size() >= kShortTermBandwidthItems) { +        List<BandwidthEntry>::iterator it = --mBandwidthHistory.end(); +        for (size_t i = 0; i < kShortTermBandwidthItems; i++, it--) { +            totalTimeUs += it->mDelayUs; +            totalBytes += it->mNumBytes; +        } +    } +    mShortTermEstimate = totalTimeUs > 0 ? 
+            (totalBytes * 8E6 / totalTimeUs) : *bandwidthBps; +    if (shortTermBps) { +        *shortTermBps = mShortTermEstimate; +    } + +    int32_t minEstimate = -1, maxEstimate = -1; +    List<int32_t>::iterator it; +    for (it = mPrevEstimates.begin(); it != mPrevEstimates.end(); it++) { +        int32_t estimate = *it; +        if (minEstimate < 0 || minEstimate > estimate) { +            minEstimate = estimate; +        } +        if (maxEstimate < 0 || maxEstimate < estimate) { +            maxEstimate = estimate; +        } +    } +    // consider it stable if long-term average is not jumping a lot +    // and short-term average is not much lower than long-term average +    mIsStable = (maxEstimate <= minEstimate * 4 / 3) +            && mShortTermEstimate > minEstimate * 7 / 10; +    if (isStable) { +        *isStable = mIsStable; +    } + +#if 0 +    { +        char dumpStr[1024] = {0}; +        size_t itemIdx = 0; +        size_t histSize = mBandwidthHistory.size(); +        sprintf(dumpStr, "estimate bps=%d stable=%d history (n=%d): {", +            *bandwidthBps, mIsStable, histSize); +        List<BandwidthEntry>::iterator it = mBandwidthHistory.begin(); +        for (; it != mBandwidthHistory.end(); ++it) { +            if (itemIdx > 50) { +                sprintf(dumpStr + strlen(dumpStr), +                        "...(%zd more items)... }", histSize - itemIdx); +                break; +            } +            sprintf(dumpStr + strlen(dumpStr), "%dk/%.3fs%s", +                it->mNumBytes / 1024, +                (double)it->mDelayUs * 1.0e-6, +                (it == (--mBandwidthHistory.end())) ? 
"}" : ", "); +            itemIdx++; +        } +        ALOGE(dumpStr); +    } +#endif +    return true; +} + +//static +const char *LiveSession::getKeyForStream(StreamType type) { +    switch (type) { +        case STREAMTYPE_VIDEO: +            return "timeUsVideo"; +        case STREAMTYPE_AUDIO: +            return "timeUsAudio"; +        case STREAMTYPE_SUBTITLES: +            return "timeUsSubtitle"; +        case STREAMTYPE_METADATA: +            return "timeUsMetadata"; // unused +        default: +            TRESPASS(); +    } +    return NULL; +} + +//static +const char *LiveSession::getNameForStream(StreamType type) { +    switch (type) { +        case STREAMTYPE_VIDEO: +            return "video"; +        case STREAMTYPE_AUDIO: +            return "audio"; +        case STREAMTYPE_SUBTITLES: +            return "subs"; +        case STREAMTYPE_METADATA: +            return "metadata"; +        default: +            break; +    } +    return "unknown"; +} + +//static +ATSParser::SourceType LiveSession::getSourceTypeForStream(StreamType type) { +    switch (type) { +        case STREAMTYPE_VIDEO: +            return ATSParser::VIDEO; +        case STREAMTYPE_AUDIO: +            return ATSParser::AUDIO; +        case STREAMTYPE_METADATA: +            return ATSParser::META; +        case STREAMTYPE_SUBTITLES: +        default: +            TRESPASS(); +    } +    return ATSParser::NUM_SOURCE_TYPES; // should not reach here +}  LiveSession::LiveSession(          const sp<AMessage> ¬ify, uint32_t flags, @@ -58,169 +279,95 @@ LiveSession::LiveSession(      : mNotify(notify),        mFlags(flags),        mHTTPService(httpService), +      mBuffering(false),        mInPreparationPhase(true), -      mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())), +      mPollBufferingGeneration(0), +      mPrevBufferPercentage(-1),        mCurBandwidthIndex(-1), +      mOrigBandwidthIndex(-1), +      mLastBandwidthBps(-1ll), +      
mLastBandwidthStable(false), +      mBandwidthEstimator(new BandwidthEstimator()), +      mMaxWidth(720), +      mMaxHeight(480),        mStreamMask(0),        mNewStreamMask(0),        mSwapMask(0), -      mCheckBandwidthGeneration(0),        mSwitchGeneration(0),        mSubtitleGeneration(0),        mLastDequeuedTimeUs(0ll),        mRealTimeBaseUs(0ll),        mReconfigurationInProgress(false),        mSwitchInProgress(false), -      mDisconnectReplyID(0), -      mSeekReplyID(0), +      mUpSwitchMark(kUpSwitchMarkUs), +      mDownSwitchMark(kDownSwitchMarkUs), +      mUpSwitchMargin(kUpSwitchMarginUs),        mFirstTimeUsValid(false),        mFirstTimeUs(0), -      mLastSeekTimeUs(0) { - +      mLastSeekTimeUs(0), +      mHasMetadata(false) {      mStreams[kAudioIndex] = StreamItem("audio");      mStreams[kVideoIndex] = StreamItem("video");      mStreams[kSubtitleIndex] = StreamItem("subtitles"); -    for (size_t i = 0; i < kMaxStreams; ++i) { -        mDiscontinuities.add(indexToType(i), new AnotherPacketSource(NULL /* meta */)); +    for (size_t i = 0; i < kNumSources; ++i) {          mPacketSources.add(indexToType(i), new AnotherPacketSource(NULL /* meta */));          mPacketSources2.add(indexToType(i), new AnotherPacketSource(NULL /* meta */)); -        mBuffering[i] = false;      } - -    size_t numHistoryItems = kBandwidthHistoryBytes / -            PlaylistFetcher::kDownloadBlockSize + 1; -    if (numHistoryItems < 5) { -        numHistoryItems = 5; -    } -    mHTTPDataSource->setBandwidthHistorySize(numHistoryItems);  }  LiveSession::~LiveSession() { +    if (mFetcherLooper != NULL) { +        mFetcherLooper->stop(); +    }  } -sp<ABuffer> LiveSession::createFormatChangeBuffer(bool swap) { -    ABuffer *discontinuity = new ABuffer(0); -    discontinuity->meta()->setInt32("discontinuity", ATSParser::DISCONTINUITY_FORMATCHANGE); -    discontinuity->meta()->setInt32("swapPacketSource", swap); -    discontinuity->meta()->setInt32("switchGeneration", 
mSwitchGeneration); -    discontinuity->meta()->setInt64("timeUs", -1); -    return discontinuity; -} - -void LiveSession::swapPacketSource(StreamType stream) { -    sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream); -    sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream); -    sp<AnotherPacketSource> tmp = aps; -    aps = aps2; -    aps2 = tmp; -    aps2->clear(); +int64_t LiveSession::calculateMediaTimeUs( +        int64_t firstTimeUs, int64_t timeUs, int32_t discontinuitySeq) { +    if (timeUs >= firstTimeUs) { +        timeUs -= firstTimeUs; +    } else { +        timeUs = 0; +    } +    timeUs += mLastSeekTimeUs; +    if (mDiscontinuityOffsetTimesUs.indexOfKey(discontinuitySeq) >= 0) { +        timeUs += mDiscontinuityOffsetTimesUs.valueFor(discontinuitySeq); +    } +    return timeUs;  }  status_t LiveSession::dequeueAccessUnit(          StreamType stream, sp<ABuffer> *accessUnit) { -    if (!(mStreamMask & stream)) { -        // return -EWOULDBLOCK to avoid halting the decoder -        // when switching between audio/video and audio only. 
-        return -EWOULDBLOCK; -    } - -    status_t finalResult; -    sp<AnotherPacketSource> discontinuityQueue  = mDiscontinuities.valueFor(stream); -    if (discontinuityQueue->hasBufferAvailable(&finalResult)) { -        discontinuityQueue->dequeueAccessUnit(accessUnit); -        // seeking, track switching -        sp<AMessage> extra; -        int64_t timeUs; -        if ((*accessUnit)->meta()->findMessage("extra", &extra) -                && extra != NULL -                && extra->findInt64("timeUs", &timeUs)) { -            // seeking only -            mLastSeekTimeUs = timeUs; -            mDiscontinuityOffsetTimesUs.clear(); -            mDiscontinuityAbsStartTimesUs.clear(); -        } -        return INFO_DISCONTINUITY; -    } - +    status_t finalResult = OK;      sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(stream); -    ssize_t idx = typeToIndex(stream); -    if (!packetSource->hasBufferAvailable(&finalResult)) { +    ssize_t streamIdx = typeToIndex(stream); +    if (streamIdx < 0) { +        return BAD_VALUE; +    } +    const char *streamStr = getNameForStream(stream); +    // Do not let client pull data if we don't have data packets yet. +    // We might only have a format discontinuity queued without data. +    // When NuPlayerDecoder dequeues the format discontinuity, it will +    // immediately try to getFormat. If we return NULL, NuPlayerDecoder +    // thinks it can do seamless change, so will not shutdown decoder. +    // When the actual format arrives, it can't handle it and get stuck. 
+    if (!packetSource->hasDataBufferAvailable(&finalResult)) { +        ALOGV("[%s] dequeueAccessUnit: no buffer available (finalResult=%d)", +                streamStr, finalResult); +          if (finalResult == OK) { -            mBuffering[idx] = true;              return -EAGAIN;          } else {              return finalResult;          }      } -    int32_t targetDuration = 0; -    sp<AMessage> meta = packetSource->getLatestEnqueuedMeta(); -    if (meta != NULL) { -        meta->findInt32("targetDuration", &targetDuration); -    } - -    int64_t targetDurationUs = targetDuration * 1000000ll; -    if (targetDurationUs == 0 || -            targetDurationUs > PlaylistFetcher::kMinBufferedDurationUs) { -        // Fetchers limit buffering to -        // min(3 * targetDuration, kMinBufferedDurationUs) -        targetDurationUs = PlaylistFetcher::kMinBufferedDurationUs; -    } - -    if (mBuffering[idx]) { -        if (mSwitchInProgress -                || packetSource->isFinished(0) -                || packetSource->getEstimatedDurationUs() > targetDurationUs) { -            mBuffering[idx] = false; -        } -    } - -    if (mBuffering[idx]) { -        return -EAGAIN; -    } - -    // wait for counterpart -    sp<AnotherPacketSource> otherSource; -    uint32_t mask = mNewStreamMask & mStreamMask; -    uint32_t fetchersMask  = 0; -    for (size_t i = 0; i < mFetcherInfos.size(); ++i) { -        uint32_t fetcherMask = mFetcherInfos.valueAt(i).mFetcher->getStreamTypeMask(); -        fetchersMask |= fetcherMask; -    } -    mask &= fetchersMask; -    if (stream == STREAMTYPE_AUDIO && (mask & STREAMTYPE_VIDEO)) { -        otherSource = mPacketSources.valueFor(STREAMTYPE_VIDEO); -    } else if (stream == STREAMTYPE_VIDEO && (mask & STREAMTYPE_AUDIO)) { -        otherSource = mPacketSources.valueFor(STREAMTYPE_AUDIO); -    } -    if (otherSource != NULL && !otherSource->hasBufferAvailable(&finalResult)) { -        return finalResult == OK ? 
-EAGAIN : finalResult; -    } +    // Let the client dequeue as long as we have buffers available +    // Do not make pause/resume decisions here.      status_t err = packetSource->dequeueAccessUnit(accessUnit); -    size_t streamIdx; -    const char *streamStr; -    switch (stream) { -        case STREAMTYPE_AUDIO: -            streamIdx = kAudioIndex; -            streamStr = "audio"; -            break; -        case STREAMTYPE_VIDEO: -            streamIdx = kVideoIndex; -            streamStr = "video"; -            break; -        case STREAMTYPE_SUBTITLES: -            streamIdx = kSubtitleIndex; -            streamStr = "subs"; -            break; -        default: -            TRESPASS(); -    } - -    StreamItem& strm = mStreams[streamIdx];      if (err == INFO_DISCONTINUITY) {          // adaptive streaming, discontinuities in the playlist          int32_t type; @@ -235,50 +382,36 @@ status_t LiveSession::dequeueAccessUnit(                streamStr,                type,                extra == NULL ? 
"NULL" : extra->debugString().c_str()); - -        int32_t swap; -        if ((*accessUnit)->meta()->findInt32("swapPacketSource", &swap) && swap) { -            int32_t switchGeneration; -            CHECK((*accessUnit)->meta()->findInt32("switchGeneration", &switchGeneration)); -            { -                Mutex::Autolock lock(mSwapMutex); -                if (switchGeneration == mSwitchGeneration) { -                    swapPacketSource(stream); -                    sp<AMessage> msg = new AMessage(kWhatSwapped, id()); -                    msg->setInt32("stream", stream); -                    msg->setInt32("switchGeneration", switchGeneration); -                    msg->post(); -                } -            } -        } else { -            size_t seq = strm.mCurDiscontinuitySeq; -            int64_t offsetTimeUs; -            if (mDiscontinuityOffsetTimesUs.indexOfKey(seq) >= 0) { -                offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(seq); -            } else { -                offsetTimeUs = 0; -            } - -            seq += 1; -            if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) { -                int64_t firstTimeUs; -                firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq); -                offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs; -                offsetTimeUs += strm.mLastSampleDurationUs; -            } else { -                offsetTimeUs += strm.mLastSampleDurationUs; -            } - -            mDiscontinuityOffsetTimesUs.add(seq, offsetTimeUs); -        }      } else if (err == OK) {          if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) { -            int64_t timeUs; +            int64_t timeUs, originalTimeUs;              int32_t discontinuitySeq = 0; +            StreamItem& strm = mStreams[streamIdx];              CHECK((*accessUnit)->meta()->findInt64("timeUs",  &timeUs)); +            originalTimeUs = timeUs;     
         (*accessUnit)->meta()->findInt32("discontinuitySeq", &discontinuitySeq); -            strm.mCurDiscontinuitySeq = discontinuitySeq; +            if (discontinuitySeq > (int32_t) strm.mCurDiscontinuitySeq) { +                int64_t offsetTimeUs; +                if (mDiscontinuityOffsetTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) { +                    offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(strm.mCurDiscontinuitySeq); +                } else { +                    offsetTimeUs = 0; +                } + +                if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0 +                        && strm.mLastDequeuedTimeUs >= 0) { +                    int64_t firstTimeUs; +                    firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq); +                    offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs; +                    offsetTimeUs += strm.mLastSampleDurationUs; +                } else { +                    offsetTimeUs += strm.mLastSampleDurationUs; +                } + +                mDiscontinuityOffsetTimesUs.add(discontinuitySeq, offsetTimeUs); +                strm.mCurDiscontinuitySeq = discontinuitySeq; +            }              int32_t discard = 0;              int64_t firstTimeUs; @@ -299,17 +432,10 @@ status_t LiveSession::dequeueAccessUnit(              }              strm.mLastDequeuedTimeUs = timeUs; -            if (timeUs >= firstTimeUs) { -                timeUs -= firstTimeUs; -            } else { -                timeUs = 0; -            } -            timeUs += mLastSeekTimeUs; -            if (mDiscontinuityOffsetTimesUs.indexOfKey(discontinuitySeq) >= 0) { -                timeUs += mDiscontinuityOffsetTimesUs.valueFor(discontinuitySeq); -            } +            timeUs = calculateMediaTimeUs(firstTimeUs, timeUs, discontinuitySeq); -            ALOGV("[%s] read buffer at time %" PRId64 " us", streamStr, timeUs); +            
ALOGV("[%s] dequeueAccessUnit: time %lld us, original %lld us", +                    streamStr, (long long)timeUs, (long long)originalTimeUs);              (*accessUnit)->meta()->setInt64("timeUs",  timeUs);              mLastDequeuedTimeUs = timeUs;              mRealTimeBaseUs = ALooper::GetNowUs() - timeUs; @@ -322,6 +448,17 @@ status_t LiveSession::dequeueAccessUnit(              (*accessUnit)->meta()->setInt32(                      "trackIndex", mPlaylist->getSelectedIndex());              (*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs); +        } else if (stream == STREAMTYPE_METADATA) { +            HLSTime mdTime((*accessUnit)->meta()); +            if (mDiscontinuityAbsStartTimesUs.indexOfKey(mdTime.mSeq) < 0) { +                packetSource->requeueAccessUnit((*accessUnit)); +                return -EAGAIN; +            } else { +                int64_t firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(mdTime.mSeq); +                int64_t timeUs = calculateMediaTimeUs(firstTimeUs, mdTime.mTimeUs, mdTime.mSeq); +                (*accessUnit)->meta()->setInt64("timeUs",  timeUs); +                (*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs); +            }          }      } else {          ALOGI("[%s] encountered error %d", streamStr, err); @@ -331,7 +468,6 @@ status_t LiveSession::dequeueAccessUnit(  }  status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) { -    // No swapPacketSource race condition; called from the same thread as dequeueAccessUnit.      
if (!(mStreamMask & stream)) {          return UNKNOWN_ERROR;      } @@ -344,12 +480,24 @@ status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) {          return -EAGAIN;      } +    if (stream == STREAMTYPE_AUDIO) { +        // set AAC input buffer size to 32K bytes (256kbps x 1sec) +        meta->setInt32(kKeyMaxInputSize, 32 * 1024); +    } else if (stream == STREAMTYPE_VIDEO) { +        meta->setInt32(kKeyMaxWidth, mMaxWidth); +        meta->setInt32(kKeyMaxHeight, mMaxHeight); +    } +      return convertMetaDataToMessage(meta, format);  } +sp<HTTPDownloader> LiveSession::getHTTPDownloader() { +    return new HTTPDownloader(mHTTPService, mExtraHeaders); +} +  void LiveSession::connectAsync(          const char *url, const KeyedVector<String8, String8> *headers) { -    sp<AMessage> msg = new AMessage(kWhatConnect, id()); +    sp<AMessage> msg = new AMessage(kWhatConnect, this);      msg->setString("url", url);      if (headers != NULL) { @@ -362,7 +510,7 @@ void LiveSession::connectAsync(  }  status_t LiveSession::disconnect() { -    sp<AMessage> msg = new AMessage(kWhatDisconnect, id()); +    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);      sp<AMessage> response;      status_t err = msg->postAndAwaitResponse(&response); @@ -371,7 +519,7 @@ status_t LiveSession::disconnect() {  }  status_t LiveSession::seekTo(int64_t timeUs) { -    sp<AMessage> msg = new AMessage(kWhatSeek, id()); +    sp<AMessage> msg = new AMessage(kWhatSeek, this);      msg->setInt64("timeUs", timeUs);      sp<AMessage> response; @@ -380,6 +528,95 @@ status_t LiveSession::seekTo(int64_t timeUs) {      return err;  } +bool LiveSession::checkSwitchProgress( +        sp<AMessage> &stopParams, int64_t delayUs, bool *needResumeUntil) { +    AString newUri; +    CHECK(stopParams->findString("uri", &newUri)); + +    *needResumeUntil = false; +    sp<AMessage> firstNewMeta[kMaxStreams]; +    for (size_t i = 0; i < kMaxStreams; ++i) { +        StreamType stream = 
indexToType(i); +        if (!(mSwapMask & mNewStreamMask & stream) +            || (mStreams[i].mNewUri != newUri)) { +            continue; +        } +        if (stream == STREAMTYPE_SUBTITLES) { +            continue; +        } +        sp<AnotherPacketSource> &source = mPacketSources.editValueAt(i); + +        // First, get latest dequeued meta, which is where the decoder is at. +        // (when upswitching, we take the meta after a certain delay, so that +        // the decoder is left with some cushion) +        sp<AMessage> lastDequeueMeta, lastEnqueueMeta; +        if (delayUs > 0) { +            lastDequeueMeta = source->getMetaAfterLastDequeued(delayUs); +            if (lastDequeueMeta == NULL) { +                // this means we don't have enough cushion, try again later +                ALOGV("[%s] up switching failed due to insufficient buffer", +                        getNameForStream(stream)); +                return false; +            } +        } else { +            // It's okay for lastDequeueMeta to be NULL here, it means the +            // decoder hasn't even started dequeueing +            lastDequeueMeta = source->getLatestDequeuedMeta(); +        } +        // Then, trim off packets at beginning of mPacketSources2 that's before +        // the latest dequeued time. These samples are definitely too late. +        firstNewMeta[i] = mPacketSources2.editValueAt(i) +                            ->trimBuffersBeforeMeta(lastDequeueMeta); + +        // Now firstNewMeta[i] is the first sample after the trim. +        // If it's NULL, we failed because dequeue already past all samples +        // in mPacketSource2, we have to try again. 
+        if (firstNewMeta[i] == NULL) { +            HLSTime dequeueTime(lastDequeueMeta); +            ALOGV("[%s] dequeue time (%d, %lld) past start time", +                    getNameForStream(stream), +                    dequeueTime.mSeq, (long long) dequeueTime.mTimeUs); +            return false; +        } + +        // Otherwise, we check if mPacketSources2 overlaps with what old fetcher +        // already fetched, and see if we need to resumeUntil +        lastEnqueueMeta = source->getLatestEnqueuedMeta(); +        // lastEnqueueMeta == NULL means old fetcher stopped at a discontinuity +        // boundary, no need to resume as the content will look different anyways +        if (lastEnqueueMeta != NULL) { +            HLSTime lastTime(lastEnqueueMeta), startTime(firstNewMeta[i]); + +            // no need to resume old fetcher if new fetcher started in different +            // discontinuity sequence, as the content will look different. +            *needResumeUntil |= (startTime.mSeq == lastTime.mSeq +                    && startTime.mTimeUs - lastTime.mTimeUs > kResumeThresholdUs); + +            // update the stopTime for resumeUntil +            stopParams->setInt32("discontinuitySeq", startTime.mSeq); +            stopParams->setInt64(getKeyForStream(stream), startTime.mTimeUs); +        } +    } + +    // if we're here, it means dequeue progress hasn't passed some samples in +    // mPacketSource2, we can trim off the excess in mPacketSource. 
+    // (old fetcher might still need to resumeUntil the start time of new fetcher) +    for (size_t i = 0; i < kMaxStreams; ++i) { +        StreamType stream = indexToType(i); +        if (!(mSwapMask & mNewStreamMask & stream) +            || (newUri != mStreams[i].mNewUri) +            || stream == STREAMTYPE_SUBTITLES) { +            continue; +        } +        mPacketSources.valueFor(stream)->trimBuffersAfterMeta(firstNewMeta[i]); +    } + +    // no resumeUntil if already underflow +    *needResumeUntil &= !mBuffering; + +    return true; +} +  void LiveSession::onMessageReceived(const sp<AMessage> &msg) {      switch (msg->what()) {          case kWhatConnect: @@ -402,16 +639,15 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {          case kWhatSeek:          { -            uint32_t seekReplyID; -            CHECK(msg->senderAwaitsResponse(&seekReplyID)); -            mSeekReplyID = seekReplyID; -            mSeekReply = new AMessage; - -            status_t err = onSeek(msg); - -            if (err != OK) { +            if (mReconfigurationInProgress) {                  msg->post(50000); +                break;              } + +            CHECK(msg->senderAwaitsResponse(&mSeekReplyID)); +            mSeekReply = new AMessage; + +            onSeek(msg);              break;          } @@ -426,16 +662,30 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {                  case PlaylistFetcher::kWhatPaused:                  case PlaylistFetcher::kWhatStopped:                  { -                    if (what == PlaylistFetcher::kWhatStopped) { -                        AString uri; -                        CHECK(msg->findString("uri", &uri)); -                        if (mFetcherInfos.removeItem(uri) < 0) { -                            // ignore duplicated kWhatStopped messages. 
-                            break; -                        } +                    AString uri; +                    CHECK(msg->findString("uri", &uri)); +                    ssize_t index = mFetcherInfos.indexOfKey(uri); +                    if (index < 0) { +                        // ignore msgs from fetchers that's already gone +                        break; +                    } + +                    ALOGV("fetcher-%d %s", +                            mFetcherInfos[index].mFetcher->getFetcherID(), +                            what == PlaylistFetcher::kWhatPaused ? +                                    "paused" : "stopped"); -                        if (mSwitchInProgress) { -                            tryToFinishBandwidthSwitch(); +                    if (what == PlaylistFetcher::kWhatStopped) { +                        mFetcherLooper->unregisterHandler( +                                mFetcherInfos[index].mFetcher->id()); +                        mFetcherInfos.removeItemsAt(index); +                    } else if (what == PlaylistFetcher::kWhatPaused) { +                        int32_t seekMode; +                        CHECK(msg->findInt32("seekMode", &seekMode)); +                        for (size_t i = 0; i < kMaxStreams; ++i) { +                            if (mStreams[i].mUri == uri) { +                                mStreams[i].mSeekMode = (SeekMode) seekMode; +                            }                          }                      } @@ -443,15 +693,8 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {                          CHECK_GT(mContinuationCounter, 0);                          if (--mContinuationCounter == 0) {                              mContinuation->post(); - -                            if (mSeekReplyID != 0) { -                                CHECK(mSeekReply != NULL); -                                mSeekReply->setInt32("err", OK); -                                mSeekReply->postReply(mSeekReplyID); -           
                     mSeekReplyID = 0; -                                mSeekReply.clear(); -                            }                          } +                        ALOGV("%zu fetcher(s) left", mContinuationCounter);                      }                      break;                  } @@ -464,8 +707,21 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {                      int64_t durationUs;                      CHECK(msg->findInt64("durationUs", &durationUs)); -                    FetcherInfo *info = &mFetcherInfos.editValueFor(uri); -                    info->mDurationUs = durationUs; +                    ssize_t index = mFetcherInfos.indexOfKey(uri); +                    if (index >= 0) { +                        FetcherInfo *info = &mFetcherInfos.editValueFor(uri); +                        info->mDurationUs = durationUs; +                    } +                    break; +                } + +                case PlaylistFetcher::kWhatTargetDurationUpdate: +                { +                    int64_t targetDurationUs; +                    CHECK(msg->findInt64("targetDurationUs", &targetDurationUs)); +                    mUpSwitchMark = min(kUpSwitchMarkUs, targetDurationUs * 7 / 4); +                    mDownSwitchMark = min(kDownSwitchMarkUs, targetDurationUs * 9 / 4); +                    mUpSwitchMargin = min(kUpSwitchMarginUs, targetDurationUs);                      break;                  } @@ -493,6 +749,20 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {                          }                      } +                    // remember the failure index (as mCurBandwidthIndex will be restored +                    // after cancelBandwidthSwitch()), and record last fail time +                    size_t failureIndex = mCurBandwidthIndex; +                    mBandwidthItems.editItemAt( +                            failureIndex).mLastFailureUs = ALooper::GetNowUs(); + +                    if (mSwitchInProgress) 
{ +                        // if error happened when we switch to a variant, try fallback +                        // to other variant to save the session +                        if (tryBandwidthFallback()) { +                            break; +                        } +                    } +                      if (mInPreparationPhase) {                          postPrepared(err);                      } @@ -506,38 +776,23 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {                      mPacketSources.valueFor(                              STREAMTYPE_SUBTITLES)->signalEOS(err); -                    sp<AMessage> notify = mNotify->dup(); -                    notify->setInt32("what", kWhatError); -                    notify->setInt32("err", err); -                    notify->post(); +                    postError(err);                      break;                  } -                case PlaylistFetcher::kWhatTemporarilyDoneFetching: +                case PlaylistFetcher::kWhatStopReached:                  { -                    AString uri; -                    CHECK(msg->findString("uri", &uri)); +                    ALOGV("kWhatStopReached"); + +                    AString oldUri; +                    CHECK(msg->findString("uri", &oldUri)); -                    if (mFetcherInfos.indexOfKey(uri) < 0) { -                        ALOGE("couldn't find uri"); +                    ssize_t index = mFetcherInfos.indexOfKey(oldUri); +                    if (index < 0) {                          break;                      } -                    FetcherInfo *info = &mFetcherInfos.editValueFor(uri); -                    info->mIsPrepared = true; -                    if (mInPreparationPhase) { -                        bool allFetchersPrepared = true; -                        for (size_t i = 0; i < mFetcherInfos.size(); ++i) { -                            if (!mFetcherInfos.valueAt(i).mIsPrepared) { -                                allFetchersPrepared = 
false; -                                break; -                            } -                        } - -                        if (allFetchersPrepared) { -                            postPrepared(OK); -                        } -                    } +                    tryToFinishBandwidthSwitch(oldUri);                      break;                  } @@ -546,39 +801,104 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {                      int32_t switchGeneration;                      CHECK(msg->findInt32("switchGeneration", &switchGeneration)); +                    ALOGV("kWhatStartedAt: switchGen=%d, mSwitchGen=%d", +                            switchGeneration, mSwitchGeneration); +                      if (switchGeneration != mSwitchGeneration) {                          break;                      } -                    // Resume fetcher for the original variant; the resumed fetcher should -                    // continue until the timestamps found in msg, which is stored by the -                    // new fetcher to indicate where the new variant has started buffering. 
-                    for (size_t i = 0; i < mFetcherInfos.size(); i++) { -                        const FetcherInfo info = mFetcherInfos.valueAt(i); -                        if (info.mToBeRemoved) { -                            info.mFetcher->resumeUntilAsync(msg); +                    AString uri; +                    CHECK(msg->findString("uri", &uri)); + +                    // mark new fetcher mToBeResumed +                    ssize_t index = mFetcherInfos.indexOfKey(uri); +                    if (index >= 0) { +                        mFetcherInfos.editValueAt(index).mToBeResumed = true; +                    } + +                    // temporarily disable packet sources to be swapped to prevent +                    // NuPlayerDecoder from dequeuing while we check progress +                    for (size_t i = 0; i < mPacketSources.size(); ++i) { +                        if ((mSwapMask & mPacketSources.keyAt(i)) +                                && uri == mStreams[i].mNewUri) { +                            mPacketSources.editValueAt(i)->enable(false); +                        } +                    } +                    bool switchUp = (mCurBandwidthIndex > mOrigBandwidthIndex); +                    // If switching up, require a cushion bigger than kUnderflowMark +                    // to avoid buffering immediately after the switch. +                    // (If we don't have that cushion we'd rather cancel and try again.) +                    int64_t delayUs = switchUp ? 
(kUnderflowMarkUs + 1000000ll) : 0; +                    bool needResumeUntil = false; +                    sp<AMessage> stopParams = msg; +                    if (checkSwitchProgress(stopParams, delayUs, &needResumeUntil)) { +                        // playback time hasn't passed startAt time +                        if (!needResumeUntil) { +                            ALOGV("finish switch"); +                            for (size_t i = 0; i < kMaxStreams; ++i) { +                                if ((mSwapMask & indexToType(i)) +                                        && uri == mStreams[i].mNewUri) { +                                    // have to make a copy of mStreams[i].mUri because +                                    // tryToFinishBandwidthSwitch is modifying mStreams[] +                                    AString oldURI = mStreams[i].mUri; +                                    tryToFinishBandwidthSwitch(oldURI); +                                    break; +                                } +                            } +                        } else { +                            // startAt time is after last enqueue time +                            // Resume fetcher for the original variant; the resumed fetcher should +                            // continue until the timestamps found in msg, which is stored by the +                            // new fetcher to indicate where the new variant has started buffering. 
+                            ALOGV("finish switch with resumeUntilAsync"); +                            for (size_t i = 0; i < mFetcherInfos.size(); i++) { +                                const FetcherInfo &info = mFetcherInfos.valueAt(i); +                                if (info.mToBeRemoved) { +                                    info.mFetcher->resumeUntilAsync(stopParams); +                                } +                            } +                        } +                    } else { +                        // playback time passed startAt time +                        if (switchUp) { +                            // if switching up, cancel and retry if condition satisfies again +                            ALOGV("cancel up switch because we're too late"); +                            cancelBandwidthSwitch(true /* resume */); +                        } else { +                            ALOGV("retry down switch at next sample"); +                            resumeFetcher(uri, mSwapMask, -1, true /* newUri */);                          }                      } +                    // re-enable all packet sources +                    for (size_t i = 0; i < mPacketSources.size(); ++i) { +                        mPacketSources.editValueAt(i)->enable(true); +                    } +                      break;                  } -                default: -                    TRESPASS(); -            } - -            break; -        } +                case PlaylistFetcher::kWhatPlaylistFetched: +                { +                    onMasterPlaylistFetched(msg); +                    break; +                } -        case kWhatCheckBandwidth: -        { -            int32_t generation; -            CHECK(msg->findInt32("generation", &generation)); +                case PlaylistFetcher::kWhatMetadataDetected: +                { +                    if (!mHasMetadata) { +                        mHasMetadata = true; +                        sp<AMessage> 
notify = mNotify->dup(); +                        notify->setInt32("what", kWhatMetadataDetected); +                        notify->post(); +                    } +                    break; +                } -            if (generation != mCheckBandwidthGeneration) { -                break; +                default: +                    TRESPASS();              } -            onCheckBandwidth(msg);              break;          } @@ -600,27 +920,13 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {              break;          } -        case kWhatFinishDisconnect2: -        { -            onFinishDisconnect2(); -            break; -        } - -        case kWhatSwapped: +        case kWhatPollBuffering:          { -            onSwapped(msg); -            break; -        } - -        case kWhatCheckSwitchDown: -        { -            onCheckSwitchDown(); -            break; -        } - -        case kWhatSwitchDown: -        { -            onSwitchDown(); +            int32_t generation; +            CHECK(msg->findInt32("generation", &generation)); +            if (generation == mPollBufferingGeneration) { +                onPollBuffering(); +            }              break;          } @@ -631,6 +937,13 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {  }  // static +bool LiveSession::isBandwidthValid(const BandwidthItem &item) { +    static const int64_t kBlacklistWindowUs = 300 * 1000000ll; +    return item.mLastFailureUs < 0 +            || ALooper::GetNowUs() - item.mLastFailureUs > kBlacklistWindowUs; +} + +// static  int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) {      if (a->mBandwidth < b->mBandwidth) {          return -1; @@ -643,7 +956,7 @@ int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b)  // static  LiveSession::StreamType LiveSession::indexToType(int idx) { -    CHECK(idx >= 0 && idx < kMaxStreams); +    CHECK(idx >= 0 && idx < kNumSources);      
return (StreamType)(1 << idx);  } @@ -656,6 +969,8 @@ ssize_t LiveSession::typeToIndex(int32_t type) {              return 1;          case STREAMTYPE_SUBTITLES:              return 2; +        case STREAMTYPE_METADATA: +            return 3;          default:              return -1;      }; @@ -663,8 +978,10 @@ ssize_t LiveSession::typeToIndex(int32_t type) {  }  void LiveSession::onConnect(const sp<AMessage> &msg) { -    AString url; -    CHECK(msg->findString("url", &url)); +    CHECK(msg->findString("url", &mMasterURL)); + +    // TODO currently we don't know if we are coming here from incognito mode +    ALOGI("onConnect %s", uriDebugString(mMasterURL).c_str());      KeyedVector<String8, String8> *headers = NULL;      if (!msg->findPointer("headers", (void **)&headers)) { @@ -676,21 +993,39 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {          headers = NULL;      } -    // TODO currently we don't know if we are coming here from incognito mode -    ALOGI("onConnect %s", uriDebugString(url).c_str()); +    // create looper for fetchers +    if (mFetcherLooper == NULL) { +        mFetcherLooper = new ALooper(); -    mMasterURL = url; +        mFetcherLooper->setName("Fetcher"); +        mFetcherLooper->start(false, false); +    } -    bool dummy; -    mPlaylist = fetchPlaylist(url.c_str(), NULL /* curPlaylistHash */, &dummy); +    // create fetcher to fetch the master playlist +    addFetcher(mMasterURL.c_str())->fetchPlaylistAsync(); +} +void LiveSession::onMasterPlaylistFetched(const sp<AMessage> &msg) { +    AString uri; +    CHECK(msg->findString("uri", &uri)); +    ssize_t index = mFetcherInfos.indexOfKey(uri); +    if (index < 0) { +        ALOGW("fetcher for master playlist is gone."); +        return; +    } + +    // no longer useful, remove +    mFetcherLooper->unregisterHandler(mFetcherInfos[index].mFetcher->id()); +    mFetcherInfos.removeItemsAt(index); + +    CHECK(msg->findObject("playlist", (sp<RefBase> *)&mPlaylist));      if 
(mPlaylist == NULL) { -        ALOGE("unable to fetch master playlist %s.", uriDebugString(url).c_str()); +        ALOGE("unable to fetch master playlist %s.", +                uriDebugString(mMasterURL).c_str());          postPrepared(ERROR_IO);          return;      } -      // We trust the content provider to make a reasonable choice of preferred      // initial bandwidth by listing it first in the variant playlist.      // At startup we really don't have a good estimate on the available @@ -699,11 +1034,16 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {      size_t initialBandwidth = 0;      size_t initialBandwidthIndex = 0; +    int32_t maxWidth = 0; +    int32_t maxHeight = 0; +      if (mPlaylist->isVariantPlaylist()) { +        Vector<BandwidthItem> itemsWithVideo;          for (size_t i = 0; i < mPlaylist->size(); ++i) {              BandwidthItem item;              item.mPlaylistIndex = i; +            item.mLastFailureUs = -1ll;              sp<AMessage> meta;              AString uri; @@ -711,14 +1051,30 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {              CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth)); -            if (initialBandwidth == 0) { -                initialBandwidth = item.mBandwidth; +            int32_t width, height; +            if (meta->findInt32("width", &width)) { +                maxWidth = max(maxWidth, width); +            } +            if (meta->findInt32("height", &height)) { +                maxHeight = max(maxHeight, height);              }              mBandwidthItems.push(item); +            if (mPlaylist->hasType(i, "video")) { +                itemsWithVideo.push(item); +            } +        } +        // remove the audio-only variants if we have at least one with video +        if (!itemsWithVideo.empty() +                && itemsWithVideo.size() < mBandwidthItems.size()) { +            mBandwidthItems.clear(); +            for (size_t i = 0; i < itemsWithVideo.size(); ++i) 
{ +                mBandwidthItems.push(itemsWithVideo[i]); +            }          }          CHECK_GT(mBandwidthItems.size(), 0u); +        initialBandwidth = mBandwidthItems[0].mBandwidth;          mBandwidthItems.sort(SortByBandwidth); @@ -736,39 +1092,44 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {          mBandwidthItems.push(item);      } +    mMaxWidth = maxWidth > 0 ? maxWidth : mMaxWidth; +    mMaxHeight = maxHeight > 0 ? maxHeight : mMaxHeight; +      mPlaylist->pickRandomMediaItems();      changeConfiguration(              0ll /* timeUs */, initialBandwidthIndex, false /* pickTrack */);  }  void LiveSession::finishDisconnect() { +    ALOGV("finishDisconnect"); +      // No reconfiguration is currently pending, make sure none will trigger      // during disconnection either. -    cancelCheckBandwidthEvent(); - -    // Protect mPacketSources from a swapPacketSource race condition through disconnect. -    // (finishDisconnect, onFinishDisconnect2)      cancelBandwidthSwitch(); -    // cancel switch down monitor -    mSwitchDownMonitor.clear(); - +    // cancel buffer polling +    cancelPollBuffering(); + +    // TRICKY: don't wait for all fetcher to be stopped when disconnecting +    // +    // Some fetchers might be stuck in connect/getSize at this point. These +    // operations will eventually timeout (as we have a timeout set in +    // MediaHTTPConnection), but we don't want to block the main UI thread +    // until then. Here we just need to make sure we clear all references +    // to the fetchers, so that when they finally exit from the blocking +    // operation, they can be destructed. +    // +    // There is one very tricky point though. For this scheme to work, the +    // fecther must hold a reference to LiveSession, so that LiveSession is +    // destroyed after fetcher. Otherwise LiveSession would get stuck in its +    // own destructor when it waits for mFetcherLooper to stop, which still +    // blocks main UI thread.      
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {          mFetcherInfos.valueAt(i).mFetcher->stopAsync(); +        mFetcherLooper->unregisterHandler( +                mFetcherInfos.valueAt(i).mFetcher->id());      } - -    sp<AMessage> msg = new AMessage(kWhatFinishDisconnect2, id()); - -    mContinuationCounter = mFetcherInfos.size(); -    mContinuation = msg; - -    if (mContinuationCounter == 0) { -        msg->post(); -    } -} - -void LiveSession::onFinishDisconnect2() { -    mContinuation.clear(); +    mFetcherInfos.clear();      mPacketSources.valueFor(STREAMTYPE_AUDIO)->signalEOS(ERROR_END_OF_STREAM);      mPacketSources.valueFor(STREAMTYPE_VIDEO)->signalEOS(ERROR_END_OF_STREAM); @@ -780,7 +1141,7 @@ void LiveSession::onFinishDisconnect2() {      response->setInt32("err", OK);      response->postReply(mDisconnectReplyID); -    mDisconnectReplyID = 0; +    mDisconnectReplyID.clear();  }  sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) { @@ -790,213 +1151,185 @@ sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) {          return NULL;      } -    sp<AMessage> notify = new AMessage(kWhatFetcherNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatFetcherNotify, this);      notify->setString("uri", uri);      notify->setInt32("switchGeneration", mSwitchGeneration);      FetcherInfo info; -    info.mFetcher = new PlaylistFetcher(notify, this, uri, mSubtitleGeneration); +    info.mFetcher = new PlaylistFetcher( +            notify, this, uri, mCurBandwidthIndex, mSubtitleGeneration);      info.mDurationUs = -1ll; -    info.mIsPrepared = false;      info.mToBeRemoved = false; -    looper()->registerHandler(info.mFetcher); +    info.mToBeResumed = false; +    mFetcherLooper->registerHandler(info.mFetcher);      mFetcherInfos.add(uri, info);      return info.mFetcher;  } -/* - * Illustration of parameters: - * - * 0      `range_offset` - * +------------+-------------------------------------------------------+--+--+ - * |            | 
                                | next block to fetch |  |  | - * |            | `source` handle => `out` buffer |                     |  |  | - * | `url` file |<--------- buffer size --------->|<--- `block_size` -->|  |  | - * |            |<----------- `range_length` / buffer capacity ----------->|  | - * |<------------------------------ file_size ------------------------------->| - * - * Special parameter values: - * - range_length == -1 means entire file - * - block_size == 0 means entire range - * - */ -ssize_t LiveSession::fetchFile( -        const char *url, sp<ABuffer> *out, -        int64_t range_offset, int64_t range_length, -        uint32_t block_size, /* download block size */ -        sp<DataSource> *source, /* to return and reuse source */ -        String8 *actualUrl) { -    off64_t size; -    sp<DataSource> temp_source; -    if (source == NULL) { -        source = &temp_source; -    } - -    if (*source == NULL) { -        if (!strncasecmp(url, "file://", 7)) { -            *source = new FileSource(url + 7); -        } else if (strncasecmp(url, "http://", 7) -                && strncasecmp(url, "https://", 8)) { -            return ERROR_UNSUPPORTED; -        } else { -            KeyedVector<String8, String8> headers = mExtraHeaders; -            if (range_offset > 0 || range_length >= 0) { -                headers.add( -                        String8("Range"), -                        String8( -                            AStringPrintf( -                                "bytes=%lld-%s", -                                range_offset, -                                range_length < 0 -                                    ? 
"" : AStringPrintf("%lld", -                                            range_offset + range_length - 1).c_str()).c_str())); -            } -            status_t err = mHTTPDataSource->connect(url, &headers); - -            if (err != OK) { -                return err; -            } +#if 0 +static double uniformRand() { +    return (double)rand() / RAND_MAX; +} +#endif -            *source = mHTTPDataSource; -        } -    } +bool LiveSession::UriIsSameAsIndex(const AString &uri, int32_t i, bool newUri) { +    ALOGI("[timed_id3] i %d UriIsSameAsIndex newUri %s, %s", i, +            newUri ? "true" : "false", +            newUri ? mStreams[i].mNewUri.c_str() : mStreams[i].mUri.c_str()); +    return i >= 0 +            && ((!newUri && uri == mStreams[i].mUri) +            || (newUri && uri == mStreams[i].mNewUri)); +} -    status_t getSizeErr = (*source)->getSize(&size); -    if (getSizeErr != OK) { -        size = 65536; -    } +sp<AnotherPacketSource> LiveSession::getPacketSourceForStreamIndex( +        size_t trackIndex, bool newUri) { +    StreamType type = indexToType(trackIndex); +    sp<AnotherPacketSource> source = NULL; +    if (newUri) { +        source = mPacketSources2.valueFor(type); +        source->clear(); +    } else { +        source = mPacketSources.valueFor(type); +    }; +    return source; +} -    sp<ABuffer> buffer = *out != NULL ? *out : new ABuffer(size); -    if (*out == NULL) { -        buffer->setRange(0, 0); -    } +sp<AnotherPacketSource> LiveSession::getMetadataSource( +        sp<AnotherPacketSource> sources[kNumSources], uint32_t streamMask, bool newUri) { +    // todo: One case where the following strategy can fail is when audio and video +    // are in separate playlists, both are transport streams, and the metadata +    // is actually contained in the audio stream. +    ALOGV("[timed_id3] getMetadataSourceForUri streamMask %x newUri %s", +            streamMask, newUri ? 
"true" : "false"); -    ssize_t bytesRead = 0; -    // adjust range_length if only reading partial block -    if (block_size > 0 && (range_length == -1 || (int64_t)(buffer->size() + block_size) < range_length)) { -        range_length = buffer->size() + block_size; +    if ((sources[kVideoIndex] != NULL) // video fetcher; or ... +            || (!(streamMask & STREAMTYPE_VIDEO) && sources[kAudioIndex] != NULL)) { +            // ... audio fetcher for audio only variant +        return getPacketSourceForStreamIndex(kMetaDataIndex, newUri);      } -    for (;;) { -        // Only resize when we don't know the size. -        size_t bufferRemaining = buffer->capacity() - buffer->size(); -        if (bufferRemaining == 0 && getSizeErr != OK) { -            size_t bufferIncrement = buffer->size() / 2; -            if (bufferIncrement < 32768) { -                bufferIncrement = 32768; -            } -            bufferRemaining = bufferIncrement; - -            ALOGV("increasing download buffer to %zu bytes", -                 buffer->size() + bufferRemaining); - -            sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining); -            memcpy(copy->data(), buffer->data(), buffer->size()); -            copy->setRange(0, buffer->size()); - -            buffer = copy; -        } -        size_t maxBytesToRead = bufferRemaining; -        if (range_length >= 0) { -            int64_t bytesLeftInRange = range_length - buffer->size(); -            if (bytesLeftInRange < (int64_t)maxBytesToRead) { -                maxBytesToRead = bytesLeftInRange; - -                if (bytesLeftInRange == 0) { -                    break; -                } -            } -        } - -        // The DataSource is responsible for informing us of error (n < 0) or eof (n == 0) -        // to help us break out of the loop. 
-        ssize_t n = (*source)->readAt( -                buffer->size(), buffer->data() + buffer->size(), -                maxBytesToRead); - -        if (n < 0) { -            return n; -        } - -        if (n == 0) { -            break; -        } +    return NULL; +} -        buffer->setRange(0, buffer->size() + (size_t)n); -        bytesRead += n; +bool LiveSession::resumeFetcher( +        const AString &uri, uint32_t streamMask, int64_t timeUs, bool newUri) { +    ssize_t index = mFetcherInfos.indexOfKey(uri); +    if (index < 0) { +        ALOGE("did not find fetcher for uri: %s", uri.c_str()); +        return false;      } -    *out = buffer; -    if (actualUrl != NULL) { -        *actualUrl = (*source)->getUri(); -        if (actualUrl->isEmpty()) { -            *actualUrl = url; +    bool resume = false; +    sp<AnotherPacketSource> sources[kNumSources]; +    for (size_t i = 0; i < kMaxStreams; ++i) { +        if ((streamMask & indexToType(i)) && UriIsSameAsIndex(uri, i, newUri)) { +            resume = true; +            sources[i] = getPacketSourceForStreamIndex(i, newUri);          }      } -    return bytesRead; -} - -sp<M3UParser> LiveSession::fetchPlaylist( -        const char *url, uint8_t *curPlaylistHash, bool *unchanged) { -    ALOGV("fetchPlaylist '%s'", url); - -    *unchanged = false; - -    sp<ABuffer> buffer; -    String8 actualUrl; -    ssize_t  err = fetchFile(url, &buffer, 0, -1, 0, NULL, &actualUrl); - -    if (err <= 0) { -        return NULL; -    } - -    // MD5 functionality is not available on the simulator, treat all -    // playlists as changed. - -#if defined(HAVE_ANDROID_OS) -    uint8_t hash[16]; - -    MD5_CTX m; -    MD5_Init(&m); -    MD5_Update(&m, buffer->data(), buffer->size()); +    if (resume) { +        sp<PlaylistFetcher> &fetcher = mFetcherInfos.editValueAt(index).mFetcher; +        SeekMode seekMode = newUri ? 
kSeekModeNextSample : kSeekModeExactPosition; -    MD5_Final(hash, &m); +        ALOGV("resuming fetcher-%d, timeUs=%lld, seekMode=%d", +                fetcher->getFetcherID(), (long long)timeUs, seekMode); -    if (curPlaylistHash != NULL && !memcmp(hash, curPlaylistHash, 16)) { -        // playlist unchanged -        *unchanged = true; - -        return NULL; -    } - -    if (curPlaylistHash != NULL) { -        memcpy(curPlaylistHash, hash, sizeof(hash)); +        fetcher->startAsync( +                sources[kAudioIndex], +                sources[kVideoIndex], +                sources[kSubtitleIndex], +                getMetadataSource(sources, streamMask, newUri), +                timeUs, -1, -1, seekMode);      } -#endif -    sp<M3UParser> playlist = -        new M3UParser(actualUrl.string(), buffer->data(), buffer->size()); +    return resume; +} -    if (playlist->initCheck() != OK) { -        ALOGE("failed to parse .m3u8 playlist"); +float LiveSession::getAbortThreshold( +        ssize_t currentBWIndex, ssize_t targetBWIndex) const { +    float abortThreshold = -1.0f; +    if (currentBWIndex > 0 && targetBWIndex < currentBWIndex) { +        /* +           If we're switching down, we need to decide whether to + +           1) finish last segment of high-bandwidth variant, or +           2) abort last segment of high-bandwidth variant, and fetch an +              overlapping portion from low-bandwidth variant. + +           Here we try to maximize the amount of buffer left when the +           switch point is met. 
Given the following parameters: + +           B: our current buffering level in seconds +           T: target duration in seconds +           X: sample duration in seconds remain to fetch in last segment +           bw0: bandwidth of old variant (as specified in playlist) +           bw1: bandwidth of new variant (as specified in playlist) +           bw: measured bandwidth available + +           If we choose 1), when switch happens at the end of current +           segment, our buffering will be +                  B + X - X * bw0 / bw + +           If we choose 2), when switch happens where we aborted current +           segment, our buffering will be +                  B - (T - X) * bw1 / bw + +           We should only choose 1) if +                  X/T < bw1 / (bw1 + bw0 - bw) +        */ + +        // abort old bandwidth immediately if bandwidth is fluctuating a lot. +        // our estimate could be far off, and fetching old bandwidth could +        // take too long. +        if (!mLastBandwidthStable) { +            return 0.0f; +        } -        return NULL; +        // Taking the measured current bandwidth at 50% face value only, +        // as our bandwidth estimation is a lagging indicator. Being +        // conservative on this, we prefer switching to lower bandwidth +        // unless we're really confident finishing up the last segment +        // of higher bandwidth will be fast. 
+        CHECK(mLastBandwidthBps >= 0); +        abortThreshold = +                (float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth +             / ((float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth +              + (float)mBandwidthItems.itemAt(currentBWIndex).mBandwidth +              - (float)mLastBandwidthBps * 0.5f); +        if (abortThreshold < 0.0f) { +            abortThreshold = -1.0f; // do not abort +        } +        ALOGV("Switching Down: bps %ld => %ld, measured %d, abort ratio %.2f", +                mBandwidthItems.itemAt(currentBWIndex).mBandwidth, +                mBandwidthItems.itemAt(targetBWIndex).mBandwidth, +                mLastBandwidthBps, +                abortThreshold);      } +    return abortThreshold; +} -    return playlist; +void LiveSession::addBandwidthMeasurement(size_t numBytes, int64_t delayUs) { +    mBandwidthEstimator->addBandwidthMeasurement(numBytes, delayUs);  } -#if 0 -static double uniformRand() { -    return (double)rand() / RAND_MAX; +ssize_t LiveSession::getLowestValidBandwidthIndex() const { +    for (size_t index = 0; index < mBandwidthItems.size(); index++) { +        if (isBandwidthValid(mBandwidthItems[index])) { +            return index; +        } +    } +    // if playlists are all blacklisted, return 0 and hope it's alive +    return 0;  } -#endif -size_t LiveSession::getBandwidthIndex() { -    if (mBandwidthItems.size() == 0) { +size_t LiveSession::getBandwidthIndex(int32_t bandwidthBps) { +    if (mBandwidthItems.size() < 2) { +        // shouldn't be here if we only have 1 bandwidth, check +        // logic to get rid of redundant bandwidth polling +        ALOGW("getBandwidthIndex() called for single bandwidth playlist!");          return 0;      } @@ -1014,15 +1347,6 @@ size_t LiveSession::getBandwidthIndex() {      }      if (index < 0) { -        int32_t bandwidthBps; -        if (mHTTPDataSource != NULL -                && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) { -       
     ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f); -        } else { -            ALOGV("no bandwidth estimate."); -            return 0;  // Pick the lowest bandwidth stream by default. -        } -          char value[PROPERTY_VALUE_MAX];          if (property_get("media.httplive.max-bw", value, NULL)) {              char *end; @@ -1035,20 +1359,18 @@ size_t LiveSession::getBandwidthIndex() {              }          } -        // Pick the highest bandwidth stream below or equal to estimated bandwidth. +        // Pick the highest bandwidth stream that's not currently blacklisted +        // below or equal to estimated bandwidth.          index = mBandwidthItems.size() - 1; -        while (index > 0) { -            // consider only 80% of the available bandwidth, but if we are switching up, -            // be even more conservative (70%) to avoid overestimating and immediately -            // switching back. -            size_t adjustedBandwidthBps = bandwidthBps; -            if (index > mCurBandwidthIndex) { -                adjustedBandwidthBps = adjustedBandwidthBps * 7 / 10; -            } else { -                adjustedBandwidthBps = adjustedBandwidthBps * 8 / 10; -            } -            if (mBandwidthItems.itemAt(index).mBandwidth <= adjustedBandwidthBps) { +        ssize_t lowestBandwidth = getLowestValidBandwidthIndex(); +        while (index > lowestBandwidth) { +            // be conservative (70%) to avoid overestimating and immediately +            // switching down again. 
+            size_t adjustedBandwidthBps = bandwidthBps * 7 / 10; +            const BandwidthItem &item = mBandwidthItems[index]; +            if (item.mBandwidth <= adjustedBandwidthBps +                    && isBandwidthValid(item)) {                  break;              }              --index; @@ -1107,34 +1429,20 @@ size_t LiveSession::getBandwidthIndex() {      return index;  } -int64_t LiveSession::latestMediaSegmentStartTimeUs() { -    sp<AMessage> audioMeta = mPacketSources.valueFor(STREAMTYPE_AUDIO)->getLatestDequeuedMeta(); -    int64_t minSegmentStartTimeUs = -1, videoSegmentStartTimeUs = -1; -    if (audioMeta != NULL) { -        audioMeta->findInt64("segmentStartTimeUs", &minSegmentStartTimeUs); -    } +HLSTime LiveSession::latestMediaSegmentStartTime() const { +    HLSTime audioTime(mPacketSources.valueFor( +                    STREAMTYPE_AUDIO)->getLatestDequeuedMeta()); -    sp<AMessage> videoMeta = mPacketSources.valueFor(STREAMTYPE_VIDEO)->getLatestDequeuedMeta(); -    if (videoMeta != NULL -            && videoMeta->findInt64("segmentStartTimeUs", &videoSegmentStartTimeUs)) { -        if (minSegmentStartTimeUs < 0 || videoSegmentStartTimeUs < minSegmentStartTimeUs) { -            minSegmentStartTimeUs = videoSegmentStartTimeUs; -        } +    HLSTime videoTime(mPacketSources.valueFor( +                    STREAMTYPE_VIDEO)->getLatestDequeuedMeta()); -    } -    return minSegmentStartTimeUs; +    return audioTime < videoTime ? 
videoTime : audioTime;  } -status_t LiveSession::onSeek(const sp<AMessage> &msg) { +void LiveSession::onSeek(const sp<AMessage> &msg) {      int64_t timeUs;      CHECK(msg->findInt64("timeUs", &timeUs)); - -    if (!mReconfigurationInProgress) { -        changeConfiguration(timeUs, mCurBandwidthIndex); -        return OK; -    } else { -        return -EWOULDBLOCK; -    } +    changeConfiguration(timeUs);  }  status_t LiveSession::getDuration(int64_t *durationUs) const { @@ -1165,7 +1473,7 @@ size_t LiveSession::getTrackCount() const {      if (mPlaylist == NULL) {          return 0;      } else { -        return mPlaylist->getTrackCount(); +        return mPlaylist->getTrackCount() + (mHasMetadata ? 1 : 0);      }  } @@ -1173,6 +1481,13 @@ sp<AMessage> LiveSession::getTrackInfo(size_t trackIndex) const {      if (mPlaylist == NULL) {          return NULL;      } else { +        if (trackIndex == mPlaylist->getTrackCount() && mHasMetadata) { +            sp<AMessage> format = new AMessage(); +            format->setInt32("type", MEDIA_TRACK_TYPE_METADATA); +            format->setString("language", "und"); +            format->setString("mime", MEDIA_MIMETYPE_DATA_TIMED_ID3); +            return format; +        }          return mPlaylist->getTrackInfo(trackIndex);      }  } @@ -1182,11 +1497,13 @@ status_t LiveSession::selectTrack(size_t index, bool select) {          return INVALID_OPERATION;      } +    ALOGV("selectTrack: index=%zu, select=%d, mSubtitleGen=%d++", +            index, select, mSubtitleGeneration); +      ++mSubtitleGeneration;      status_t err = mPlaylist->selectTrack(index, select);      if (err == OK) { -        sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, id()); -        msg->setInt32("bandwidthIndex", mCurBandwidthIndex); +        sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, this);          msg->setInt32("pickTrack", select);          msg->post();      } @@ -1201,35 +1518,25 @@ ssize_t 
LiveSession::getSelectedTrack(media_track_type type) const {      }  } -bool LiveSession::canSwitchUp() { -    // Allow upwards bandwidth switch when a stream has buffered at least 10 seconds. -    status_t err = OK; -    for (size_t i = 0; i < mPacketSources.size(); ++i) { -        sp<AnotherPacketSource> source = mPacketSources.valueAt(i); -        int64_t dur = source->getBufferedDurationUs(&err); -        if (err == OK && dur > 10000000) { -            return true; -        } -    } -    return false; -} -  void LiveSession::changeConfiguration( -        int64_t timeUs, size_t bandwidthIndex, bool pickTrack) { -    // Protect mPacketSources from a swapPacketSource race condition through reconfiguration. -    // (changeConfiguration, onChangeConfiguration2, onChangeConfiguration3). +        int64_t timeUs, ssize_t bandwidthIndex, bool pickTrack) { +    ALOGV("changeConfiguration: timeUs=%lld us, bwIndex=%zd, pickTrack=%d", +          (long long)timeUs, bandwidthIndex, pickTrack); +      cancelBandwidthSwitch();      CHECK(!mReconfigurationInProgress);      mReconfigurationInProgress = true; - -    mCurBandwidthIndex = bandwidthIndex; - -    ALOGV("changeConfiguration => timeUs:%" PRId64 " us, bwIndex:%zu, pickTrack:%d", -          timeUs, bandwidthIndex, pickTrack); - -    CHECK_LT(bandwidthIndex, mBandwidthItems.size()); -    const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex); +    if (bandwidthIndex >= 0) { +        mOrigBandwidthIndex = mCurBandwidthIndex; +        mCurBandwidthIndex = bandwidthIndex; +        if (mOrigBandwidthIndex != mCurBandwidthIndex) { +            ALOGI("#### Starting Bandwidth Switch: %zd => %zd", +                    mOrigBandwidthIndex, mCurBandwidthIndex); +        } +    } +    CHECK_LT(mCurBandwidthIndex, mBandwidthItems.size()); +    const BandwidthItem &item = mBandwidthItems.itemAt(mCurBandwidthIndex);      uint32_t streamMask = 0; // streams that should be fetched by the new fetcher      uint32_t resumeMask = 
0; // streams that should be fetched by the original fetcher @@ -1244,38 +1551,60 @@ void LiveSession::changeConfiguration(      // Step 1, stop and discard fetchers that are no longer needed.      // Pause those that we'll reuse.      for (size_t i = 0; i < mFetcherInfos.size(); ++i) { -        const AString &uri = mFetcherInfos.keyAt(i); - -        bool discardFetcher = true; +        // skip fetchers that are marked mToBeRemoved, +        // these are done and can't be reused +        if (mFetcherInfos[i].mToBeRemoved) { +            continue; +        } -        // If we're seeking all current fetchers are discarded. -        if (timeUs < 0ll) { -            // delay fetcher removal if not picking tracks -            discardFetcher = pickTrack; +        const AString &uri = mFetcherInfos.keyAt(i); +        sp<PlaylistFetcher> &fetcher = mFetcherInfos.editValueAt(i).mFetcher; -            for (size_t j = 0; j < kMaxStreams; ++j) { -                StreamType type = indexToType(j); -                if ((streamMask & type) && uri == URIs[j]) { -                    resumeMask |= type; -                    streamMask &= ~type; -                    discardFetcher = false; -                } +        bool discardFetcher = true, delayRemoval = false; +        for (size_t j = 0; j < kMaxStreams; ++j) { +            StreamType type = indexToType(j); +            if ((streamMask & type) && uri == URIs[j]) { +                resumeMask |= type; +                streamMask &= ~type; +                discardFetcher = false;              }          } +        // Delay fetcher removal if not picking tracks, AND old fetcher +        // has stream mask that overlaps new variant. (Okay to discard +        // old fetcher now, if completely no overlap.) 
+        if (discardFetcher && timeUs < 0ll && !pickTrack +                && (fetcher->getStreamTypeMask() & streamMask)) { +            discardFetcher = false; +            delayRemoval = true; +        }          if (discardFetcher) { -            mFetcherInfos.valueAt(i).mFetcher->stopAsync(); +            ALOGV("discarding fetcher-%d", fetcher->getFetcherID()); +            fetcher->stopAsync();          } else { -            mFetcherInfos.valueAt(i).mFetcher->pauseAsync(); +            float threshold = 0.0f; // default to pause after current block (47Kbytes) +            bool disconnect = false; +            if (timeUs >= 0ll) { +                // seeking, no need to finish fetching +                disconnect = true; +            } else if (delayRemoval) { +                // adapting, abort if remaining of current segment is over threshold +                threshold = getAbortThreshold( +                        mOrigBandwidthIndex, mCurBandwidthIndex); +            } + +            ALOGV("pausing fetcher-%d, threshold=%.2f", +                    fetcher->getFetcherID(), threshold); +            fetcher->pauseAsync(threshold, disconnect);          }      }      sp<AMessage> msg;      if (timeUs < 0ll) {          // skip onChangeConfiguration2 (decoder destruction) if not seeking. 
-        msg = new AMessage(kWhatChangeConfiguration3, id()); +        msg = new AMessage(kWhatChangeConfiguration3, this);      } else { -        msg = new AMessage(kWhatChangeConfiguration2, id()); +        msg = new AMessage(kWhatChangeConfiguration2, this);      }      msg->setInt32("streamMask", streamMask);      msg->setInt32("resumeMask", resumeMask); @@ -1296,40 +1625,74 @@ void LiveSession::changeConfiguration(      if (mContinuationCounter == 0) {          msg->post(); - -        if (mSeekReplyID != 0) { -            CHECK(mSeekReply != NULL); -            mSeekReply->setInt32("err", OK); -            mSeekReply->postReply(mSeekReplyID); -            mSeekReplyID = 0; -            mSeekReply.clear(); -        }      }  }  void LiveSession::onChangeConfiguration(const sp<AMessage> &msg) { +    ALOGV("onChangeConfiguration"); +      if (!mReconfigurationInProgress) { -        int32_t pickTrack = 0, bandwidthIndex = mCurBandwidthIndex; +        int32_t pickTrack = 0;          msg->findInt32("pickTrack", &pickTrack); -        msg->findInt32("bandwidthIndex", &bandwidthIndex); -        changeConfiguration(-1ll /* timeUs */, bandwidthIndex, pickTrack); +        changeConfiguration(-1ll /* timeUs */, -1, pickTrack);      } else {          msg->post(1000000ll); // retry in 1 sec      }  }  void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) { +    ALOGV("onChangeConfiguration2"); +      mContinuation.clear();      // All fetchers are either suspended or have been removed now. +    // If we're seeking, clear all packet sources before we report +    // seek complete, to prevent decoder from pulling stale data. 
+    int64_t timeUs; +    CHECK(msg->findInt64("timeUs", &timeUs)); + +    if (timeUs >= 0) { +        mLastSeekTimeUs = timeUs; +        mLastDequeuedTimeUs = timeUs; + +        for (size_t i = 0; i < mPacketSources.size(); i++) { +            sp<AnotherPacketSource> packetSource = mPacketSources.editValueAt(i); +            sp<MetaData> format = packetSource->getFormat(); +            packetSource->clear(); +            // Set a tentative format here such that HTTPLiveSource will always have +            // a format available when NuPlayer queries. Without an available video +            // format when setting a surface NuPlayer might disable video decoding +            // altogether. The tentative format will be overwritten by the +            // authoritative (and possibly same) format once content from the new +            // position is dequeued. +            packetSource->setFormat(format); +        } + +        for (size_t i = 0; i < kMaxStreams; ++i) { +            mStreams[i].reset(); +        } + +        mDiscontinuityOffsetTimesUs.clear(); +        mDiscontinuityAbsStartTimesUs.clear(); + +        if (mSeekReplyID != NULL) { +            CHECK(mSeekReply != NULL); +            mSeekReply->setInt32("err", OK); +            mSeekReply->postReply(mSeekReplyID); +            mSeekReplyID.clear(); +            mSeekReply.clear(); +        } + +        // restart buffer polling after seek becauese previous +        // buffering position is no longer valid. +        restartPollBuffering(); +    } +      uint32_t streamMask, resumeMask;      CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask));      CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask)); -    // currently onChangeConfiguration2 is only called for seeking; -    // remove the following CHECK if using it else where. 
-    CHECK_EQ(resumeMask, 0);      streamMask |= resumeMask;      AString URIs[kMaxStreams]; @@ -1341,17 +1704,27 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {          }      } -    // Determine which decoders to shutdown on the player side, -    // a decoder has to be shutdown if either -    // 1) its streamtype was active before but now longer isn't. -    // or -    // 2) its streamtype was already active and still is but the URI -    //    has changed.      uint32_t changedMask = 0;      for (size_t i = 0; i < kMaxStreams && i != kSubtitleIndex; ++i) { -        if (((mStreamMask & streamMask & indexToType(i)) -                && !(URIs[i] == mStreams[i].mUri)) -                || (mStreamMask & ~streamMask & indexToType(i))) { +        // stream URI could change even if onChangeConfiguration2 is only +        // used for seek. Seek could happen during a bw switch, in this +        // case bw switch will be cancelled, but the seekTo position will +        // fetch from the new URI. +        if ((mStreamMask & streamMask & indexToType(i)) +                && !mStreams[i].mUri.empty() +                && !(URIs[i] == mStreams[i].mUri)) { +            ALOGV("stream %zu changed: oldURI %s, newURI %s", i, +                    mStreams[i].mUri.c_str(), URIs[i].c_str()); +            sp<AnotherPacketSource> source = mPacketSources.valueFor(indexToType(i)); +            if (source->getLatestDequeuedMeta() != NULL) { +                source->queueDiscontinuity( +                        ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true); +            } +        } +        // Determine which decoders to shutdown on the player side, +        // a decoder has to be shutdown if its streamtype was active +        // before but now longer isn't. 
+        if ((mStreamMask & ~streamMask & indexToType(i))) {              changedMask |= indexToType(i);          }      } @@ -1372,7 +1745,7 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {      notify->setInt32("changedMask", changedMask);      msg->setWhat(kWhatChangeConfiguration3); -    msg->setTarget(id()); +    msg->setTarget(this);      notify->setMessage("reply", msg);      notify->post(); @@ -1387,6 +1760,8 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {      CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask));      CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask)); +    mNewStreamMask = streamMask | resumeMask; +      int64_t timeUs;      int32_t pickTrack;      bool switching = false; @@ -1395,13 +1770,26 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {      if (timeUs < 0ll) {          if (!pickTrack) { -            switching = true; +            // mSwapMask contains streams that are in both old and new variant, +            // (in mNewStreamMask & mStreamMask) but with different URIs +            // (not in resumeMask). +            // For example, old variant has video and audio in two separate +            // URIs, and new variant has only audio with unchanged URI. mSwapMask +            // should be 0 as there is nothing to swap. We only need to stop video, +            // and resume audio. 
+            mSwapMask =  mNewStreamMask & mStreamMask & ~resumeMask; +            switching = (mSwapMask != 0);          }          mRealTimeBaseUs = ALooper::GetNowUs() - mLastDequeuedTimeUs;      } else {          mRealTimeBaseUs = ALooper::GetNowUs() - timeUs;      } +    ALOGV("onChangeConfiguration3: timeUs=%lld, switching=%d, pickTrack=%d, " +            "mStreamMask=0x%x, mNewStreamMask=0x%x, mSwapMask=0x%x", +            (long long)timeUs, switching, pickTrack, +            mStreamMask, mNewStreamMask, mSwapMask); +      for (size_t i = 0; i < kMaxStreams; ++i) {          if (streamMask & indexToType(i)) {              if (switching) { @@ -1412,47 +1800,21 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {          }      } -    mNewStreamMask = streamMask | resumeMask; -    if (switching) { -        mSwapMask = mStreamMask & ~resumeMask; -    } -      // Of all existing fetchers:      // * Resume fetchers that are still needed and assign them original packet sources.      // * Mark otherwise unneeded fetchers for removal.      
ALOGV("resuming fetchers for mask 0x%08x", resumeMask);      for (size_t i = 0; i < mFetcherInfos.size(); ++i) {          const AString &uri = mFetcherInfos.keyAt(i); +        if (!resumeFetcher(uri, resumeMask, timeUs)) { +            ALOGV("marking fetcher-%d to be removed", +                    mFetcherInfos[i].mFetcher->getFetcherID()); -        sp<AnotherPacketSource> sources[kMaxStreams]; -        for (size_t j = 0; j < kMaxStreams; ++j) { -            if ((resumeMask & indexToType(j)) && uri == mStreams[j].mUri) { -                sources[j] = mPacketSources.valueFor(indexToType(j)); - -                if (j != kSubtitleIndex) { -                    ALOGV("queueing dummy discontinuity for stream type %d", indexToType(j)); -                    sp<AnotherPacketSource> discontinuityQueue; -                    discontinuityQueue = mDiscontinuities.valueFor(indexToType(j)); -                    discontinuityQueue->queueDiscontinuity( -                            ATSParser::DISCONTINUITY_NONE, -                            NULL, -                            true); -                } -            } -        } - -        FetcherInfo &info = mFetcherInfos.editValueAt(i); -        if (sources[kAudioIndex] != NULL || sources[kVideoIndex] != NULL -                || sources[kSubtitleIndex] != NULL) { -            info.mFetcher->startAsync( -                    sources[kAudioIndex], sources[kVideoIndex], sources[kSubtitleIndex]); -        } else { -            info.mToBeRemoved = true; +            mFetcherInfos.editValueAt(i).mToBeRemoved = true;          }      }      // streamMask now only contains the types that need a new fetcher created. 
-      if (streamMask != 0) {          ALOGV("creating new fetchers for mask 0x%08x", streamMask);      } @@ -1470,13 +1832,12 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {          sp<PlaylistFetcher> fetcher = addFetcher(uri.c_str());          CHECK(fetcher != NULL); -        int64_t startTimeUs = -1; -        int64_t segmentStartTimeUs = -1ll; -        int32_t discontinuitySeq = -1; -        sp<AnotherPacketSource> sources[kMaxStreams]; +        HLSTime startTime; +        SeekMode seekMode = kSeekModeExactPosition; +        sp<AnotherPacketSource> sources[kNumSources]; -        if (i == kSubtitleIndex) { -            segmentStartTimeUs = latestMediaSegmentStartTimeUs(); +        if (i == kSubtitleIndex || (!pickTrack && !switching)) { +            startTime = latestMediaSegmentStartTime();          }          // TRICKY: looping from i as earlier streams are already removed from streamMask @@ -1486,63 +1847,50 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {                  sources[j] = mPacketSources.valueFor(indexToType(j));                  if (timeUs >= 0) { -                    sources[j]->clear(); -                    startTimeUs = timeUs; - -                    sp<AnotherPacketSource> discontinuityQueue; -                    sp<AMessage> extra = new AMessage; -                    extra->setInt64("timeUs", timeUs); -                    discontinuityQueue = mDiscontinuities.valueFor(indexToType(j)); -                    discontinuityQueue->queueDiscontinuity( -                            ATSParser::DISCONTINUITY_TIME, extra, true); +                    startTime.mTimeUs = timeUs;                  } else {                      int32_t type;                      sp<AMessage> meta; -                    if (pickTrack) { -                        // selecting +                    if (!switching) { +                        // selecting, or adapting but no swap required                          meta = 
sources[j]->getLatestDequeuedMeta();                      } else { -                        // adapting +                        // adapting and swap required                          meta = sources[j]->getLatestEnqueuedMeta(); -                    } - -                    if (meta != NULL && !meta->findInt32("discontinuity", &type)) { -                        int64_t tmpUs; -                        int64_t tmpSegmentUs; - -                        CHECK(meta->findInt64("timeUs", &tmpUs)); -                        CHECK(meta->findInt64("segmentStartTimeUs", &tmpSegmentUs)); -                        if (startTimeUs < 0 || tmpSegmentUs < segmentStartTimeUs) { -                            startTimeUs = tmpUs; -                            segmentStartTimeUs = tmpSegmentUs; -                        } else if (tmpSegmentUs == segmentStartTimeUs && tmpUs < startTimeUs) { -                            startTimeUs = tmpUs; +                        if (meta != NULL && mCurBandwidthIndex > mOrigBandwidthIndex) { +                            // switching up +                            meta = sources[j]->getMetaAfterLastDequeued(mUpSwitchMargin);                          } +                    } -                        int32_t seq; -                        CHECK(meta->findInt32("discontinuitySeq", &seq)); -                        if (discontinuitySeq < 0 || seq < discontinuitySeq) { -                            discontinuitySeq = seq; +                    if ((j == kAudioIndex || j == kVideoIndex) +                            && meta != NULL && !meta->findInt32("discontinuity", &type)) { +                        HLSTime tmpTime(meta); +                        if (startTime < tmpTime) { +                            startTime = tmpTime;                          }                      } -                    if (pickTrack) { -                        // selecting track, queue discontinuities before content +                    if (!switching) { +                        // selecting, 
or adapting but no swap required                          sources[j]->clear();                          if (j == kSubtitleIndex) {                              break;                          } -                        sp<AnotherPacketSource> discontinuityQueue; -                        discontinuityQueue = mDiscontinuities.valueFor(indexToType(j)); -                        discontinuityQueue->queueDiscontinuity( -                                ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true); + +                        ALOGV("stream[%zu]: queue format change", j); +                        sources[j]->queueDiscontinuity( +                                ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, true);                      } else { -                        // adapting, queue discontinuities after resume +                        // switching, queue discontinuities after resume                          sources[j] = mPacketSources2.valueFor(indexToType(j));                          sources[j]->clear(); -                        uint32_t extraStreams = mNewStreamMask & (~mStreamMask); -                        if (extraStreams & indexToType(j)) { -                            sources[j]->queueAccessUnit(createFormatChangeBuffer(/*swap*/ false)); +                        // the new fetcher might be providing streams that used to be +                        // provided by two different fetchers,  if one of the fetcher +                        // paused in the middle while the other somehow paused in next +                        // seg, we have to start from next seg. 
+                        if (seekMode < mStreams[j].mSeekMode) { +                            seekMode = mStreams[j].mSeekMode;                          }                      }                  } @@ -1551,54 +1899,104 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {              }          } +        ALOGV("[fetcher-%d] startAsync: startTimeUs %lld mLastSeekTimeUs %lld " +                "segmentStartTimeUs %lld seekMode %d", +                fetcher->getFetcherID(), +                (long long)startTime.mTimeUs, +                (long long)mLastSeekTimeUs, +                (long long)startTime.getSegmentTimeUs(), +                seekMode); + +        // Set the target segment start time to the middle point of the +        // segment where the last sample was. +        // This gives a better guess if segments of the two variants are not +        // perfectly aligned. (If the corresponding segment in new variant +        // starts slightly later than that in the old variant, we still want +        // to pick that segment, not the one before)          fetcher->startAsync(                  sources[kAudioIndex],                  sources[kVideoIndex],                  sources[kSubtitleIndex], -                startTimeUs < 0 ? mLastSeekTimeUs : startTimeUs, -                segmentStartTimeUs, -                discontinuitySeq, -                switching); +                getMetadataSource(sources, mNewStreamMask, switching), +                startTime.mTimeUs < 0 ? mLastSeekTimeUs : startTime.mTimeUs, +                startTime.getSegmentTimeUs(), +                startTime.mSeq, +                seekMode);      }      // All fetchers have now been started, the configuration change      // has completed. 
-    cancelCheckBandwidthEvent(); -    scheduleCheckBandwidthEvent(); - -    ALOGV("XXX configuration change completed.");      mReconfigurationInProgress = false;      if (switching) {          mSwitchInProgress = true;      } else {          mStreamMask = mNewStreamMask; +        if (mOrigBandwidthIndex != mCurBandwidthIndex) { +            ALOGV("#### Finished Bandwidth Switch Early: %zd => %zd", +                    mOrigBandwidthIndex, mCurBandwidthIndex); +            mOrigBandwidthIndex = mCurBandwidthIndex; +        }      } -    if (mDisconnectReplyID != 0) { +    ALOGV("onChangeConfiguration3: mSwitchInProgress %d, mStreamMask 0x%x", +            mSwitchInProgress, mStreamMask); + +    if (mDisconnectReplyID != NULL) {          finishDisconnect();      }  } -void LiveSession::onSwapped(const sp<AMessage> &msg) { -    int32_t switchGeneration; -    CHECK(msg->findInt32("switchGeneration", &switchGeneration)); -    if (switchGeneration != mSwitchGeneration) { +void LiveSession::swapPacketSource(StreamType stream) { +    ALOGV("[%s] swapPacketSource", getNameForStream(stream)); + +    // transfer packets from source2 to source +    sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream); +    sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream); + +    // queue discontinuity in mPacketSource +    aps->queueDiscontinuity(ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, false); + +    // queue packets in mPacketSource2 to mPacketSource +    status_t finalResult = OK; +    sp<ABuffer> accessUnit; +    while (aps2->hasBufferAvailable(&finalResult) && finalResult == OK && +          OK == aps2->dequeueAccessUnit(&accessUnit)) { +        aps->queueAccessUnit(accessUnit); +    } +    aps2->clear(); +} + +void LiveSession::tryToFinishBandwidthSwitch(const AString &oldUri) { +    if (!mSwitchInProgress) { +        return; +    } + +    ssize_t index = mFetcherInfos.indexOfKey(oldUri); +    if (index < 0 || !mFetcherInfos[index].mToBeRemoved) {   
       return;      } -    int32_t stream; -    CHECK(msg->findInt32("stream", &stream)); +    // Swap packet source of streams provided by old variant +    for (size_t idx = 0; idx < kMaxStreams; idx++) { +        StreamType stream = indexToType(idx); +        if ((mSwapMask & stream) && (oldUri == mStreams[idx].mUri)) { +            swapPacketSource(stream); -    ssize_t idx = typeToIndex(stream); -    CHECK(idx >= 0); -    if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) { -        ALOGW("swapping stream type %d %s to empty stream", stream, mStreams[idx].mUri.c_str()); +            if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) { +                ALOGW("swapping stream type %d %s to empty stream", +                        stream, mStreams[idx].mUri.c_str()); +            } +            mStreams[idx].mUri = mStreams[idx].mNewUri; +            mStreams[idx].mNewUri.clear(); + +            mSwapMask &= ~stream; +        }      } -    mStreams[idx].mUri = mStreams[idx].mNewUri; -    mStreams[idx].mNewUri.clear(); -    mSwapMask &= ~stream; +    mFetcherInfos.editValueAt(index).mFetcher->stopAsync(false /* clear */); + +    ALOGV("tryToFinishBandwidthSwitch: mSwapMask=0x%x", mSwapMask);      if (mSwapMask != 0) {          return;      } @@ -1606,155 +2004,370 @@ void LiveSession::onSwapped(const sp<AMessage> &msg) {      // Check if new variant contains extra streams.      
uint32_t extraStreams = mNewStreamMask & (~mStreamMask);      while (extraStreams) { -        StreamType extraStream = (StreamType) (extraStreams & ~(extraStreams - 1)); -        swapPacketSource(extraStream); -        extraStreams &= ~extraStream; +        StreamType stream = (StreamType) (extraStreams & ~(extraStreams - 1)); +        extraStreams &= ~stream; + +        swapPacketSource(stream); -        idx = typeToIndex(extraStream); +        ssize_t idx = typeToIndex(stream);          CHECK(idx >= 0);          if (mStreams[idx].mNewUri.empty()) {              ALOGW("swapping extra stream type %d %s to empty stream", -                    extraStream, mStreams[idx].mUri.c_str()); +                    stream, mStreams[idx].mUri.c_str());          }          mStreams[idx].mUri = mStreams[idx].mNewUri;          mStreams[idx].mNewUri.clear();      } -    tryToFinishBandwidthSwitch(); +    // Restart new fetcher (it was paused after the first 47k block) +    // and let it fetch into mPacketSources (not mPacketSources2) +    for (size_t i = 0; i < mFetcherInfos.size(); ++i) { +        FetcherInfo &info = mFetcherInfos.editValueAt(i); +        if (info.mToBeResumed) { +            resumeFetcher(mFetcherInfos.keyAt(i), mNewStreamMask); +            info.mToBeResumed = false; +        } +    } + +    ALOGI("#### Finished Bandwidth Switch: %zd => %zd", +            mOrigBandwidthIndex, mCurBandwidthIndex); + +    mStreamMask = mNewStreamMask; +    mSwitchInProgress = false; +    mOrigBandwidthIndex = mCurBandwidthIndex; + +    restartPollBuffering();  } -void LiveSession::onCheckSwitchDown() { -    if (mSwitchDownMonitor == NULL) { -        return; -    } +void LiveSession::schedulePollBuffering() { +    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this); +    msg->setInt32("generation", mPollBufferingGeneration); +    msg->post(1000000ll); +} -    if (mSwitchInProgress || mReconfigurationInProgress) { -        ALOGV("Switch/Reconfig in progress, defer switch 
down"); -        mSwitchDownMonitor->post(1000000ll); -        return; -    } +void LiveSession::cancelPollBuffering() { +    ++mPollBufferingGeneration; +    mPrevBufferPercentage = -1; +} -    for (size_t i = 0; i < kMaxStreams; ++i) { -        int32_t targetDuration; -        sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(indexToType(i)); -        sp<AMessage> meta = packetSource->getLatestDequeuedMeta(); +void LiveSession::restartPollBuffering() { +    cancelPollBuffering(); +    onPollBuffering(); +} -        if (meta != NULL && meta->findInt32("targetDuration", &targetDuration) ) { -            int64_t bufferedDurationUs = packetSource->getEstimatedDurationUs(); -            int64_t targetDurationUs = targetDuration * 1000000ll; +void LiveSession::onPollBuffering() { +    ALOGV("onPollBuffering: mSwitchInProgress %d, mReconfigurationInProgress %d, " +            "mInPreparationPhase %d, mCurBandwidthIndex %zd, mStreamMask 0x%x", +        mSwitchInProgress, mReconfigurationInProgress, +        mInPreparationPhase, mCurBandwidthIndex, mStreamMask); + +    bool underflow, ready, down, up; +    if (checkBuffering(underflow, ready, down, up)) { +        if (mInPreparationPhase) { +            // Allow down switch even if we're still preparing. +            // +            // Some streams have a high bandwidth index as default, +            // when bandwidth is low, it takes a long time to buffer +            // to ready mark, then it immediately pauses after start +            // as we have to do a down switch. It's better experience +            // to restart from a lower index, if we detect low bw. 
+            if (!switchBandwidthIfNeeded(false /* up */, down) && ready) { +                postPrepared(OK); +            } +        } -            if (bufferedDurationUs < targetDurationUs / 3) { -                (new AMessage(kWhatSwitchDown, id()))->post(); -                break; +        if (!mInPreparationPhase) { +            if (ready) { +                stopBufferingIfNecessary(); +            } else if (underflow) { +                startBufferingIfNecessary();              } +            switchBandwidthIfNeeded(up, down);          }      } -    mSwitchDownMonitor->post(1000000ll); +    schedulePollBuffering();  } -void LiveSession::onSwitchDown() { -    if (mReconfigurationInProgress || mSwitchInProgress || mCurBandwidthIndex == 0) { +void LiveSession::cancelBandwidthSwitch(bool resume) { +    ALOGV("cancelBandwidthSwitch: mSwitchGen(%d)++, orig %zd, cur %zd", +            mSwitchGeneration, mOrigBandwidthIndex, mCurBandwidthIndex); +    if (!mSwitchInProgress) {          return;      } -    ssize_t bandwidthIndex = getBandwidthIndex(); -    if (bandwidthIndex < mCurBandwidthIndex) { -        changeConfiguration(-1, bandwidthIndex, false); -        return; +    for (size_t i = 0; i < mFetcherInfos.size(); ++i) { +        FetcherInfo& info = mFetcherInfos.editValueAt(i); +        if (info.mToBeRemoved) { +            info.mToBeRemoved = false; +            if (resume) { +                resumeFetcher(mFetcherInfos.keyAt(i), mSwapMask); +            } +        }      } +    for (size_t i = 0; i < kMaxStreams; ++i) { +        AString newUri = mStreams[i].mNewUri; +        if (!newUri.empty()) { +            // clear all mNewUri matching this newUri +            for (size_t j = i; j < kMaxStreams; ++j) { +                if (mStreams[j].mNewUri == newUri) { +                    mStreams[j].mNewUri.clear(); +                } +            } +            ALOGV("stopping newUri = %s", newUri.c_str()); +            ssize_t index = 
mFetcherInfos.indexOfKey(newUri); +            if (index < 0) { +                ALOGE("did not find fetcher for newUri: %s", newUri.c_str()); +                continue; +            } +            FetcherInfo &info = mFetcherInfos.editValueAt(index); +            info.mToBeRemoved = true; +            info.mFetcher->stopAsync(); +        } +    } + +    ALOGI("#### Canceled Bandwidth Switch: %zd => %zd", +            mOrigBandwidthIndex, mCurBandwidthIndex); + +    mSwitchGeneration++; +    mSwitchInProgress = false; +    mCurBandwidthIndex = mOrigBandwidthIndex; +    mSwapMask = 0;  } -// Mark switch done when: -//   1. all old buffers are swapped out -void LiveSession::tryToFinishBandwidthSwitch() { -    if (!mSwitchInProgress) { -        return; +bool LiveSession::checkBuffering( +        bool &underflow, bool &ready, bool &down, bool &up) { +    underflow = ready = down = up = false; + +    if (mReconfigurationInProgress) { +        ALOGV("Switch/Reconfig in progress, defer buffer polling"); +        return false;      } -    bool needToRemoveFetchers = false; -    for (size_t i = 0; i < mFetcherInfos.size(); ++i) { -        if (mFetcherInfos.valueAt(i).mToBeRemoved) { -            needToRemoveFetchers = true; -            break; +    size_t activeCount, underflowCount, readyCount, downCount, upCount; +    activeCount = underflowCount = readyCount = downCount = upCount =0; +    int32_t minBufferPercent = -1; +    int64_t durationUs; +    if (getDuration(&durationUs) != OK) { +        durationUs = -1; +    } +    for (size_t i = 0; i < mPacketSources.size(); ++i) { +        // we don't check subtitles for buffering level +        if (!(mStreamMask & mPacketSources.keyAt(i) +                & (STREAMTYPE_AUDIO | STREAMTYPE_VIDEO))) { +            continue; +        } +        // ignore streams that never had any packet queued. 
+        // (it's possible that the variant only has audio or video) +        sp<AMessage> meta = mPacketSources[i]->getLatestEnqueuedMeta(); +        if (meta == NULL) { +            continue; +        } + +        status_t finalResult; +        int64_t bufferedDurationUs = +                mPacketSources[i]->getBufferedDurationUs(&finalResult); +        ALOGV("[%s] buffered %lld us", +                getNameForStream(mPacketSources.keyAt(i)), +                (long long)bufferedDurationUs); +        if (durationUs >= 0) { +            int32_t percent; +            if (mPacketSources[i]->isFinished(0 /* duration */)) { +                percent = 100; +            } else { +                percent = (int32_t)(100.0 * +                        (mLastDequeuedTimeUs + bufferedDurationUs) / durationUs); +            } +            if (minBufferPercent < 0 || percent < minBufferPercent) { +                minBufferPercent = percent; +            } +        } + +        ++activeCount; +        int64_t readyMark = mInPreparationPhase ? 
kPrepareMarkUs : kReadyMarkUs; +        if (bufferedDurationUs > readyMark +                || mPacketSources[i]->isFinished(0)) { +            ++readyCount; +        } +        if (!mPacketSources[i]->isFinished(0)) { +            if (bufferedDurationUs < kUnderflowMarkUs) { +                ++underflowCount; +            } +            if (bufferedDurationUs > mUpSwitchMark) { +                ++upCount; +            } +            if (bufferedDurationUs < mDownSwitchMark) { +                ++downCount; +            }          }      } -    if (!needToRemoveFetchers && mSwapMask == 0) { -        ALOGI("mSwitchInProgress = false"); -        mStreamMask = mNewStreamMask; -        mSwitchInProgress = false; +    if (minBufferPercent >= 0) { +        notifyBufferingUpdate(minBufferPercent); +    } + +    if (activeCount > 0) { +        up        = (upCount == activeCount); +        down      = (downCount > 0); +        ready     = (readyCount == activeCount); +        underflow = (underflowCount > 0); +        return true;      } -} -void LiveSession::scheduleCheckBandwidthEvent() { -    sp<AMessage> msg = new AMessage(kWhatCheckBandwidth, id()); -    msg->setInt32("generation", mCheckBandwidthGeneration); -    msg->post(10000000ll); +    return false;  } -void LiveSession::cancelCheckBandwidthEvent() { -    ++mCheckBandwidthGeneration; +void LiveSession::startBufferingIfNecessary() { +    ALOGV("startBufferingIfNecessary: mInPreparationPhase=%d, mBuffering=%d", +            mInPreparationPhase, mBuffering); +    if (!mBuffering) { +        mBuffering = true; + +        sp<AMessage> notify = mNotify->dup(); +        notify->setInt32("what", kWhatBufferingStart); +        notify->post(); +    }  } -void LiveSession::cancelBandwidthSwitch() { -    Mutex::Autolock lock(mSwapMutex); -    mSwitchGeneration++; -    mSwitchInProgress = false; -    mSwapMask = 0; +void LiveSession::stopBufferingIfNecessary() { +    ALOGV("stopBufferingIfNecessary: mInPreparationPhase=%d, 
mBuffering=%d", +            mInPreparationPhase, mBuffering); -    for (size_t i = 0; i < mFetcherInfos.size(); ++i) { -        FetcherInfo& info = mFetcherInfos.editValueAt(i); -        if (info.mToBeRemoved) { -            info.mToBeRemoved = false; -        } +    if (mBuffering) { +        mBuffering = false; + +        sp<AMessage> notify = mNotify->dup(); +        notify->setInt32("what", kWhatBufferingEnd); +        notify->post();      } +} -    for (size_t i = 0; i < kMaxStreams; ++i) { -        if (!mStreams[i].mNewUri.empty()) { -            ssize_t j = mFetcherInfos.indexOfKey(mStreams[i].mNewUri); -            if (j < 0) { -                mStreams[i].mNewUri.clear(); -                continue; -            } +void LiveSession::notifyBufferingUpdate(int32_t percentage) { +    if (percentage < mPrevBufferPercentage) { +        percentage = mPrevBufferPercentage; +    } else if (percentage > 100) { +        percentage = 100; +    } -            const FetcherInfo &info = mFetcherInfos.valueAt(j); -            info.mFetcher->stopAsync(); -            mFetcherInfos.removeItemsAt(j); -            mStreams[i].mNewUri.clear(); +    mPrevBufferPercentage = percentage; + +    ALOGV("notifyBufferingUpdate: percentage=%d%%", percentage); + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", kWhatBufferingUpdate); +    notify->setInt32("percentage", percentage); +    notify->post(); +} + +bool LiveSession::tryBandwidthFallback() { +    if (mInPreparationPhase || mReconfigurationInProgress) { +        // Don't try fallback during prepare or reconfig. +        // If error happens there, it's likely unrecoverable. 
+        return false; +    } +    if (mCurBandwidthIndex > mOrigBandwidthIndex) { +        // if we're switching up, simply cancel and resume old variant +        cancelBandwidthSwitch(true /* resume */); +        return true; +    } else { +        // if we're switching down, we're likely about to underflow (if +        // not already underflowing). try the lowest viable bandwidth if +        // not on that variant already. +        ssize_t lowestValid = getLowestValidBandwidthIndex(); +        if (mCurBandwidthIndex > lowestValid) { +            cancelBandwidthSwitch(); +            changeConfiguration(-1ll, lowestValid); +            return true;          }      } +    // return false if we couldn't find any fallback +    return false;  } -bool LiveSession::canSwitchBandwidthTo(size_t bandwidthIndex) { -    if (mReconfigurationInProgress || mSwitchInProgress) { +/* + * returns true if a bandwidth switch is actually needed (and started), + * returns false otherwise + */ +bool LiveSession::switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow) { +    // no need to check bandwidth if we only have 1 bandwidth settings +    if (mBandwidthItems.size() < 2) {          return false;      } -    if (mCurBandwidthIndex < 0) { -        return true; +    if (mSwitchInProgress) { +        if (mBuffering) { +            tryBandwidthFallback(); +        } +        return false;      } -    if (bandwidthIndex == (size_t)mCurBandwidthIndex) { -        return false; -    } else if (bandwidthIndex > (size_t)mCurBandwidthIndex) { -        return canSwitchUp(); +    int32_t bandwidthBps, shortTermBps; +    bool isStable; +    if (mBandwidthEstimator->estimateBandwidth( +            &bandwidthBps, &isStable, &shortTermBps)) { +        ALOGV("bandwidth estimated at %.2f kbps, " +                "stable %d, shortTermBps %.2f kbps", +                bandwidthBps / 1024.0f, isStable, shortTermBps / 1024.0f); +        mLastBandwidthBps = bandwidthBps; +        mLastBandwidthStable = 
isStable;      } else { -        return true; +        ALOGV("no bandwidth estimate."); +        return false; +    } + +    int32_t curBandwidth = mBandwidthItems.itemAt(mCurBandwidthIndex).mBandwidth; +    // canSwithDown and canSwitchUp can't both be true. +    // we only want to switch up when measured bw is 120% higher than current variant, +    // and we only want to switch down when measured bw is below current variant. +    bool canSwitchDown = bufferLow +            && (bandwidthBps < (int32_t)curBandwidth); +    bool canSwitchUp = bufferHigh +            && (bandwidthBps > (int32_t)curBandwidth * 12 / 10); + +    if (canSwitchDown || canSwitchUp) { +        // bandwidth estimating has some delay, if we have to downswitch when +        // it hasn't stabilized, use the short term to guess real bandwidth, +        // since it may be dropping too fast. +        // (note this doesn't apply to upswitch, always use longer average there) +        if (!isStable && canSwitchDown) { +            if (shortTermBps < bandwidthBps) { +                bandwidthBps = shortTermBps; +            } +        } + +        ssize_t bandwidthIndex = getBandwidthIndex(bandwidthBps); + +        // it's possible that we're checking for canSwitchUp case, but the returned +        // bandwidthIndex is < mCurBandwidthIndex, as getBandwidthIndex() only uses 70% +        // of measured bw. In that case we don't want to do anything, since we have +        // both enough buffer and enough bw. +        if ((canSwitchUp && bandwidthIndex > mCurBandwidthIndex) +         || (canSwitchDown && bandwidthIndex < mCurBandwidthIndex)) { +            // if not yet prepared, just restart again with new bw index. +            // this is faster and playback experience is cleaner. +            changeConfiguration( +                    mInPreparationPhase ? 
0 : -1ll, bandwidthIndex); +            return true; +        }      } +    return false;  } -void LiveSession::onCheckBandwidth(const sp<AMessage> &msg) { -    size_t bandwidthIndex = getBandwidthIndex(); -    if (canSwitchBandwidthTo(bandwidthIndex)) { -        changeConfiguration(-1ll /* timeUs */, bandwidthIndex); -    } else { -        // Come back and check again 10 seconds later in case there is nothing to do now. -        // If we DO change configuration, once that completes it'll schedule a new -        // check bandwidth event with an incremented mCheckBandwidthGeneration. -        msg->post(10000000ll); +void LiveSession::postError(status_t err) { +    // if we reached EOS, notify buffering of 100% +    if (err == ERROR_END_OF_STREAM) { +        notifyBufferingUpdate(100);      } +    // we'll stop buffer polling now, before that notify +    // stop buffering to stop the spinning icon +    stopBufferingIfNecessary(); +    cancelPollBuffering(); + +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", kWhatError); +    notify->setInt32("err", err); +    notify->post();  }  void LiveSession::postPrepared(status_t err) { @@ -1764,6 +2377,8 @@ void LiveSession::postPrepared(status_t err) {      if (err == OK || err == ERROR_END_OF_STREAM) {          notify->setInt32("what", kWhatPrepared);      } else { +        cancelPollBuffering(); +          notify->setInt32("what", kWhatPreparationFailed);          notify->setInt32("err", err);      } @@ -1771,10 +2386,8 @@ void LiveSession::postPrepared(status_t err) {      notify->post();      mInPreparationPhase = false; - -    mSwitchDownMonitor = new AMessage(kWhatCheckSwitchDown, id()); -    mSwitchDownMonitor->post();  } +  }  // namespace android diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h index 2d3a25a..90d56d0 100644 --- a/media/libstagefright/httplive/LiveSession.h +++ b/media/libstagefright/httplive/LiveSession.h @@ -23,43 +23,62 @@ 
 #include <utils/String8.h> +#include "mpeg2ts/ATSParser.h" +  namespace android {  struct ABuffer; +struct AReplyToken;  struct AnotherPacketSource; -struct DataSource; +class DataSource;  struct HTTPBase;  struct IMediaHTTPService;  struct LiveDataSource;  struct M3UParser;  struct PlaylistFetcher; +struct HLSTime; +struct HTTPDownloader;  struct LiveSession : public AHandler {      enum Flags {          // Don't log any URLs.          kFlagIncognito = 1,      }; -    LiveSession( -            const sp<AMessage> ¬ify, -            uint32_t flags, -            const sp<IMediaHTTPService> &httpService);      enum StreamIndex {          kAudioIndex    = 0,          kVideoIndex    = 1,          kSubtitleIndex = 2,          kMaxStreams    = 3, +        kMetaDataIndex = 3, +        kNumSources    = 4,      };      enum StreamType {          STREAMTYPE_AUDIO        = 1 << kAudioIndex,          STREAMTYPE_VIDEO        = 1 << kVideoIndex,          STREAMTYPE_SUBTITLES    = 1 << kSubtitleIndex, +        STREAMTYPE_METADATA     = 1 << kMetaDataIndex, +    }; + +    enum SeekMode { +        kSeekModeExactPosition = 0, // used for seeking +        kSeekModeNextSample    = 1, // used for seamless switching +        kSeekModeNextSegment   = 2, // used for seamless switching      }; + +    LiveSession( +            const sp<AMessage> ¬ify, +            uint32_t flags, +            const sp<IMediaHTTPService> &httpService); + +    int64_t calculateMediaTimeUs(int64_t firstTimeUs, int64_t timeUs, int32_t discontinuitySeq);      status_t dequeueAccessUnit(StreamType stream, sp<ABuffer> *accessUnit);      status_t getStreamFormat(StreamType stream, sp<AMessage> *format); +    sp<HTTPDownloader> getHTTPDownloader(); +      void connectAsync(              const char *url,              const KeyedVector<String8, String8> *headers = NULL); @@ -78,18 +97,21 @@ struct LiveSession : public AHandler {      bool isSeekable() const;      bool hasDynamicDuration() const; +    static const char 
*getKeyForStream(StreamType type); +    static const char *getNameForStream(StreamType type); +    static ATSParser::SourceType getSourceTypeForStream(StreamType type); +      enum {          kWhatStreamsChanged,          kWhatError,          kWhatPrepared,          kWhatPreparationFailed, +        kWhatBufferingStart, +        kWhatBufferingEnd, +        kWhatBufferingUpdate, +        kWhatMetadataDetected,      }; -    // create a format-change discontinuity -    // -    // swap: -    //   whether is format-change discontinuity should trigger a buffer swap -    sp<ABuffer> createFormatChangeBuffer(bool swap = true);  protected:      virtual ~LiveSession(); @@ -103,46 +125,56 @@ private:          kWhatDisconnect                 = 'disc',          kWhatSeek                       = 'seek',          kWhatFetcherNotify              = 'notf', -        kWhatCheckBandwidth             = 'bndw',          kWhatChangeConfiguration        = 'chC0',          kWhatChangeConfiguration2       = 'chC2',          kWhatChangeConfiguration3       = 'chC3', -        kWhatFinishDisconnect2          = 'fin2', -        kWhatSwapped                    = 'swap', -        kWhatCheckSwitchDown            = 'ckSD', -        kWhatSwitchDown                 = 'sDwn', +        kWhatPollBuffering              = 'poll',      }; -    static const size_t kBandwidthHistoryBytes; +    // Bandwidth Switch Mark Defaults +    static const int64_t kUpSwitchMarkUs; +    static const int64_t kDownSwitchMarkUs; +    static const int64_t kUpSwitchMarginUs; +    static const int64_t kResumeThresholdUs; +    // Buffer Prepare/Ready/Underflow Marks +    static const int64_t kReadyMarkUs; +    static const int64_t kPrepareMarkUs; +    static const int64_t kUnderflowMarkUs; + +    struct BandwidthEstimator;      struct BandwidthItem {          size_t mPlaylistIndex;          unsigned long mBandwidth; +        int64_t mLastFailureUs;      };      struct FetcherInfo {          sp<PlaylistFetcher> mFetcher;          
int64_t mDurationUs; -        bool mIsPrepared;          bool mToBeRemoved; +        bool mToBeResumed;      };      struct StreamItem {          const char *mType;          AString mUri, mNewUri; +        SeekMode mSeekMode;          size_t mCurDiscontinuitySeq;          int64_t mLastDequeuedTimeUs;          int64_t mLastSampleDurationUs;          StreamItem() -            : mType(""), -              mCurDiscontinuitySeq(0), -              mLastDequeuedTimeUs(0), -              mLastSampleDurationUs(0) {} +            : StreamItem("") {}          StreamItem(const char *type)              : mType(type), -              mCurDiscontinuitySeq(0), -              mLastDequeuedTimeUs(0), -              mLastSampleDurationUs(0) {} +              mSeekMode(kSeekModeExactPosition) { +                  reset(); +              } +        void reset() { +            mCurDiscontinuitySeq = 0; +            mLastDequeuedTimeUs = -1ll; +            mLastSampleDurationUs = 0ll; +        }          AString uriKey() {              AString key(mType);              key.append("URI"); @@ -155,19 +187,27 @@ private:      uint32_t mFlags;      sp<IMediaHTTPService> mHTTPService; +    bool mBuffering;      bool mInPreparationPhase; -    bool mBuffering[kMaxStreams]; +    int32_t mPollBufferingGeneration; +    int32_t mPrevBufferPercentage; -    sp<HTTPBase> mHTTPDataSource;      KeyedVector<String8, String8> mExtraHeaders;      AString mMasterURL;      Vector<BandwidthItem> mBandwidthItems;      ssize_t mCurBandwidthIndex; +    ssize_t mOrigBandwidthIndex; +    int32_t mLastBandwidthBps; +    bool mLastBandwidthStable; +    sp<BandwidthEstimator> mBandwidthEstimator;      sp<M3UParser> mPlaylist; +    int32_t mMaxWidth; +    int32_t mMaxHeight; +    sp<ALooper> mFetcherLooper;      KeyedVector<AString, FetcherInfo> mFetcherInfos;      uint32_t mStreamMask; @@ -180,17 +220,10 @@ private:      // we use this to track reconfiguration progress.      
uint32_t mSwapMask; -    KeyedVector<StreamType, sp<AnotherPacketSource> > mDiscontinuities;      KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources;      // A second set of packet sources that buffer content for the variant we're switching to.      KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources2; -    // A mutex used to serialize two sets of events: -    // * the swapping of packet sources in dequeueAccessUnit on the player thread, AND -    // * a forced bandwidth switch termination in cancelSwitch on the live looper. -    Mutex mSwapMutex; - -    int32_t mCheckBandwidthGeneration;      int32_t mSwitchGeneration;      int32_t mSubtitleGeneration; @@ -203,80 +236,76 @@ private:      bool mReconfigurationInProgress;      bool mSwitchInProgress; -    uint32_t mDisconnectReplyID; -    uint32_t mSeekReplyID; +    int64_t mUpSwitchMark; +    int64_t mDownSwitchMark; +    int64_t mUpSwitchMargin; + +    sp<AReplyToken> mDisconnectReplyID; +    sp<AReplyToken> mSeekReplyID;      bool mFirstTimeUsValid;      int64_t mFirstTimeUs;      int64_t mLastSeekTimeUs; -    sp<AMessage> mSwitchDownMonitor; +    bool mHasMetadata; +      KeyedVector<size_t, int64_t> mDiscontinuityAbsStartTimesUs;      KeyedVector<size_t, int64_t> mDiscontinuityOffsetTimesUs;      sp<PlaylistFetcher> addFetcher(const char *uri);      void onConnect(const sp<AMessage> &msg); -    status_t onSeek(const sp<AMessage> &msg); -    void onFinishDisconnect2(); - -    // If given a non-zero block_size (default 0), it is used to cap the number of -    // bytes read in from the DataSource. If given a non-NULL buffer, new content -    // is read into the end. -    // -    // The DataSource we read from is responsible for signaling error or EOF to help us -    // break out of the read loop. The DataSource can be returned to the caller, so -    // that the caller can reuse it for subsequent fetches (within the initially -    // requested range). 
-    // -    // For reused HTTP sources, the caller must download a file sequentially without -    // any overlaps or gaps to prevent reconnection. -    ssize_t fetchFile( -            const char *url, sp<ABuffer> *out, -            /* request/open a file starting at range_offset for range_length bytes */ -            int64_t range_offset = 0, int64_t range_length = -1, -            /* download block size */ -            uint32_t block_size = 0, -            /* reuse DataSource if doing partial fetch */ -            sp<DataSource> *source = NULL, -            String8 *actualUrl = NULL); - -    sp<M3UParser> fetchPlaylist( -            const char *url, uint8_t *curPlaylistHash, bool *unchanged); - -    size_t getBandwidthIndex(); -    int64_t latestMediaSegmentStartTimeUs(); - +    void onMasterPlaylistFetched(const sp<AMessage> &msg); +    void onSeek(const sp<AMessage> &msg); + +    bool UriIsSameAsIndex( const AString &uri, int32_t index, bool newUri); +    sp<AnotherPacketSource> getPacketSourceForStreamIndex(size_t trackIndex, bool newUri); +    sp<AnotherPacketSource> getMetadataSource( +            sp<AnotherPacketSource> sources[kNumSources], uint32_t streamMask, bool newUri); + +    bool resumeFetcher( +            const AString &uri, uint32_t streamMask, +            int64_t timeUs = -1ll, bool newUri = false); + +    float getAbortThreshold( +            ssize_t currentBWIndex, ssize_t targetBWIndex) const; +    void addBandwidthMeasurement(size_t numBytes, int64_t delayUs); +    size_t getBandwidthIndex(int32_t bandwidthBps); +    ssize_t getLowestValidBandwidthIndex() const; +    HLSTime latestMediaSegmentStartTime() const; + +    static bool isBandwidthValid(const BandwidthItem &item);      static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *);      static StreamType indexToType(int idx);      static ssize_t typeToIndex(int32_t type);      void changeConfiguration( -            int64_t timeUs, size_t bandwidthIndex, bool pickTrack = 
false); +            int64_t timeUs, ssize_t bwIndex = -1, bool pickTrack = false);      void onChangeConfiguration(const sp<AMessage> &msg);      void onChangeConfiguration2(const sp<AMessage> &msg);      void onChangeConfiguration3(const sp<AMessage> &msg); -    void onSwapped(const sp<AMessage> &msg); -    void onCheckSwitchDown(); -    void onSwitchDown(); -    void tryToFinishBandwidthSwitch(); - -    void scheduleCheckBandwidthEvent(); -    void cancelCheckBandwidthEvent(); - -    // cancelBandwidthSwitch is atomic wrt swapPacketSource; call it to prevent packet sources -    // from being swapped out on stale discontinuities while manipulating -    // mPacketSources/mPacketSources2. -    void cancelBandwidthSwitch(); -    bool canSwitchBandwidthTo(size_t bandwidthIndex); -    void onCheckBandwidth(const sp<AMessage> &msg); +    void swapPacketSource(StreamType stream); +    void tryToFinishBandwidthSwitch(const AString &oldUri); +    void cancelBandwidthSwitch(bool resume = false); +    bool checkSwitchProgress( +            sp<AMessage> &msg, int64_t delayUs, bool *needResumeUntil); + +    bool switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow); +    bool tryBandwidthFallback(); + +    void schedulePollBuffering(); +    void cancelPollBuffering(); +    void restartPollBuffering(); +    void onPollBuffering(); +    bool checkBuffering(bool &underflow, bool &ready, bool &down, bool &up); +    void startBufferingIfNecessary(); +    void stopBufferingIfNecessary(); +    void notifyBufferingUpdate(int32_t percentage);      void finishDisconnect();      void postPrepared(status_t err); - -    void swapPacketSource(StreamType stream); -    bool canSwitchUp(); +    void postError(status_t err);      DISALLOW_EVIL_CONSTRUCTORS(LiveSession);  }; diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index 997b694..ff2bb27 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ 
b/media/libstagefright/httplive/M3UParser.cpp @@ -250,7 +250,11 @@ M3UParser::M3UParser(        mIsVariantPlaylist(false),        mIsComplete(false),        mIsEvent(false), +      mFirstSeqNumber(-1), +      mLastSeqNumber(-1), +      mTargetDurationUs(-1ll),        mDiscontinuitySeq(0), +      mDiscontinuityCount(0),        mSelectedIndex(-1) {      mInitCheck = parse(data, size);  } @@ -282,6 +286,19 @@ size_t M3UParser::getDiscontinuitySeq() const {      return mDiscontinuitySeq;  } +int64_t M3UParser::getTargetDuration() const { +    return mTargetDurationUs; +} + +int32_t M3UParser::getFirstSeqNumber() const { +    return mFirstSeqNumber; +} + +void M3UParser::getSeqNumberRange(int32_t *firstSeq, int32_t *lastSeq) const { +    *firstSeq = mFirstSeqNumber; +    *lastSeq = mLastSeqNumber; +} +  sp<AMessage> M3UParser::meta() {      return mMeta;  } @@ -394,7 +411,9 @@ ssize_t M3UParser::getSelectedTrack(media_track_type type) const {  bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {      if (!mIsVariantPlaylist) { -        *uri = mBaseURI; +        if (uri != NULL) { +            *uri = mBaseURI; +        }          // Assume media without any more specific attribute contains          // audio and video, but no subtitles. 
@@ -407,7 +426,9 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {      AString groupID;      if (!meta->findString(key, &groupID)) { -        *uri = mItems.itemAt(index).mURI; +        if (uri != NULL) { +            *uri = mItems.itemAt(index).mURI; +        }          AString codecs;          if (!meta->findString("codecs", &codecs)) { @@ -433,18 +454,26 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {          }      } -    sp<MediaGroup> group = mMediaGroups.valueFor(groupID); -    if (!group->getActiveURI(uri)) { -        return false; -    } +    // if uri == NULL, we're only checking if the type is present, +    // don't care about the active URI (or if there is an active one) +    if (uri != NULL) { +        sp<MediaGroup> group = mMediaGroups.valueFor(groupID); +        if (!group->getActiveURI(uri)) { +            return false; +        } -    if ((*uri).empty()) { -        *uri = mItems.itemAt(index).mURI; +        if ((*uri).empty()) { +            *uri = mItems.itemAt(index).mURI; +        }      }      return true;  } +bool M3UParser::hasType(size_t index, const char *key) const { +    return getTypeURI(index, key, NULL /* uri */); +} +  static bool MakeURL(const char *baseURL, const char *url, AString *out) {      out->clear(); @@ -582,6 +611,7 @@ status_t M3UParser::parse(const void *_data, size_t size) {                      itemMeta = new AMessage;                  }                  itemMeta->setInt32("discontinuity", true); +                ++mDiscontinuityCount;              } else if (line.startsWith("#EXT-X-STREAM-INF")) {                  if (mMeta != NULL) {                      return ERROR_MALFORMED; @@ -609,6 +639,9 @@ status_t M3UParser::parse(const void *_data, size_t size) {              } else if (line.startsWith("#EXT-X-MEDIA")) {                  err = parseMedia(line);              } else if (line.startsWith("#EXT-X-DISCONTINUITY-SEQUENCE")) { +                if 
(mIsVariantPlaylist) { +                    return ERROR_MALFORMED; +                }                  size_t seq;                  err = parseDiscontinuitySequence(line, &seq);                  if (err == OK) { @@ -628,6 +661,8 @@ status_t M3UParser::parse(const void *_data, size_t size) {                          || !itemMeta->findInt64("durationUs", &durationUs)) {                      return ERROR_MALFORMED;                  } +                itemMeta->setInt32("discontinuity-sequence", +                        mDiscontinuitySeq + mDiscontinuityCount);              }              mItems.push(); @@ -644,6 +679,25 @@ status_t M3UParser::parse(const void *_data, size_t size) {          ++lineNo;      } +    // error checking of all fields that's required to appear once +    // (currently only checking "target-duration"), and +    // initialization of playlist properties (eg. mTargetDurationUs) +    if (!mIsVariantPlaylist) { +        int32_t targetDurationSecs; +        if (mMeta == NULL || !mMeta->findInt32( +                "target-duration", &targetDurationSecs)) { +            ALOGE("Media playlist missing #EXT-X-TARGETDURATION"); +            return ERROR_MALFORMED; +        } +        mTargetDurationUs = targetDurationSecs * 1000000ll; + +        mFirstSeqNumber = 0; +        if (mMeta != NULL) { +            mMeta->findInt32("media-sequence", &mFirstSeqNumber); +        } +        mLastSeqNumber = mFirstSeqNumber + mItems.size() - 1; +    } +      return OK;  } @@ -781,6 +835,29 @@ status_t M3UParser::parseStreamInf(                  *meta = new AMessage;              }              (*meta)->setString(key.c_str(), codecs.c_str()); +        } else if (!strcasecmp("resolution", key.c_str())) { +            const char *s = val.c_str(); +            char *end; +            unsigned long width = strtoul(s, &end, 10); + +            if (end == s || *end != 'x') { +                // malformed +                continue; +            } + +            s = end + 1; 
+            unsigned long height = strtoul(s, &end, 10); + +            if (end == s || *end != '\0') { +                // malformed +                continue; +            } + +            if (meta->get() == NULL) { +                *meta = new AMessage; +            } +            (*meta)->setInt32("width", width); +            (*meta)->setInt32("height", height);          } else if (!strcasecmp("audio", key.c_str())                  || !strcasecmp("video", key.c_str())                  || !strcasecmp("subtitles", key.c_str())) { diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h index 1cad060..fa648ed 100644 --- a/media/libstagefright/httplive/M3UParser.h +++ b/media/libstagefright/httplive/M3UParser.h @@ -36,6 +36,9 @@ struct M3UParser : public RefBase {      bool isComplete() const;      bool isEvent() const;      size_t getDiscontinuitySeq() const; +    int64_t getTargetDuration() const; +    int32_t getFirstSeqNumber() const; +    void getSeqNumberRange(int32_t *firstSeq, int32_t *lastSeq) const;      sp<AMessage> meta(); @@ -50,6 +53,7 @@ struct M3UParser : public RefBase {      ssize_t getSelectedTrack(media_track_type /* type */) const;      bool getTypeURI(size_t index, const char *key, AString *uri) const; +    bool hasType(size_t index, const char *key) const;  protected:      virtual ~M3UParser(); @@ -69,7 +73,11 @@ private:      bool mIsVariantPlaylist;      bool mIsComplete;      bool mIsEvent; +    int32_t mFirstSeqNumber; +    int32_t mLastSeqNumber; +    int64_t mTargetDurationUs;      size_t mDiscontinuitySeq; +    int32_t mDiscontinuityCount;      sp<AMessage> mMeta;      Vector<Item> mItems; diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp index 1227600..72d832e 100644 --- a/media/libstagefright/httplive/PlaylistFetcher.cpp +++ b/media/libstagefright/httplive/PlaylistFetcher.cpp @@ -17,24 +17,19 @@  //#define LOG_NDEBUG 0  
#define LOG_TAG "PlaylistFetcher"  #include <utils/Log.h> +#include <utils/misc.h>  #include "PlaylistFetcher.h" - -#include "LiveDataSource.h" +#include "HTTPDownloader.h"  #include "LiveSession.h"  #include "M3UParser.h" -  #include "include/avc_utils.h" -#include "include/HTTPBase.h"  #include "include/ID3.h"  #include "mpeg2ts/AnotherPacketSource.h" -#include <media/IStreamSource.h>  #include <media/stagefright/foundation/ABitReader.h>  #include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h> -#include <media/stagefright/foundation/hexdump.h> -#include <media/stagefright/FileSource.h>  #include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MetaData.h>  #include <media/stagefright/Utils.h> @@ -42,63 +37,156 @@  #include <ctype.h>  #include <inttypes.h>  #include <openssl/aes.h> -#include <openssl/md5.h> + +#define FLOGV(fmt, ...) ALOGV("[fetcher-%d] " fmt, mFetcherID, ##__VA_ARGS__) +#define FSLOGV(stream, fmt, ...) ALOGV("[fetcher-%d] [%s] " fmt, mFetcherID, \ +         LiveSession::getNameForStream(stream), ##__VA_ARGS__)  namespace android {  // static -const int64_t PlaylistFetcher::kMinBufferedDurationUs = 10000000ll; +const int64_t PlaylistFetcher::kMinBufferedDurationUs = 30000000ll;  const int64_t PlaylistFetcher::kMaxMonitorDelayUs = 3000000ll;  // LCM of 188 (size of a TS packet) & 1k works well  const int32_t PlaylistFetcher::kDownloadBlockSize = 47 * 1024; -const int32_t PlaylistFetcher::kNumSkipFrames = 5; + +struct PlaylistFetcher::DownloadState : public RefBase { +    DownloadState(); +    void resetState(); +    bool hasSavedState() const; +    void restoreState( +            AString &uri, +            sp<AMessage> &itemMeta, +            sp<ABuffer> &buffer, +            sp<ABuffer> &tsBuffer, +            int32_t &firstSeqNumberInPlaylist, +            int32_t &lastSeqNumberInPlaylist); +    void saveState( +            AString &uri, +            sp<AMessage> &itemMeta, +            
sp<ABuffer> &buffer, +            sp<ABuffer> &tsBuffer, +            int32_t &firstSeqNumberInPlaylist, +            int32_t &lastSeqNumberInPlaylist); + +private: +    bool mHasSavedState; +    AString mUri; +    sp<AMessage> mItemMeta; +    sp<ABuffer> mBuffer; +    sp<ABuffer> mTsBuffer; +    int32_t mFirstSeqNumberInPlaylist; +    int32_t mLastSeqNumberInPlaylist; +}; + +PlaylistFetcher::DownloadState::DownloadState() { +    resetState(); +} + +bool PlaylistFetcher::DownloadState::hasSavedState() const { +    return mHasSavedState; +} + +void PlaylistFetcher::DownloadState::resetState() { +    mHasSavedState = false; + +    mUri.clear(); +    mItemMeta = NULL; +    mBuffer = NULL; +    mTsBuffer = NULL; +    mFirstSeqNumberInPlaylist = 0; +    mLastSeqNumberInPlaylist = 0; +} + +void PlaylistFetcher::DownloadState::restoreState( +        AString &uri, +        sp<AMessage> &itemMeta, +        sp<ABuffer> &buffer, +        sp<ABuffer> &tsBuffer, +        int32_t &firstSeqNumberInPlaylist, +        int32_t &lastSeqNumberInPlaylist) { +    if (!mHasSavedState) { +        return; +    } + +    uri = mUri; +    itemMeta = mItemMeta; +    buffer = mBuffer; +    tsBuffer = mTsBuffer; +    firstSeqNumberInPlaylist = mFirstSeqNumberInPlaylist; +    lastSeqNumberInPlaylist = mLastSeqNumberInPlaylist; + +    resetState(); +} + +void PlaylistFetcher::DownloadState::saveState( +        AString &uri, +        sp<AMessage> &itemMeta, +        sp<ABuffer> &buffer, +        sp<ABuffer> &tsBuffer, +        int32_t &firstSeqNumberInPlaylist, +        int32_t &lastSeqNumberInPlaylist) { +    mHasSavedState = true; + +    mUri = uri; +    mItemMeta = itemMeta; +    mBuffer = buffer; +    mTsBuffer = tsBuffer; +    mFirstSeqNumberInPlaylist = firstSeqNumberInPlaylist; +    mLastSeqNumberInPlaylist = lastSeqNumberInPlaylist; +}  PlaylistFetcher::PlaylistFetcher(          const sp<AMessage> ¬ify,          const sp<LiveSession> &session,          const char *uri, +        int32_t id,  
        int32_t subtitleGeneration)      : mNotify(notify), -      mStartTimeUsNotify(notify->dup()),        mSession(session),        mURI(uri), +      mFetcherID(id),        mStreamTypeMask(0),        mStartTimeUs(-1ll),        mSegmentStartTimeUs(-1ll),        mDiscontinuitySeq(-1ll),        mStartTimeUsRelative(false),        mLastPlaylistFetchTimeUs(-1ll), +      mPlaylistTimeUs(-1ll),        mSeqNumber(-1),        mNumRetries(0),        mStartup(true), -      mAdaptive(false), -      mPrepared(false), +      mIDRFound(false), +      mSeekMode(LiveSession::kSeekModeExactPosition), +      mTimeChangeSignaled(false),        mNextPTSTimeUs(-1ll),        mMonitorQueueGeneration(0),        mSubtitleGeneration(subtitleGeneration), +      mLastDiscontinuitySeq(-1ll),        mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY),        mFirstPTSValid(false), -      mAbsoluteTimeAnchorUs(0ll), -      mVideoBuffer(new AnotherPacketSource(NULL)) { +      mFirstTimeUs(-1ll), +      mVideoBuffer(new AnotherPacketSource(NULL)), +      mThresholdRatio(-1.0f), +      mDownloadState(new DownloadState()), +      mHasMetadata(false) {      memset(mPlaylistHash, 0, sizeof(mPlaylistHash)); -    mStartTimeUsNotify->setInt32("what", kWhatStartedAt); -    mStartTimeUsNotify->setInt32("streamMask", 0); +    mHTTPDownloader = mSession->getHTTPDownloader();  }  PlaylistFetcher::~PlaylistFetcher() {  } +int32_t PlaylistFetcher::getFetcherID() const { +    return mFetcherID; +} +  int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const {      CHECK(mPlaylist != NULL); -    int32_t firstSeqNumberInPlaylist; -    if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( -                "media-sequence", &firstSeqNumberInPlaylist)) { -        firstSeqNumberInPlaylist = 0; -    } - -    int32_t lastSeqNumberInPlaylist = -        firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; +    int32_t firstSeqNumberInPlaylist, lastSeqNumberInPlaylist; +    
mPlaylist->getSeqNumberRange( +            &firstSeqNumberInPlaylist, &lastSeqNumberInPlaylist);      CHECK_GE(seqNumber, firstSeqNumberInPlaylist);      CHECK_LE(seqNumber, lastSeqNumberInPlaylist); @@ -119,6 +207,27 @@ int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const {      return segmentStartUs;  } +int64_t PlaylistFetcher::getSegmentDurationUs(int32_t seqNumber) const { +    CHECK(mPlaylist != NULL); + +    int32_t firstSeqNumberInPlaylist, lastSeqNumberInPlaylist; +    mPlaylist->getSeqNumberRange( +            &firstSeqNumberInPlaylist, &lastSeqNumberInPlaylist); + +    CHECK_GE(seqNumber, firstSeqNumberInPlaylist); +    CHECK_LE(seqNumber, lastSeqNumberInPlaylist); + +    int32_t index = seqNumber - firstSeqNumberInPlaylist; +    sp<AMessage> itemMeta; +    CHECK(mPlaylist->itemAt( +                index, NULL /* uri */, &itemMeta)); + +    int64_t itemDurationUs; +    CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + +    return itemDurationUs; +} +  int64_t PlaylistFetcher::delayUsToRefreshPlaylist() const {      int64_t nowUs = ALooper::GetNowUs(); @@ -131,10 +240,7 @@ int64_t PlaylistFetcher::delayUsToRefreshPlaylist() const {          return (~0llu >> 1);      } -    int32_t targetDurationSecs; -    CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); - -    int64_t targetDurationUs = targetDurationSecs * 1000000ll; +    int64_t targetDurationUs = mPlaylist->getTargetDuration();      int64_t minPlaylistAgeUs; @@ -224,9 +330,11 @@ status_t PlaylistFetcher::decryptBuffer(      if (index >= 0) {          key = mAESKeyForURI.valueAt(index);      } else { -        ssize_t err = mSession->fetchFile(keyURI.c_str(), &key); +        ssize_t err = mHTTPDownloader->fetchFile(keyURI.c_str(), &key); -        if (err < 0) { +        if (err == ERROR_NOT_CONNECTED) { +            return ERROR_NOT_CONNECTED; +        } else if (err < 0) {              ALOGE("failed to fetch cipher key from '%s'.", 
keyURI.c_str());              return ERROR_IO;          } else if (key->size() != 16) { @@ -322,10 +430,10 @@ void PlaylistFetcher::postMonitorQueue(int64_t delayUs, int64_t minDelayUs) {          maxDelayUs = minDelayUs;      }      if (delayUs > maxDelayUs) { -        ALOGV("Need to refresh playlist in %" PRId64 , maxDelayUs); +        FLOGV("Need to refresh playlist in %lld", (long long)maxDelayUs);          delayUs = maxDelayUs;      } -    sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id()); +    sp<AMessage> msg = new AMessage(kWhatMonitorQueue, this);      msg->setInt32("generation", mMonitorQueueGeneration);      msg->post(delayUs);  } @@ -334,15 +442,44 @@ void PlaylistFetcher::cancelMonitorQueue() {      ++mMonitorQueueGeneration;  } +void PlaylistFetcher::setStoppingThreshold(float thresholdRatio, bool disconnect) { +    { +        AutoMutex _l(mThresholdLock); +        mThresholdRatio = thresholdRatio; +    } +    if (disconnect) { +        mHTTPDownloader->disconnect(); +    } +} + +void PlaylistFetcher::resetStoppingThreshold(bool disconnect) { +    { +        AutoMutex _l(mThresholdLock); +        mThresholdRatio = -1.0f; +    } +    if (disconnect) { +        mHTTPDownloader->disconnect(); +    } else { +        // allow reconnect +        mHTTPDownloader->reconnect(); +    } +} + +float PlaylistFetcher::getStoppingThreshold() { +    AutoMutex _l(mThresholdLock); +    return mThresholdRatio; +} +  void PlaylistFetcher::startAsync(          const sp<AnotherPacketSource> &audioSource,          const sp<AnotherPacketSource> &videoSource,          const sp<AnotherPacketSource> &subtitleSource, +        const sp<AnotherPacketSource> &metadataSource,          int64_t startTimeUs,          int64_t segmentStartTimeUs,          int32_t startDiscontinuitySeq, -        bool adaptive) { -    sp<AMessage> msg = new AMessage(kWhatStart, id()); +        LiveSession::SeekMode seekMode) { +    sp<AMessage> msg = new AMessage(kWhatStart, this);      uint32_t 
streamTypeMask = 0ul; @@ -361,30 +498,53 @@ void PlaylistFetcher::startAsync(          streamTypeMask |= LiveSession::STREAMTYPE_SUBTITLES;      } +    if (metadataSource != NULL) { +        msg->setPointer("metadataSource", metadataSource.get()); +        // metadataSource does not affect streamTypeMask. +    } +      msg->setInt32("streamTypeMask", streamTypeMask);      msg->setInt64("startTimeUs", startTimeUs);      msg->setInt64("segmentStartTimeUs", segmentStartTimeUs);      msg->setInt32("startDiscontinuitySeq", startDiscontinuitySeq); -    msg->setInt32("adaptive", adaptive); +    msg->setInt32("seekMode", seekMode);      msg->post();  } -void PlaylistFetcher::pauseAsync() { -    (new AMessage(kWhatPause, id()))->post(); +/* + * pauseAsync + * + * threshold: 0.0f - pause after current fetch block (default 47Kbytes) + *           -1.0f - pause after finishing current segment + *        0.0~1.0f - pause if remaining of current segment exceeds threshold + */ +void PlaylistFetcher::pauseAsync( +        float thresholdRatio, bool disconnect) { +    setStoppingThreshold(thresholdRatio, disconnect); + +    (new AMessage(kWhatPause, this))->post();  }  void PlaylistFetcher::stopAsync(bool clear) { -    sp<AMessage> msg = new AMessage(kWhatStop, id()); +    setStoppingThreshold(0.0f, true /* disconncect */); + +    sp<AMessage> msg = new AMessage(kWhatStop, this);      msg->setInt32("clear", clear);      msg->post();  }  void PlaylistFetcher::resumeUntilAsync(const sp<AMessage> ¶ms) { -    AMessage* msg = new AMessage(kWhatResumeUntil, id()); +    FLOGV("resumeUntilAsync: params=%s", params->debugString().c_str()); + +    AMessage* msg = new AMessage(kWhatResumeUntil, this);      msg->setMessage("params", params);      msg->post();  } +void PlaylistFetcher::fetchPlaylistAsync() { +    (new AMessage(kWhatFetchPlaylist, this))->post(); +} +  void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {      switch (msg->what()) {          case kWhatStart: @@ 
-404,6 +564,10 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {              sp<AMessage> notify = mNotify->dup();              notify->setInt32("what", kWhatPaused); +            notify->setInt32("seekMode", +                    mDownloadState->hasSavedState() +                    ? LiveSession::kSeekModeNextSample +                    : LiveSession::kSeekModeNextSegment);              notify->post();              break;          } @@ -418,6 +582,19 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {              break;          } +        case kWhatFetchPlaylist: +        { +            bool unchanged; +            sp<M3UParser> playlist = mHTTPDownloader->fetchPlaylist( +                    mURI.c_str(), NULL /* curPlaylistHash */, &unchanged); + +            sp<AMessage> notify = mNotify->dup(); +            notify->setInt32("what", kWhatPlaylistFetched); +            notify->setObject("playlist", playlist); +            notify->post(); +            break; +        } +          case kWhatMonitorQueue:          case kWhatDownloadNext:          { @@ -450,6 +627,10 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {  status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {      mPacketSources.clear(); +    mStopParams.clear(); +    mStartTimeUsNotify = mNotify->dup(); +    mStartTimeUsNotify->setInt32("what", kWhatStartedAt); +    mStartTimeUsNotify->setString("uri", mURI);      uint32_t streamTypeMask;      CHECK(msg->findInt32("streamTypeMask", (int32_t *)&streamTypeMask)); @@ -457,11 +638,11 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {      int64_t startTimeUs;      int64_t segmentStartTimeUs;      int32_t startDiscontinuitySeq; -    int32_t adaptive; +    int32_t seekMode;      CHECK(msg->findInt64("startTimeUs", &startTimeUs));      CHECK(msg->findInt64("segmentStartTimeUs", &segmentStartTimeUs));      CHECK(msg->findInt32("startDiscontinuitySeq", &startDiscontinuitySeq)); -    
CHECK(msg->findInt32("adaptive", &adaptive)); +    CHECK(msg->findInt32("seekMode", &seekMode));      if (streamTypeMask & LiveSession::STREAMTYPE_AUDIO) {          void *ptr; @@ -490,17 +671,38 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {                  static_cast<AnotherPacketSource *>(ptr));      } +    void *ptr; +    // metadataSource is not part of streamTypeMask +    if ((streamTypeMask & (LiveSession::STREAMTYPE_AUDIO | LiveSession::STREAMTYPE_VIDEO)) +            && msg->findPointer("metadataSource", &ptr)) { +        mPacketSources.add( +                LiveSession::STREAMTYPE_METADATA, +                static_cast<AnotherPacketSource *>(ptr)); +    } +      mStreamTypeMask = streamTypeMask;      mSegmentStartTimeUs = segmentStartTimeUs; -    mDiscontinuitySeq = startDiscontinuitySeq; + +    if (startDiscontinuitySeq >= 0) { +        mDiscontinuitySeq = startDiscontinuitySeq; +    } + +    mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY; +    mSeekMode = (LiveSession::SeekMode) seekMode; + +    if (startTimeUs >= 0 || mSeekMode == LiveSession::kSeekModeNextSample) { +        mStartup = true; +        mIDRFound = false; +        mVideoBuffer->clear(); +    }      if (startTimeUs >= 0) {          mStartTimeUs = startTimeUs; +        mFirstPTSValid = false;          mSeqNumber = -1; -        mStartup = true; -        mPrepared = false; -        mAdaptive = adaptive; +        mTimeChangeSignaled = false; +        mDownloadState->resetState();      }      postMonitorQueue(); @@ -510,6 +712,9 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {  void PlaylistFetcher::onPause() {      cancelMonitorQueue(); +    mLastDiscontinuitySeq = mDiscontinuitySeq; + +    resetStoppingThreshold(false /* disconnect */);  }  void PlaylistFetcher::onStop(const sp<AMessage> &msg) { @@ -524,8 +729,11 @@ void PlaylistFetcher::onStop(const sp<AMessage> &msg) {          }      } +    mDownloadState->resetState();      mPacketSources.clear();      
mStreamTypeMask = 0; + +    resetStoppingThreshold(true /* disconnect */);  }  // Resume until we have reached the boundary timestamps listed in `msg`; when @@ -535,57 +743,18 @@ status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) {      sp<AMessage> params;      CHECK(msg->findMessage("params", ¶ms)); -    bool stop = false; -    for (size_t i = 0; i < mPacketSources.size(); i++) { -        sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i); - -        const char *stopKey; -        int streamType = mPacketSources.keyAt(i); -        switch (streamType) { -        case LiveSession::STREAMTYPE_VIDEO: -            stopKey = "timeUsVideo"; -            break; - -        case LiveSession::STREAMTYPE_AUDIO: -            stopKey = "timeUsAudio"; -            break; - -        case LiveSession::STREAMTYPE_SUBTITLES: -            stopKey = "timeUsSubtitle"; -            break; - -        default: -            TRESPASS(); -        } - -        // Don't resume if we would stop within a resume threshold. 
-        int32_t discontinuitySeq; -        int64_t latestTimeUs = 0, stopTimeUs = 0; -        sp<AMessage> latestMeta = packetSource->getLatestEnqueuedMeta(); -        if (latestMeta != NULL -                && latestMeta->findInt32("discontinuitySeq", &discontinuitySeq) -                && discontinuitySeq == mDiscontinuitySeq -                && latestMeta->findInt64("timeUs", &latestTimeUs) -                && params->findInt64(stopKey, &stopTimeUs) -                && stopTimeUs - latestTimeUs < resumeThreshold(latestMeta)) { -            stop = true; -        } -    } - -    if (stop) { -        for (size_t i = 0; i < mPacketSources.size(); i++) { -            mPacketSources.valueAt(i)->queueAccessUnit(mSession->createFormatChangeBuffer()); -        } -        stopAsync(/* clear = */ false); -        return OK; -    } -      mStopParams = params; -    postMonitorQueue(); +    onDownloadNext();      return OK;  } +void PlaylistFetcher::notifyStopReached() { +    sp<AMessage> notify = mNotify->dup(); +    notify->setInt32("what", kWhatStopReached); +    notify->post(); +} +  void PlaylistFetcher::notifyError(status_t err) {      sp<AMessage> notify = mNotify->dup();      notify->setInt32("what", kWhatError); @@ -604,96 +773,84 @@ void PlaylistFetcher::queueDiscontinuity(  }  void PlaylistFetcher::onMonitorQueue() { -    bool downloadMore = false; -    refreshPlaylist(); +    // in the middle of an unfinished download, delay +    // playlist refresh as it'll change seq numbers +    if (!mDownloadState->hasSavedState()) { +        refreshPlaylist(); +    } -    int32_t targetDurationSecs;      int64_t targetDurationUs = kMinBufferedDurationUs;      if (mPlaylist != NULL) { -        if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( -                "target-duration", &targetDurationSecs)) { -            ALOGE("Playlist is missing required EXT-X-TARGETDURATION tag"); -            notifyError(ERROR_MALFORMED); -            return; -        } -        
targetDurationUs = targetDurationSecs * 1000000ll; -    } - -    // buffer at least 3 times the target duration, or up to 10 seconds -    int64_t durationToBufferUs = targetDurationUs * 3; -    if (durationToBufferUs > kMinBufferedDurationUs)  { -        durationToBufferUs = kMinBufferedDurationUs; +        targetDurationUs = mPlaylist->getTargetDuration();      }      int64_t bufferedDurationUs = 0ll; -    status_t finalResult = NOT_ENOUGH_DATA; +    status_t finalResult = OK;      if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {          sp<AnotherPacketSource> packetSource =              mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES);          bufferedDurationUs =                  packetSource->getBufferedDurationUs(&finalResult); -        finalResult = OK;      } else { -        // Use max stream duration to prevent us from waiting on a non-existent stream; -        // when we cannot make out from the manifest what streams are included in a playlist -        // we might assume extra streams. +        // Use min stream duration, but ignore streams that never have any packet +        // enqueued to prevent us from waiting on a non-existent stream; +        // when we cannot make out from the manifest what streams are included in +        // a playlist we might assume extra streams. 
+        bufferedDurationUs = -1ll;          for (size_t i = 0; i < mPacketSources.size(); ++i) { -            if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) { +            if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0 +                    || mPacketSources[i]->getLatestEnqueuedMeta() == NULL) {                  continue;              }              int64_t bufferedStreamDurationUs =                  mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult); -            ALOGV("buffered %" PRId64 " for stream %d", -                    bufferedStreamDurationUs, mPacketSources.keyAt(i)); -            if (bufferedStreamDurationUs > bufferedDurationUs) { + +            FSLOGV(mPacketSources.keyAt(i), "buffered %lld", (long long)bufferedStreamDurationUs); + +            if (bufferedDurationUs == -1ll +                 || bufferedStreamDurationUs < bufferedDurationUs) {                  bufferedDurationUs = bufferedStreamDurationUs;              }          } +        if (bufferedDurationUs == -1ll) { +            bufferedDurationUs = 0ll; +        }      } -    downloadMore = (bufferedDurationUs < durationToBufferUs); -    // signal start if buffered up at least the target size -    if (!mPrepared && bufferedDurationUs > targetDurationUs && downloadMore) { -        mPrepared = true; - -        ALOGV("prepared, buffered=%" PRId64 " > %" PRId64 "", -                bufferedDurationUs, targetDurationUs); -        sp<AMessage> msg = mNotify->dup(); -        msg->setInt32("what", kWhatTemporarilyDoneFetching); -        msg->post(); -    } +    if (finalResult == OK && bufferedDurationUs < kMinBufferedDurationUs) { +        FLOGV("monitoring, buffered=%lld < %lld", +                (long long)bufferedDurationUs, (long long)kMinBufferedDurationUs); -    if (finalResult == OK && downloadMore) { -        ALOGV("monitoring, buffered=%" PRId64 " < %" PRId64 "", -                bufferedDurationUs, durationToBufferUs);          // delay the next download slightly; 
hopefully this gives other concurrent fetchers          // a better chance to run.          // onDownloadNext(); -        sp<AMessage> msg = new AMessage(kWhatDownloadNext, id()); +        sp<AMessage> msg = new AMessage(kWhatDownloadNext, this);          msg->setInt32("generation", mMonitorQueueGeneration);          msg->post(1000l);      } else { -        // Nothing to do yet, try again in a second. +        // We'd like to maintain buffering above durationToBufferUs, so try +        // again when buffer just about to go below durationToBufferUs +        // (or after targetDurationUs / 2, whichever is smaller). +        int64_t delayUs = bufferedDurationUs - kMinBufferedDurationUs + 1000000ll; +        if (delayUs > targetDurationUs / 2) { +            delayUs = targetDurationUs / 2; +        } -        sp<AMessage> msg = mNotify->dup(); -        msg->setInt32("what", kWhatTemporarilyDoneFetching); -        msg->post(); +        FLOGV("pausing for %lld, buffered=%lld > %lld", +                (long long)delayUs, +                (long long)bufferedDurationUs, +                (long long)kMinBufferedDurationUs); -        int64_t delayUs = mPrepared ? kMaxMonitorDelayUs : targetDurationUs / 2; -        ALOGV("pausing for %" PRId64 ", buffered=%" PRId64 " > %" PRId64 "", -                delayUs, bufferedDurationUs, durationToBufferUs); -        // :TRICKY: need to enforce minimum delay because the delay to -        // refresh the playlist will become 0 -        postMonitorQueue(delayUs, mPrepared ? 
targetDurationUs * 2 : 0); +        postMonitorQueue(delayUs);      }  }  status_t PlaylistFetcher::refreshPlaylist() {      if (delayUsToRefreshPlaylist() <= 0) {          bool unchanged; -        sp<M3UParser> playlist = mSession->fetchPlaylist( +        sp<M3UParser> playlist = mHTTPDownloader->fetchPlaylist(                  mURI.c_str(), mPlaylistHash, &unchanged);          if (playlist == NULL) { @@ -715,6 +872,14 @@ status_t PlaylistFetcher::refreshPlaylist() {              if (mPlaylist->isComplete() || mPlaylist->isEvent()) {                  updateDuration();              } +            // Notify LiveSession to use target-duration based buffering level +            // for up/down switch. Default LiveSession::kUpSwitchMark may not +            // be reachable for live streams, as our max buffering amount is +            // limited to 3 segments. +            if (!mPlaylist->isComplete()) { +                updateTargetDuration(); +            } +            mPlaylistTimeUs = ALooper::GetNowUs();          }          mLastPlaylistFetchTimeUs = ALooper::GetNowUs(); @@ -727,25 +892,83 @@ bool PlaylistFetcher::bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer) {      return buffer->size() > 0 && buffer->data()[0] == 0x47;  } -void PlaylistFetcher::onDownloadNext() { +bool PlaylistFetcher::shouldPauseDownload() { +    if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) { +        // doesn't apply to subtitles +        return false; +    } + +    // Calculate threshold to abort current download +    float thresholdRatio = getStoppingThreshold(); + +    if (thresholdRatio < 0.0f) { +        // never abort +        return false; +    } else if (thresholdRatio == 0.0f) { +        // immediately abort +        return true; +    } + +    // now we have a positive thresholdUs, abort if remaining +    // portion to download is over that threshold. 
+    if (mSegmentFirstPTS < 0) { +        // this means we haven't even find the first access unit, +        // abort now as we must be very far away from the end. +        return true; +    } +    int64_t lastEnqueueUs = mSegmentFirstPTS; +    for (size_t i = 0; i < mPacketSources.size(); ++i) { +        if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) { +            continue; +        } +        sp<AMessage> meta = mPacketSources[i]->getLatestEnqueuedMeta(); +        int32_t type; +        if (meta == NULL || meta->findInt32("discontinuity", &type)) { +            continue; +        } +        int64_t tmpUs; +        CHECK(meta->findInt64("timeUs", &tmpUs)); +        if (tmpUs > lastEnqueueUs) { +            lastEnqueueUs = tmpUs; +        } +    } +    lastEnqueueUs -= mSegmentFirstPTS; + +    int64_t targetDurationUs = mPlaylist->getTargetDuration(); +    int64_t thresholdUs = thresholdRatio * targetDurationUs; + +    FLOGV("%spausing now, thresholdUs %lld, remaining %lld", +            targetDurationUs - lastEnqueueUs > thresholdUs ? 
"" : "not ", +            (long long)thresholdUs, +            (long long)(targetDurationUs - lastEnqueueUs)); + +    if (targetDurationUs - lastEnqueueUs > thresholdUs) { +        return true; +    } +    return false; +} + +bool PlaylistFetcher::initDownloadState( +        AString &uri, +        sp<AMessage> &itemMeta, +        int32_t &firstSeqNumberInPlaylist, +        int32_t &lastSeqNumberInPlaylist) {      status_t err = refreshPlaylist(); -    int32_t firstSeqNumberInPlaylist = 0; -    int32_t lastSeqNumberInPlaylist = 0; +    firstSeqNumberInPlaylist = 0; +    lastSeqNumberInPlaylist = 0;      bool discontinuity = false;      if (mPlaylist != NULL) { -        if (mPlaylist->meta() != NULL) { -            mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist); -        } - -        lastSeqNumberInPlaylist = -                firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; +        mPlaylist->getSeqNumberRange( +                &firstSeqNumberInPlaylist, &lastSeqNumberInPlaylist);          if (mDiscontinuitySeq < 0) {              mDiscontinuitySeq = mPlaylist->getDiscontinuitySeq();          }      } +    mSegmentFirstPTS = -1ll; +      if (mPlaylist != NULL && mSeqNumber < 0) {          CHECK_GE(mStartTimeUs, 0ll); @@ -764,18 +987,26 @@ void PlaylistFetcher::onDownloadNext() {                  mStartTimeUs -= getSegmentStartTimeUs(mSeqNumber);              }              mStartTimeUsRelative = true; -            ALOGV("Initial sequence number for time %" PRId64 " is %d from (%d .. %d)", -                    mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist, +            FLOGV("Initial sequence number for time %lld is %d from (%d .. 
%d)", +                    (long long)mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist,                      lastSeqNumberInPlaylist);          } else {              // When adapting or track switching, mSegmentStartTimeUs (relative              // to media time 0) is used to determine the start segment; mStartTimeUs (absolute              // timestamps coming from the media container) is used to determine the position              // inside a segments. -            mSeqNumber = getSeqNumberForTime(mSegmentStartTimeUs); -            if (mAdaptive) { +            if (mStreamTypeMask != LiveSession::STREAMTYPE_SUBTITLES +                    && mSeekMode != LiveSession::kSeekModeNextSample) {                  // avoid double fetch/decode -                mSeqNumber += 1; +                // Use (mSegmentStartTimeUs + 1/2 * targetDurationUs) to search +                // for the starting segment in new variant. +                // If the two variants' segments are aligned, this gives the +                // next segment. If they're not aligned, this gives the segment +                // that overlaps no more than 1/2 * targetDurationUs. +                mSeqNumber = getSeqNumberForTime(mSegmentStartTimeUs +                        + mPlaylist->getTargetDuration() / 2); +            } else { +                mSeqNumber = getSeqNumberForTime(mSegmentStartTimeUs);              }              ssize_t minSeq = getSeqNumberForDiscontinuity(mDiscontinuitySeq);              if (mSeqNumber < minSeq) { @@ -789,7 +1020,7 @@ void PlaylistFetcher::onDownloadNext() {              if (mSeqNumber > lastSeqNumberInPlaylist) {                  mSeqNumber = lastSeqNumberInPlaylist;              } -            ALOGV("Initial sequence number for live event %d from (%d .. %d)", +            FLOGV("Initial sequence number is %d from (%d .. 
%d)",                      mSeqNumber, firstSeqNumberInPlaylist,                      lastSeqNumberInPlaylist);          } @@ -809,26 +1040,24 @@ void PlaylistFetcher::onDownloadNext() {                  // refresh in increasing fraction (1/2, 1/3, ...) of the                  // playlist's target duration or 3 seconds, whichever is less                  int64_t delayUs = kMaxMonitorDelayUs; -                if (mPlaylist != NULL && mPlaylist->meta() != NULL) { -                    int32_t targetDurationSecs; -                    CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); -                    delayUs = mPlaylist->size() * targetDurationSecs * -                            1000000ll / (1 + mNumRetries); +                if (mPlaylist != NULL) { +                    delayUs = mPlaylist->size() * mPlaylist->getTargetDuration() +                            / (1 + mNumRetries);                  }                  if (delayUs > kMaxMonitorDelayUs) {                      delayUs = kMaxMonitorDelayUs;                  } -                ALOGV("sequence number high: %d from (%d .. %d), " -                      "monitor in %" PRId64 " (retry=%d)", +                FLOGV("sequence number high: %d from (%d .. 
%d), " +                      "monitor in %lld (retry=%d)",                          mSeqNumber, firstSeqNumberInPlaylist, -                        lastSeqNumberInPlaylist, delayUs, mNumRetries); +                        lastSeqNumberInPlaylist, (long long)delayUs, mNumRetries);                  postMonitorQueue(delayUs); -                return; +                return false;              }              if (err != OK) {                  notifyError(err); -                return; +                return false;              }              // we've missed the boat, let's start 3 segments prior to the latest sequence @@ -843,12 +1072,8 @@ void PlaylistFetcher::onDownloadNext() {                  // but since the segments we are supposed to fetch have already rolled off                  // the playlist, i.e. we have already missed the boat, we inevitably have to                  // skip. -                for (size_t i = 0; i < mPacketSources.size(); i++) { -                    sp<ABuffer> formatChange = mSession->createFormatChangeBuffer(); -                    mPacketSources.valueAt(i)->queueAccessUnit(formatChange); -                } -                stopAsync(/* clear = */ false); -                return; +                notifyStopReached(); +                return false;              }              mSeqNumber = lastSeqNumberInPlaylist - 3;              if (mSeqNumber < firstSeqNumberInPlaylist) { @@ -858,45 +1083,59 @@ void PlaylistFetcher::onDownloadNext() {              // fall through          } else { -            ALOGE("Cannot find sequence number %d in playlist " -                 "(contains %d - %d)", -                 mSeqNumber, firstSeqNumberInPlaylist, -                  firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1); +            if (mPlaylist != NULL) { +                ALOGE("Cannot find sequence number %d in playlist " +                     "(contains %d - %d)", +                     mSeqNumber, firstSeqNumberInPlaylist, +               
       firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1); + +                if (mTSParser != NULL) { +                    mTSParser->signalEOS(ERROR_END_OF_STREAM); +                    // Use an empty buffer; we don't have any new data, just want to extract +                    // potential new access units after flush.  Reset mSeqNumber to +                    // lastSeqNumberInPlaylist such that we set the correct access unit +                    // properties in extractAndQueueAccessUnitsFromTs. +                    sp<ABuffer> buffer = new ABuffer(0); +                    mSeqNumber = lastSeqNumberInPlaylist; +                    extractAndQueueAccessUnitsFromTs(buffer); +                } +                notifyError(ERROR_END_OF_STREAM); +            } else { +                // It's possible that we were never able to download the playlist. +                // In this case we should notify error, instead of EOS, as EOS during +                // prepare means we succeeded in downloading everything. +                ALOGE("Failed to download playlist!"); +                notifyError(ERROR_IO); +            } -            notifyError(ERROR_END_OF_STREAM); -            return; +            return false;          }      }      mNumRetries = 0; -    AString uri; -    sp<AMessage> itemMeta;      CHECK(mPlaylist->itemAt(                  mSeqNumber - firstSeqNumberInPlaylist,                  &uri,                  &itemMeta)); +    CHECK(itemMeta->findInt32("discontinuity-sequence", &mDiscontinuitySeq)); +      int32_t val;      if (itemMeta->findInt32("discontinuity", &val) && val != 0) { -        mDiscontinuitySeq++; +        discontinuity = true; +    } else if (mLastDiscontinuitySeq >= 0 +            && mDiscontinuitySeq != mLastDiscontinuitySeq) { +        // Seek jumped to a new discontinuity sequence. We need to signal +        // a format change to decoder. 
Decoder needs to shutdown and be +        // created again if seamless format change is unsupported. +        FLOGV("saw discontinuity: mStartup %d, mLastDiscontinuitySeq %d, " +                "mDiscontinuitySeq %d, mStartTimeUs %lld", +                mStartup, mLastDiscontinuitySeq, mDiscontinuitySeq, (long long)mStartTimeUs);          discontinuity = true;      } +    mLastDiscontinuitySeq = -1; -    int64_t range_offset, range_length; -    if (!itemMeta->findInt64("range-offset", &range_offset) -            || !itemMeta->findInt64("range-length", &range_length)) { -        range_offset = 0; -        range_length = -1; -    } - -    ALOGV("fetching segment %d from (%d .. %d)", -          mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); - -    ALOGV("fetching '%s'", uri.c_str()); - -    sp<DataSource> source; -    sp<ABuffer> buffer, tsBuffer;      // decrypt a junk buffer to prefetch key; since a session uses only one http connection,      // this avoids interleaved connections to the key and segment file.      { @@ -904,19 +1143,118 @@ void PlaylistFetcher::onDownloadNext() {          junk->setRange(0, 16);          status_t err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, junk,                  true /* first */); -        if (err != OK) { +        if (err == ERROR_NOT_CONNECTED) { +            return false; +        } else if (err != OK) {              notifyError(err); +            return false; +        } +    } + +    if ((mStartup && !mTimeChangeSignaled) || discontinuity) { +        // We need to signal a time discontinuity to ATSParser on the +        // first segment after start, or on a discontinuity segment. +        // Setting mNextPTSTimeUs informs extractAndQueueAccessUnitsXX() +        // to send the time discontinuity. 
+        if (mPlaylist->isComplete() || mPlaylist->isEvent()) { +            // If this was a live event this made no sense since +            // we don't have access to all the segment before the current +            // one. +            mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber); +        } + +        // Setting mTimeChangeSignaled to true, so that if start time +        // searching goes into 2nd segment (without a discontinuity), +        // we don't reset time again. It causes corruption when pending +        // data in ATSParser is cleared. +        mTimeChangeSignaled = true; +    } + +    if (discontinuity) { +        ALOGI("queueing discontinuity (explicit=%d)", discontinuity); + +        // Signal a format discontinuity to ATSParser to clear partial data +        // from previous streams. Not doing this causes bitstream corruption. +        if (mTSParser != NULL) { +            mTSParser->signalDiscontinuity( +                    ATSParser::DISCONTINUITY_FORMATCHANGE, NULL /* extra */); +        } + +        queueDiscontinuity( +                ATSParser::DISCONTINUITY_FORMAT_ONLY, +                NULL /* extra */); + +        if (mStartup && mStartTimeUsRelative && mFirstPTSValid) { +            // This means we guessed mStartTimeUs to be in the previous +            // segment (likely very close to the end), but either video or +            // audio has not found start by the end of that segment. +            // +            // If this new segment is not a discontinuity, keep searching. +            // +            // If this new segment even got a discontinuity marker, just +            // set mStartTimeUs=0, and take all samples from now on. +            mStartTimeUs = 0; +            mFirstPTSValid = false; +            mIDRFound = false; +            mVideoBuffer->clear(); +        } +    } + +    FLOGV("fetching segment %d from (%d .. 
%d)", +            mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); +    return true; +} + +void PlaylistFetcher::onDownloadNext() { +    AString uri; +    sp<AMessage> itemMeta; +    sp<ABuffer> buffer; +    sp<ABuffer> tsBuffer; +    int32_t firstSeqNumberInPlaylist = 0; +    int32_t lastSeqNumberInPlaylist = 0; +    bool connectHTTP = true; + +    if (mDownloadState->hasSavedState()) { +        mDownloadState->restoreState( +                uri, +                itemMeta, +                buffer, +                tsBuffer, +                firstSeqNumberInPlaylist, +                lastSeqNumberInPlaylist); +        connectHTTP = false; +        FLOGV("resuming: '%s'", uri.c_str()); +    } else { +        if (!initDownloadState( +                uri, +                itemMeta, +                firstSeqNumberInPlaylist, +                lastSeqNumberInPlaylist)) {              return;          } +        FLOGV("fetching: '%s'", uri.c_str()); +    } + +    int64_t range_offset, range_length; +    if (!itemMeta->findInt64("range-offset", &range_offset) +            || !itemMeta->findInt64("range-length", &range_length)) { +        range_offset = 0; +        range_length = -1;      }      // block-wise download -    bool startup = mStartup; +    bool shouldPause = false;      ssize_t bytesRead;      do { -        bytesRead = mSession->fetchFile( -                uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize, &source); +        int64_t startUs = ALooper::GetNowUs(); +        bytesRead = mHTTPDownloader->fetchBlock( +                uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize, +                NULL /* actualURL */, connectHTTP); +        int64_t delayUs = ALooper::GetNowUs() - startUs; +        if (bytesRead == ERROR_NOT_CONNECTED) { +            return; +        }          if (bytesRead < 0) {              status_t err = bytesRead;              ALOGE("failed to fetch .ts segment at url '%s'", 
uri.c_str()); @@ -924,6 +1262,22 @@ void PlaylistFetcher::onDownloadNext() {              return;          } +        // add sample for bandwidth estimation, excluding samples from subtitles (as +        // its too small), or during startup/resumeUntil (when we could have more than +        // one connection open which affects bandwidth) +        if (!mStartup && mStopParams == NULL && bytesRead > 0 +                && (mStreamTypeMask +                        & (LiveSession::STREAMTYPE_AUDIO +                        | LiveSession::STREAMTYPE_VIDEO))) { +            mSession->addBandwidthMeasurement(bytesRead, delayUs); +            if (delayUs > 2000000ll) { +                FLOGV("bytesRead %zd took %.2f seconds - abnormal bandwidth dip", +                        bytesRead, (double)delayUs / 1.0e6); +            } +        } + +        connectHTTP = false; +          CHECK(buffer != NULL);          size_t size = buffer->size(); @@ -941,28 +1295,7 @@ void PlaylistFetcher::onDownloadNext() {              return;          } -        if (startup || discontinuity) { -            // Signal discontinuity. - -            if (mPlaylist->isComplete() || mPlaylist->isEvent()) { -                // If this was a live event this made no sense since -                // we don't have access to all the segment before the current -                // one. 
-                mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber); -            } - -            if (discontinuity) { -                ALOGI("queueing discontinuity (explicit=%d)", discontinuity); - -                queueDiscontinuity( -                        ATSParser::DISCONTINUITY_FORMATCHANGE, -                        NULL /* extra */); - -                discontinuity = false; -            } - -            startup = false; -        } +        bool startUp = mStartup; // save current start up state          err = OK;          if (bufferStartsWithTsSyncByte(buffer)) { @@ -976,7 +1309,6 @@ void PlaylistFetcher::onDownloadNext() {                  tsBuffer->setRange(tsOff, tsSize);              }              tsBuffer->setRange(tsBuffer->offset(), tsBuffer->size() + bytesRead); -              err = extractAndQueueAccessUnitsFromTs(tsBuffer);          } @@ -991,23 +1323,45 @@ void PlaylistFetcher::onDownloadNext() {              return;          } else if (err == ERROR_OUT_OF_RANGE) {              // reached stopping point -            stopAsync(/* clear = */ false); +            notifyStopReached();              return;          } else if (err != OK) {              notifyError(err);              return;          } - +        // If we're switching, post start notification +        // this should only be posted when the last chunk is full processed by TSParser +        if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) { +            CHECK(mStartTimeUsNotify != NULL); +            mStartTimeUsNotify->post(); +            mStartTimeUsNotify.clear(); +            shouldPause = true; +        } +        if (shouldPause || shouldPauseDownload()) { +            // save state and return if this is not the last chunk, +            // leaving the fetcher in paused state. 
+            if (bytesRead != 0) { +                mDownloadState->saveState( +                        uri, +                        itemMeta, +                        buffer, +                        tsBuffer, +                        firstSeqNumberInPlaylist, +                        lastSeqNumberInPlaylist); +                return; +            } +            shouldPause = true; +        }      } while (bytesRead != 0);      if (bufferStartsWithTsSyncByte(buffer)) {          // If we don't see a stream in the program table after fetching a full ts segment          // mark it as nonexistent. -        const size_t kNumTypes = ATSParser::NUM_SOURCE_TYPES; -        ATSParser::SourceType srcTypes[kNumTypes] = +        ATSParser::SourceType srcTypes[] =                  { ATSParser::VIDEO, ATSParser::AUDIO }; -        LiveSession::StreamType streamTypes[kNumTypes] = +        LiveSession::StreamType streamTypes[] =                  { LiveSession::STREAMTYPE_VIDEO, LiveSession::STREAMTYPE_AUDIO }; +        const size_t kNumTypes = NELEM(srcTypes);          for (size_t i = 0; i < kNumTypes; i++) {              ATSParser::SourceType srcType = srcTypes[i]; @@ -1034,7 +1388,6 @@ void PlaylistFetcher::onDownloadNext() {          return;      } -    err = OK;      if (tsBuffer != NULL) {          AString method;          CHECK(buffer->meta()->findString("cipher-method", &method)); @@ -1048,81 +1401,117 @@ void PlaylistFetcher::onDownloadNext() {      }      // bulk extract non-ts files +    bool startUp = mStartup;      if (tsBuffer == NULL) { -        err = extractAndQueueAccessUnits(buffer, itemMeta); +        status_t err = extractAndQueueAccessUnits(buffer, itemMeta);          if (err == -EAGAIN) {              // starting sequence number too low/high              postMonitorQueue();              return;          } else if (err == ERROR_OUT_OF_RANGE) {              // reached stopping point -            stopAsync(/* clear = */false); +            notifyStopReached(); +  
          return; +        } else if (err != OK) { +            notifyError(err);              return;          }      } -    if (err != OK) { -        notifyError(err); -        return; -    } -      ++mSeqNumber; -    postMonitorQueue(); +    // if adapting, pause after found the next starting point +    if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) { +        CHECK(mStartTimeUsNotify != NULL); +        mStartTimeUsNotify->post(); +        mStartTimeUsNotify.clear(); +        shouldPause = true; +    } + +    if (!shouldPause) { +        postMonitorQueue(); +    }  } -int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const { -    int32_t firstSeqNumberInPlaylist, lastSeqNumberInPlaylist; -    if (mPlaylist->meta() == NULL -            || !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) { -        firstSeqNumberInPlaylist = 0; +/* + * returns true if we need to adjust mSeqNumber + */ +bool PlaylistFetcher::adjustSeqNumberWithAnchorTime(int64_t anchorTimeUs) { +    int32_t firstSeqNumberInPlaylist = mPlaylist->getFirstSeqNumber(); + +    int64_t minDiffUs, maxDiffUs; +    if (mSeekMode == LiveSession::kSeekModeNextSample) { +        // if the previous fetcher paused in the middle of a segment, we +        // want to start at a segment that overlaps the last sample +        minDiffUs = -mPlaylist->getTargetDuration(); +        maxDiffUs = 0ll; +    } else { +        // if the previous fetcher paused at the end of a segment, ideally +        // we want to start at the segment that's roughly aligned with its +        // next segment, but if the two variants are not well aligned we +        // adjust the diff to within (-T/2, T/2) +        minDiffUs = -mPlaylist->getTargetDuration() / 2; +        maxDiffUs = mPlaylist->getTargetDuration() / 2;      } -    lastSeqNumberInPlaylist = firstSeqNumberInPlaylist + mPlaylist->size() - 1; -    int32_t index = mSeqNumber - firstSeqNumberInPlaylist 
- 1; -    while (index >= 0 && anchorTimeUs > mStartTimeUs) { -        sp<AMessage> itemMeta; -        CHECK(mPlaylist->itemAt(index, NULL /* uri */, &itemMeta)); +    int32_t oldSeqNumber = mSeqNumber; +    ssize_t index = mSeqNumber - firstSeqNumberInPlaylist; -        int64_t itemDurationUs; -        CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); +    // adjust anchorTimeUs to within (minDiffUs, maxDiffUs) from mStartTimeUs +    int64_t diffUs = anchorTimeUs - mStartTimeUs; +    if (diffUs > maxDiffUs) { +        while (index > 0 && diffUs > maxDiffUs) { +            --index; + +            sp<AMessage> itemMeta; +            CHECK(mPlaylist->itemAt(index, NULL /* uri */, &itemMeta)); -        anchorTimeUs -= itemDurationUs; -        --index; +            int64_t itemDurationUs; +            CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + +            diffUs -= itemDurationUs; +        } +    } else if (diffUs < minDiffUs) { +        while (index + 1 < (ssize_t) mPlaylist->size() +                && diffUs < minDiffUs) { +            ++index; + +            sp<AMessage> itemMeta; +            CHECK(mPlaylist->itemAt(index, NULL /* uri */, &itemMeta)); + +            int64_t itemDurationUs; +            CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + +            diffUs += itemDurationUs; +        }      } -    int32_t newSeqNumber = firstSeqNumberInPlaylist + index + 1; -    if (newSeqNumber <= lastSeqNumberInPlaylist) { -        return newSeqNumber; -    } else { -        return lastSeqNumberInPlaylist; +    mSeqNumber = firstSeqNumberInPlaylist + index; + +    if (mSeqNumber != oldSeqNumber) { +        FLOGV("guessed wrong seg number: diff %lld out of [%lld, %lld]", +                (long long) anchorTimeUs - mStartTimeUs, +                (long long) minDiffUs, +                (long long) maxDiffUs); +        return true;      } +    return false;  }  int32_t PlaylistFetcher::getSeqNumberForDiscontinuity(size_t 
discontinuitySeq) const { -    int32_t firstSeqNumberInPlaylist; -    if (mPlaylist->meta() == NULL -            || !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) { -        firstSeqNumberInPlaylist = 0; -    } - -    size_t curDiscontinuitySeq = mPlaylist->getDiscontinuitySeq(); -    if (discontinuitySeq < curDiscontinuitySeq) { -        return firstSeqNumberInPlaylist <= 0 ? 0 : (firstSeqNumberInPlaylist - 1); -    } +    int32_t firstSeqNumberInPlaylist = mPlaylist->getFirstSeqNumber();      size_t index = 0;      while (index < mPlaylist->size()) {          sp<AMessage> itemMeta;          CHECK(mPlaylist->itemAt( index, NULL /* uri */, &itemMeta)); - -        int64_t discontinuity; -        if (itemMeta->findInt64("discontinuity", &discontinuity)) { -            curDiscontinuitySeq++; -        } - +        size_t curDiscontinuitySeq; +        CHECK(itemMeta->findInt32("discontinuity-sequence", (int32_t *)&curDiscontinuitySeq)); +        int32_t seqNumber = firstSeqNumberInPlaylist + index;          if (curDiscontinuitySeq == discontinuitySeq) { -            return firstSeqNumberInPlaylist + index; +            return seqNumber; +        } else if (curDiscontinuitySeq > discontinuitySeq) { +            return seqNumber <= 0 ? 
0 : seqNumber - 1;          }          ++index; @@ -1132,12 +1521,6 @@ int32_t PlaylistFetcher::getSeqNumberForDiscontinuity(size_t discontinuitySeq) c  }  int32_t PlaylistFetcher::getSeqNumberForTime(int64_t timeUs) const { -    int32_t firstSeqNumberInPlaylist; -    if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( -                "media-sequence", &firstSeqNumberInPlaylist)) { -        firstSeqNumberInPlaylist = 0; -    } -      size_t index = 0;      int64_t segmentStartUs = 0;      while (index < mPlaylist->size()) { @@ -1160,7 +1543,7 @@ int32_t PlaylistFetcher::getSeqNumberForTime(int64_t timeUs) const {          index = mPlaylist->size() - 1;      } -    return firstSeqNumberInPlaylist + index; +    return mPlaylist->getFirstSeqNumber() + index;  }  const sp<ABuffer> &PlaylistFetcher::setAccessUnitProperties( @@ -1175,16 +1558,37 @@ const sp<ABuffer> &PlaylistFetcher::setAccessUnitProperties(          accessUnit->meta()->setInt32("discard", discard);      } -    int32_t targetDurationSecs; -    if (mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)) { -        accessUnit->meta()->setInt32("targetDuration", targetDurationSecs); -    } -      accessUnit->meta()->setInt32("discontinuitySeq", mDiscontinuitySeq);      accessUnit->meta()->setInt64("segmentStartTimeUs", getSegmentStartTimeUs(mSeqNumber)); +    accessUnit->meta()->setInt64("segmentFirstTimeUs", mSegmentFirstPTS); +    accessUnit->meta()->setInt64("segmentDurationUs", getSegmentDurationUs(mSeqNumber)); +    if (!mPlaylist->isComplete() && !mPlaylist->isEvent()) { +        accessUnit->meta()->setInt64("playlistTimeUs", mPlaylistTimeUs); +    }      return accessUnit;  } +bool PlaylistFetcher::isStartTimeReached(int64_t timeUs) { +    if (!mFirstPTSValid) { +        mFirstTimeUs = timeUs; +        mFirstPTSValid = true; +    } +    bool startTimeReached = true; +    if (mStartTimeUsRelative) { +        FLOGV("startTimeUsRelative, timeUs (%lld) - %lld = %lld", +         
       (long long)timeUs, +                (long long)mFirstTimeUs, +                (long long)(timeUs - mFirstTimeUs)); +        timeUs -= mFirstTimeUs; +        if (timeUs < 0) { +            FLOGV("clamp negative timeUs to 0"); +            timeUs = 0; +        } +        startTimeReached = (timeUs >= mStartTimeUs); +    } +    return startTimeReached; +} +  status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &buffer) {      if (mTSParser == NULL) {          // Use TS_TIMESTAMPS_ARE_ABSOLUTE so pts carry over between fetchers. @@ -1197,12 +1601,16 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu          // ATSParser from skewing the timestamps of access units.          extra->setInt64(IStreamListener::kKeyMediaTimeUs, 0); +        // When adapting, signal a recent media time to the parser, +        // so that PTS wrap around is handled for the new variant. +        if (mStartTimeUs >= 0 && !mStartTimeUsRelative) { +            extra->setInt64(IStreamListener::kKeyRecentMediaTimeUs, mStartTimeUs); +        } +          mTSParser->signalDiscontinuity(                  ATSParser::DISCONTINUITY_TIME, extra); -        mAbsoluteTimeAnchorUs = mNextPTSTimeUs;          mNextPTSTimeUs = -1ll; -        mFirstPTSValid = false;      }      size_t offset = 0; @@ -1218,35 +1626,75 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu      // setRange to indicate consumed bytes.      
buffer->setRange(buffer->offset() + offset, buffer->size() - offset); +    if (mSegmentFirstPTS < 0ll) { +        // get the smallest first PTS from all streams present in this parser +        for (size_t i = mPacketSources.size(); i-- > 0;) { +            const LiveSession::StreamType stream = mPacketSources.keyAt(i); +            if (stream == LiveSession::STREAMTYPE_SUBTITLES) { +                ALOGE("MPEG2 Transport streams do not contain subtitles."); +                return ERROR_MALFORMED; +            } +            if (stream == LiveSession::STREAMTYPE_METADATA) { +                continue; +            } +            ATSParser::SourceType type =LiveSession::getSourceTypeForStream(stream); +            sp<AnotherPacketSource> source = +                static_cast<AnotherPacketSource *>( +                        mTSParser->getSource(type).get()); + +            if (source == NULL) { +                continue; +            } +            sp<AMessage> meta = source->getMetaAfterLastDequeued(0); +            if (meta != NULL) { +                int64_t timeUs; +                CHECK(meta->findInt64("timeUs", &timeUs)); +                if (mSegmentFirstPTS < 0ll || timeUs < mSegmentFirstPTS) { +                    mSegmentFirstPTS = timeUs; +                } +            } +        } +        if (mSegmentFirstPTS < 0ll) { +            // didn't find any TS packet, can return early +            return OK; +        } +        if (!mStartTimeUsRelative) { +            // mStartup +            //   mStartup is true until we have queued a packet for all the streams +            //   we are fetching. We queue packets whose timestamps are greater than +            //   mStartTimeUs. +            // mSegmentStartTimeUs >= 0 +            //   mSegmentStartTimeUs is non-negative when adapting or switching tracks +            // adjustSeqNumberWithAnchorTime(timeUs) == true +            //   we guessed a seq number that's either too large or too small. 
+            // If this happens, we'll adjust mSeqNumber and restart fetching from new +            // location. Note that we only want to adjust once, so set mSegmentStartTimeUs +            // to -1 so that we don't enter this chunk next time. +            if (mStartup && mSegmentStartTimeUs >= 0 +                    && adjustSeqNumberWithAnchorTime(mSegmentFirstPTS)) { +                mStartTimeUsNotify = mNotify->dup(); +                mStartTimeUsNotify->setInt32("what", kWhatStartedAt); +                mStartTimeUsNotify->setString("uri", mURI); +                mIDRFound = false; +                mSegmentStartTimeUs = -1; +                return -EAGAIN; +            } +        } +    } +      status_t err = OK;      for (size_t i = mPacketSources.size(); i-- > 0;) {          sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i); -        const char *key; -        ATSParser::SourceType type;          const LiveSession::StreamType stream = mPacketSources.keyAt(i); -        switch (stream) { -            case LiveSession::STREAMTYPE_VIDEO: -                type = ATSParser::VIDEO; -                key = "timeUsVideo"; -                break; - -            case LiveSession::STREAMTYPE_AUDIO: -                type = ATSParser::AUDIO; -                key = "timeUsAudio"; -                break; - -            case LiveSession::STREAMTYPE_SUBTITLES: -            { -                ALOGE("MPEG2 Transport streams do not contain subtitles."); -                return ERROR_MALFORMED; -                break; -            } - -            default: -                TRESPASS(); +        if (stream == LiveSession::STREAMTYPE_SUBTITLES) { +            ALOGE("MPEG2 Transport streams do not contain subtitles."); +            return ERROR_MALFORMED;          } +        const char *key = LiveSession::getKeyForStream(stream); +        ATSParser::SourceType type =LiveSession::getSourceTypeForStream(stream); +          sp<AnotherPacketSource> source =              
static_cast<AnotherPacketSource *>(                      mTSParser->getSource(type).get()); @@ -1255,116 +1703,65 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu              continue;          } -        int64_t timeUs; +        const char *mime; +        sp<MetaData> format  = source->getFormat(); +        bool isAvc = format != NULL && format->findCString(kKeyMIMEType, &mime) +                && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC); +          sp<ABuffer> accessUnit;          status_t finalResult;          while (source->hasBufferAvailable(&finalResult)                  && source->dequeueAccessUnit(&accessUnit) == OK) { +            int64_t timeUs;              CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));              if (mStartup) { -                if (!mFirstPTSValid) { -                    mFirstTimeUs = timeUs; -                    mFirstPTSValid = true; -                } -                if (mStartTimeUsRelative) { -                    timeUs -= mFirstTimeUs; -                    if (timeUs < 0) { -                        timeUs = 0; -                    } -                } - -                if (timeUs < mStartTimeUs) { -                    // buffer up to the closest preceding IDR frame -                    ALOGV("timeUs %" PRId64 " us < mStartTimeUs %" PRId64 " us", -                            timeUs, mStartTimeUs); -                    const char *mime; -                    sp<MetaData> format  = source->getFormat(); -                    bool isAvc = false; -                    if (format != NULL && format->findCString(kKeyMIMEType, &mime) -                            && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) { -                        isAvc = true; -                    } -                    if (isAvc && IsIDR(accessUnit)) { -                        mVideoBuffer->clear(); -                    } +                bool startTimeReached = isStartTimeReached(timeUs); + +                if 
(!startTimeReached || (isAvc && !mIDRFound)) { +                    // buffer up to the closest preceding IDR frame in the next segement, +                    // or the closest succeeding IDR frame after the exact position +                    FSLOGV(stream, "timeUs(%lld)-mStartTimeUs(%lld)=%lld, mIDRFound=%d", +                            (long long)timeUs, +                            (long long)mStartTimeUs, +                            (long long)timeUs - mStartTimeUs, +                            mIDRFound);                      if (isAvc) { -                        mVideoBuffer->queueAccessUnit(accessUnit); +                        if (IsIDR(accessUnit)) { +                            mVideoBuffer->clear(); +                            FSLOGV(stream, "found IDR, clear mVideoBuffer"); +                            mIDRFound = true; +                        } +                        if (mIDRFound && mStartTimeUsRelative && !startTimeReached) { +                            mVideoBuffer->queueAccessUnit(accessUnit); +                            FSLOGV(stream, "saving AVC video AccessUnit"); +                        }                      } - -                    continue; -                } -            } - -            CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); -            if (mStartTimeUsNotify != NULL && timeUs > mStartTimeUs) { -                int32_t firstSeqNumberInPlaylist; -                if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( -                            "media-sequence", &firstSeqNumberInPlaylist)) { -                    firstSeqNumberInPlaylist = 0; -                } - -                int32_t targetDurationSecs; -                CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); -                int64_t targetDurationUs = targetDurationSecs * 1000000ll; -                // mStartup -                //   mStartup is true until we have queued a packet for all the streams -              
  //   we are fetching. We queue packets whose timestamps are greater than -                //   mStartTimeUs. -                // mSegmentStartTimeUs >= 0 -                //   mSegmentStartTimeUs is non-negative when adapting or switching tracks -                // mSeqNumber > firstSeqNumberInPlaylist -                //   don't decrement mSeqNumber if it already points to the 1st segment -                // timeUs - mStartTimeUs > targetDurationUs: -                //   This and the 2 above conditions should only happen when adapting in a live -                //   stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher -                //   would start fetching after timeUs, which should be greater than mStartTimeUs; -                //   the old fetcher would then continue fetching data until timeUs. We don't want -                //   timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to -                //   stop as early as possible. The definition of being "too far ahead" is -                //   arbitrary; here we use targetDurationUs as threshold. -                if (mStartup && mSegmentStartTimeUs >= 0 -                        && mSeqNumber > firstSeqNumberInPlaylist -                        && timeUs - mStartTimeUs > targetDurationUs) { -                    // we just guessed a starting timestamp that is too high when adapting in a -                    // live stream; re-adjust based on the actual timestamp extracted from the -                    // media segment; if we didn't move backward after the re-adjustment -                    // (newSeqNumber), start at least 1 segment prior. 
-                    int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs); -                    if (newSeqNumber >= mSeqNumber) { -                        --mSeqNumber; -                    } else { -                        mSeqNumber = newSeqNumber; +                    if (!startTimeReached || (isAvc && !mIDRFound)) { +                        continue;                      } -                    mStartTimeUsNotify = mNotify->dup(); -                    mStartTimeUsNotify->setInt32("what", kWhatStartedAt); -                    return -EAGAIN; -                } - -                int32_t seq; -                if (!mStartTimeUsNotify->findInt32("discontinuitySeq", &seq)) { -                    mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);                  } -                int64_t startTimeUs; -                if (!mStartTimeUsNotify->findInt64(key, &startTimeUs)) { -                    mStartTimeUsNotify->setInt64(key, timeUs); +            } -                    uint32_t streamMask = 0; -                    mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask); +            if (mStartTimeUsNotify != NULL) { +                uint32_t streamMask = 0; +                mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask); +                if ((mStreamTypeMask & mPacketSources.keyAt(i)) +                        && !(streamMask & mPacketSources.keyAt(i))) {                      streamMask |= mPacketSources.keyAt(i);                      mStartTimeUsNotify->setInt32("streamMask", streamMask); +                    FSLOGV(stream, "found start point, timeUs=%lld, streamMask becomes %x", +                            (long long)timeUs, streamMask);                      if (streamMask == mStreamTypeMask) { +                        FLOGV("found start point for all streams");                          mStartup = false; -                        mStartTimeUsNotify->post(); -                        
mStartTimeUsNotify.clear();                      }                  }              }              if (mStopParams != NULL) { -                // Queue discontinuity in original stream.                  int32_t discontinuitySeq;                  int64_t stopTimeUs;                  if (!mStopParams->findInt32("discontinuitySeq", &discontinuitySeq) @@ -1372,14 +1769,13 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu                          || !mStopParams->findInt64(key, &stopTimeUs)                          || (discontinuitySeq == mDiscontinuitySeq                                  && timeUs >= stopTimeUs)) { -                    packetSource->queueAccessUnit(mSession->createFormatChangeBuffer()); +                    FSLOGV(stream, "reached stop point, timeUs=%lld", (long long)timeUs);                      mStreamTypeMask &= ~stream;                      mPacketSources.removeItemsAt(i);                      break;                  }              } -            // Note that we do NOT dequeue any discontinuities except for format change.              
if (stream == LiveSession::STREAMTYPE_VIDEO) {                  const bool discard = true;                  status_t status; @@ -1388,11 +1784,21 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu                      mVideoBuffer->dequeueAccessUnit(&videoBuffer);                      setAccessUnitProperties(videoBuffer, source, discard);                      packetSource->queueAccessUnit(videoBuffer); +                    int64_t bufferTimeUs; +                    CHECK(videoBuffer->meta()->findInt64("timeUs", &bufferTimeUs)); +                    FSLOGV(stream, "queueAccessUnit (saved), timeUs=%lld", +                            (long long)bufferTimeUs);                  } +            } else if (stream == LiveSession::STREAMTYPE_METADATA && !mHasMetadata) { +                mHasMetadata = true; +                sp<AMessage> notify = mNotify->dup(); +                notify->setInt32("what", kWhatMetadataDetected); +                notify->post();              }              setAccessUnitProperties(accessUnit, source);              packetSource->queueAccessUnit(accessUnit); +            FSLOGV(stream, "queueAccessUnit, timeUs=%lld", (long long)timeUs);          }          if (err != OK) { @@ -1410,7 +1816,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu      if (!mStreamTypeMask) {          // Signal gap is filled between original and new stream. 
-        ALOGV("ERROR OUT OF RANGE"); +        FLOGV("reached stop point for all streams");          return ERROR_OUT_OF_RANGE;      } @@ -1461,14 +1867,11 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(          buffer->meta()->setInt64("segmentStartTimeUs", getSegmentStartTimeUs(mSeqNumber));          buffer->meta()->setInt32("discontinuitySeq", mDiscontinuitySeq);          buffer->meta()->setInt32("subtitleGeneration", mSubtitleGeneration); -          packetSource->queueAccessUnit(buffer);          return OK;      }      if (mNextPTSTimeUs >= 0ll) { -        mFirstPTSValid = false; -        mAbsoluteTimeAnchorUs = mNextPTSTimeUs;          mNextPTSTimeUs = -1ll;      } @@ -1569,11 +1972,23 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(      CHECK(packetSource->getFormat()->findInt32(kKeySampleRate, &sampleRate));      int64_t timeUs = (PTS * 100ll) / 9ll; -    if (!mFirstPTSValid) { +    if (mStartup && !mFirstPTSValid) {          mFirstPTSValid = true;          mFirstTimeUs = timeUs;      } +    if (mSegmentFirstPTS < 0ll) { +        mSegmentFirstPTS = timeUs; +        if (!mStartTimeUsRelative) { +            // Duplicated logic from how we handle .ts playlists. +            if (mStartup && mSegmentStartTimeUs >= 0 +                    && adjustSeqNumberWithAnchorTime(timeUs)) { +                mSegmentStartTimeUs = -1; +                return -EAGAIN; +            } +        } +    } +      size_t offset = 0;      while (offset < buffer->size()) {          const uint8_t *adtsHeader = buffer->data() + offset; @@ -1617,40 +2032,18 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(              }              if (mStartTimeUsNotify != NULL) { -                int32_t targetDurationSecs; -                CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); -                int64_t targetDurationUs = targetDurationSecs * 1000000ll; - -                // Duplicated logic from how we handle .ts playlists. 
-                if (mStartup && mSegmentStartTimeUs >= 0 -                        && timeUs - mStartTimeUs > targetDurationUs) { -                    int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs); -                    if (newSeqNumber >= mSeqNumber) { -                        --mSeqNumber; -                    } else { -                        mSeqNumber = newSeqNumber; -                    } -                    return -EAGAIN; -                } - -                mStartTimeUsNotify->setInt64("timeUsAudio", timeUs); -                mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);                  mStartTimeUsNotify->setInt32("streamMask", LiveSession::STREAMTYPE_AUDIO); -                mStartTimeUsNotify->post(); -                mStartTimeUsNotify.clear();                  mStartup = false;              }          }          if (mStopParams != NULL) { -            // Queue discontinuity in original stream.              int32_t discontinuitySeq;              int64_t stopTimeUs;              if (!mStopParams->findInt32("discontinuitySeq", &discontinuitySeq)                      || discontinuitySeq > mDiscontinuitySeq                      || !mStopParams->findInt64("timeUsAudio", &stopTimeUs)                      || (discontinuitySeq == mDiscontinuitySeq && unitTimeUs >= stopTimeUs)) { -                packetSource->queueAccessUnit(mSession->createFormatChangeBuffer());                  mStreamTypeMask = 0;                  mPacketSources.clear();                  return ERROR_OUT_OF_RANGE; @@ -1687,33 +2080,11 @@ void PlaylistFetcher::updateDuration() {      msg->post();  } -int64_t PlaylistFetcher::resumeThreshold(const sp<AMessage> &msg) { -    int64_t durationUs; -    if (msg->findInt64("durationUs", &durationUs) && durationUs > 0) { -        return kNumSkipFrames * durationUs; -    } - -    sp<RefBase> obj; -    msg->findObject("format", &obj); -    MetaData *format = static_cast<MetaData *>(obj.get()); - -    const char 
*mime; -    CHECK(format->findCString(kKeyMIMEType, &mime)); -    bool audio = !strncasecmp(mime, "audio/", 6); -    if (audio) { -        // Assumes 1000 samples per frame. -        int32_t sampleRate; -        CHECK(format->findInt32(kKeySampleRate, &sampleRate)); -        return kNumSkipFrames  /* frames */ * 1000 /* samples */ -                * (1000000 / sampleRate) /* sample duration (us) */; -    } else { -        int32_t frameRate; -        if (format->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) { -            return kNumSkipFrames * (1000000 / frameRate); -        } -    } - -    return 500000ll; +void PlaylistFetcher::updateTargetDuration() { +    sp<AMessage> msg = mNotify->dup(); +    msg->setInt32("what", kWhatTargetDurationUpdate); +    msg->setInt64("targetDurationUs", mPlaylist->getTargetDuration()); +    msg->post();  }  }  // namespace android diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h index 4e15f85..c8ca457 100644 --- a/media/libstagefright/httplive/PlaylistFetcher.h +++ b/media/libstagefright/httplive/PlaylistFetcher.h @@ -27,7 +27,7 @@ namespace android {  struct ABuffer;  struct AnotherPacketSource; -struct DataSource; +class DataSource;  struct HTTPBase;  struct LiveDataSource;  struct M3UParser; @@ -36,6 +36,7 @@ class String8;  struct PlaylistFetcher : public AHandler {      static const int64_t kMinBufferedDurationUs;      static const int32_t kDownloadBlockSize; +    static const int64_t kFetcherResumeThreshold;      enum {          kWhatStarted, @@ -43,36 +44,43 @@ struct PlaylistFetcher : public AHandler {          kWhatStopped,          kWhatError,          kWhatDurationUpdate, -        kWhatTemporarilyDoneFetching, +        kWhatTargetDurationUpdate,          kWhatPrepared,          kWhatPreparationFailed,          kWhatStartedAt, +        kWhatStopReached, +        kWhatPlaylistFetched, +        kWhatMetadataDetected,      };      PlaylistFetcher(         
     const sp<AMessage> ¬ify,              const sp<LiveSession> &session,              const char *uri, +            int32_t id,              int32_t subtitleGeneration); -    sp<DataSource> getDataSource(); +    int32_t getFetcherID() const;      void startAsync(              const sp<AnotherPacketSource> &audioSource,              const sp<AnotherPacketSource> &videoSource,              const sp<AnotherPacketSource> &subtitleSource, +            const sp<AnotherPacketSource> &metadataSource,              int64_t startTimeUs = -1ll,         // starting timestamps              int64_t segmentStartTimeUs = -1ll, // starting position within playlist              // startTimeUs!=segmentStartTimeUs only when playlist is live -            int32_t startDiscontinuitySeq = 0, -            bool adaptive = false); +            int32_t startDiscontinuitySeq = -1, +            LiveSession::SeekMode seekMode = LiveSession::kSeekModeExactPosition); -    void pauseAsync(); +    void pauseAsync(float thresholdRatio, bool disconnect);      void stopAsync(bool clear = true);      void resumeUntilAsync(const sp<AMessage> ¶ms); +    void fetchPlaylistAsync(); +      uint32_t getStreamTypeMask() const {          return mStreamTypeMask;      } @@ -93,8 +101,11 @@ private:          kWhatMonitorQueue   = 'moni',          kWhatResumeUntil    = 'rsme',          kWhatDownloadNext   = 'dlnx', +        kWhatFetchPlaylist  = 'flst'      }; +    struct DownloadState; +      static const int64_t kMaxMonitorDelayUs;      static const int32_t kNumSkipFrames; @@ -105,9 +116,12 @@ private:      sp<AMessage> mNotify;      sp<AMessage> mStartTimeUsNotify; +    sp<HTTPDownloader> mHTTPDownloader;      sp<LiveSession> mSession;      AString mURI; +    int32_t mFetcherID; +      uint32_t mStreamTypeMask;      int64_t mStartTimeUs; @@ -116,7 +130,7 @@ private:      // adapting or switching tracks.      
int64_t mSegmentStartTimeUs; -    ssize_t mDiscontinuitySeq; +    int32_t mDiscontinuitySeq;      bool mStartTimeUsRelative;      sp<AMessage> mStopParams; // message containing the latest timestamps we should fetch. @@ -126,17 +140,21 @@ private:      KeyedVector<AString, sp<ABuffer> > mAESKeyForURI;      int64_t mLastPlaylistFetchTimeUs; +    int64_t mPlaylistTimeUs;      sp<M3UParser> mPlaylist;      int32_t mSeqNumber;      int32_t mNumRetries;      bool mStartup; -    bool mAdaptive; -    bool mPrepared; +    bool mIDRFound; +    int32_t mSeekMode; +    bool mTimeChangeSignaled;      int64_t mNextPTSTimeUs;      int32_t mMonitorQueueGeneration;      const int32_t mSubtitleGeneration; +    int32_t mLastDiscontinuitySeq; +      enum RefreshState {          INITIAL_MINIMUM_RELOAD_DELAY,          FIRST_UNCHANGED_RELOAD_ATTEMPT, @@ -150,9 +168,8 @@ private:      sp<ATSParser> mTSParser;      bool mFirstPTSValid; -    uint64_t mFirstPTS;      int64_t mFirstTimeUs; -    int64_t mAbsoluteTimeAnchorUs; +    int64_t mSegmentFirstPTS;      sp<AnotherPacketSource> mVideoBuffer;      // Stores the initialization vector to decrypt the next block of cipher text, which can @@ -160,6 +177,13 @@ private:      // the last block of cipher text (cipher-block chaining).      unsigned char mAESInitVec[16]; +    Mutex mThresholdLock; +    float mThresholdRatio; + +    sp<DownloadState> mDownloadState; + +    bool mHasMetadata; +      // Set first to true if decrypting the first segment of a playlist segment. 
When      // first is true, reset the initialization vector based on the available      // information in the manifest; otherwise, use the initialization vector as @@ -175,6 +199,10 @@ private:      void postMonitorQueue(int64_t delayUs = 0, int64_t minDelayUs = 0);      void cancelMonitorQueue(); +    void setStoppingThreshold(float thresholdRatio, bool disconnect); +    void resetStoppingThreshold(bool disconnect); +    float getStoppingThreshold(); +    bool shouldPauseDownload();      int64_t delayUsToRefreshPlaylist() const;      status_t refreshPlaylist(); @@ -182,12 +210,19 @@ private:      // Returns the media time in us of the segment specified by seqNumber.      // This is computed by summing the durations of all segments before it.      int64_t getSegmentStartTimeUs(int32_t seqNumber) const; +    // Returns the duration time in us of the segment specified. +    int64_t getSegmentDurationUs(int32_t seqNumber) const;      status_t onStart(const sp<AMessage> &msg);      void onPause();      void onStop(const sp<AMessage> &msg);      void onMonitorQueue();      void onDownloadNext(); +    bool initDownloadState( +            AString &uri, +            sp<AMessage> &itemMeta, +            int32_t &firstSeqNumberInPlaylist, +            int32_t &lastSeqNumberInPlaylist);      // Resume a fetcher to continue until the stopping point stored in msg.      
status_t onResumeUntil(const sp<AMessage> &msg); @@ -196,25 +231,24 @@ private:              const sp<ABuffer> &accessUnit,              const sp<AnotherPacketSource> &source,              bool discard = false); +    bool isStartTimeReached(int64_t timeUs);      status_t extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &buffer);      status_t extractAndQueueAccessUnits(              const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta); +    void notifyStopReached();      void notifyError(status_t err);      void queueDiscontinuity(              ATSParser::DiscontinuityType type, const sp<AMessage> &extra); -    int32_t getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const; +    bool adjustSeqNumberWithAnchorTime(int64_t anchorTimeUs);      int32_t getSeqNumberForDiscontinuity(size_t discontinuitySeq) const;      int32_t getSeqNumberForTime(int64_t timeUs) const;      void updateDuration(); - -    // Before resuming a fetcher in onResume, check the remaining duration is longer than that -    // returned by resumeThreshold. 
-    int64_t resumeThreshold(const sp<AMessage> &msg); +    void updateTargetDuration();      DISALLOW_EVIL_CONSTRUCTORS(PlaylistFetcher);  }; diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk index 2194c38..68bd017 100644 --- a/media/libstagefright/id3/Android.mk +++ b/media/libstagefright/id3/Android.mk @@ -4,7 +4,8 @@ include $(CLEAR_VARS)  LOCAL_SRC_FILES := \  	ID3.cpp -LOCAL_CFLAGS += -Werror +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  LOCAL_MODULE := libstagefright_id3 @@ -17,7 +18,8 @@ include $(CLEAR_VARS)  LOCAL_SRC_FILES := \  	testid3.cpp -LOCAL_CFLAGS += -Werror +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  LOCAL_SHARED_LIBRARIES := \  	libstagefright libutils liblog libbinder libstagefright_foundation diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 77d65e0..758b2c9 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -21,6 +21,7 @@  #include "HTTPBase.h"  #include "TimedEventQueue.h" +#include <media/AudioResamplerPublic.h>  #include <media/MediaPlayerInterface.h>  #include <media/stagefright/DataSource.h>  #include <media/stagefright/OMXClient.h> @@ -31,20 +32,20 @@  namespace android { -struct AudioPlayer; +class AudioPlayer;  struct ClockEstimator; -struct DataSource; -struct MediaBuffer; +class IDataSource; +class MediaBuffer;  struct MediaExtractor;  struct MediaSource;  struct NuCachedSource2; -struct IGraphicBufferProducer; +class IGraphicBufferProducer;  class DrmManagerClinet;  class DecryptHandle;  class TimedTextDriver; -struct WVMExtractor; +class WVMExtractor;  struct AwesomeRenderer : public RefBase {      AwesomeRenderer() {} @@ -93,6 +94,8 @@ struct AwesomePlayer {      status_t setParameter(int key, const Parcel &request);      status_t getParameter(int key, Parcel *reply); +    status_t setPlaybackSettings(const AudioPlaybackRate &rate); +    status_t 
getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */);      status_t invoke(const Parcel &request, Parcel *reply);      status_t setCacheStatCollectFreq(const Parcel &request); @@ -180,6 +183,7 @@ private:      sp<MediaSource> mOmxSource;      sp<MediaSource> mAudioSource;      AudioPlayer *mAudioPlayer; +    AudioPlaybackRate mPlaybackSettings;      int64_t mDurationUs;      int32_t mDisplayWidth; diff --git a/media/libstagefright/include/CallbackDataSource.h b/media/libstagefright/include/CallbackDataSource.h new file mode 100644 index 0000000..1a21dd3 --- /dev/null +++ b/media/libstagefright/include/CallbackDataSource.h @@ -0,0 +1,79 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_CALLBACKDATASOURCE_H +#define ANDROID_CALLBACKDATASOURCE_H + +#include <media/stagefright/DataSource.h> +#include <media/stagefright/foundation/ADebug.h> + +namespace android { + +class IDataSource; +class IMemory; + +// A stagefright DataSource that wraps a binder IDataSource. It's a "Callback" +// DataSource because it calls back to the IDataSource for data. +class CallbackDataSource : public DataSource { +public: +    CallbackDataSource(const sp<IDataSource>& iDataSource); +    virtual ~CallbackDataSource(); + +    // DataSource implementation. 
+    virtual status_t initCheck() const; +    virtual ssize_t readAt(off64_t offset, void *data, size_t size); +    virtual status_t getSize(off64_t *size); + +private: +    sp<IDataSource> mIDataSource; +    sp<IMemory> mMemory; + +    DISALLOW_EVIL_CONSTRUCTORS(CallbackDataSource); +}; + + +// A caching DataSource that wraps a CallbackDataSource. For reads smaller +// than kCacheSize it will read up to kCacheSize ahead and cache it. +// This reduces the number of binder round trips to the IDataSource and has a significant +// impact on time taken for filetype sniffing and metadata extraction. +class TinyCacheSource : public DataSource { +public: +    TinyCacheSource(const sp<DataSource>& source); + +    virtual status_t initCheck() const; +    virtual ssize_t readAt(off64_t offset, void* data, size_t size); +    virtual status_t getSize(off64_t* size); +    virtual uint32_t flags(); + +private: +    // 2kb comes from experimenting with the time-to-first-frame from a MediaPlayer +    // with an in-memory MediaDataSource source on a Nexus 5. Beyond 2kb there was +    // no improvement. 
+    enum { +        kCacheSize = 2048, +    }; + +    sp<DataSource> mSource; +    uint8_t mCache[kCacheSize]; +    off64_t mCachedOffset; +    size_t mCachedSize; + +    DISALLOW_EVIL_CONSTRUCTORS(TinyCacheSource); +}; + +}; // namespace android + +#endif // ANDROID_CALLBACKDATASOURCE_H diff --git a/media/libstagefright/include/MPEG2PSExtractor.h b/media/libstagefright/include/MPEG2PSExtractor.h index fb76564..22cb02d 100644 --- a/media/libstagefright/include/MPEG2PSExtractor.h +++ b/media/libstagefright/include/MPEG2PSExtractor.h @@ -28,7 +28,7 @@ namespace android {  struct ABuffer;  struct AMessage;  struct Track; -struct String8; +class String8;  struct MPEG2PSExtractor : public MediaExtractor {      MPEG2PSExtractor(const sp<DataSource> &source); diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h index db1187d..8eb8f6c 100644 --- a/media/libstagefright/include/MPEG2TSExtractor.h +++ b/media/libstagefright/include/MPEG2TSExtractor.h @@ -20,7 +20,9 @@  #include <media/stagefright/foundation/ABase.h>  #include <media/stagefright/MediaExtractor.h> +#include <media/stagefright/MediaSource.h>  #include <utils/threads.h> +#include <utils/KeyedVector.h>  #include <utils/Vector.h>  namespace android { @@ -30,7 +32,7 @@ struct AnotherPacketSource;  struct ATSParser;  class DataSource;  struct MPEG2TSSource; -struct String8; +class String8;  struct MPEG2TSExtractor : public MediaExtractor {      MPEG2TSExtractor(const sp<DataSource> &source); @@ -54,10 +56,21 @@ private:      Vector<sp<AnotherPacketSource> > mSourceImpls; +    Vector<KeyedVector<int64_t, off64_t> > mSyncPoints; +    // Sync points used for seeking --- normally one for video track is used. +    // If no video track is present, audio track will be used instead. 
+    KeyedVector<int64_t, off64_t> *mSeekSyncPoints; +      off64_t mOffset;      void init();      status_t feedMore(); +    status_t seek(int64_t seekTimeUs, +            const MediaSource::ReadOptions::SeekMode& seekMode); +    status_t queueDiscontinuityForSeek(int64_t actualSeekTimeUs); +    status_t seekBeyond(int64_t seekTimeUs); + +    status_t feedUntilBufferAvailable(const sp<AnotherPacketSource> &impl);      DISALLOW_EVIL_CONSTRUCTORS(MPEG2TSExtractor);  }; diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h index 1fe6fcf..3067c3d 100644 --- a/media/libstagefright/include/MPEG4Extractor.h +++ b/media/libstagefright/include/MPEG4Extractor.h @@ -83,6 +83,8 @@ private:      Vector<SidxEntry> mSidxEntries;      off64_t mMoofOffset; +    bool mMoofFound; +    bool mMdatFound;      Vector<PsshInfo> mPssh; @@ -102,11 +104,15 @@ private:      String8 mLastCommentName;      String8 mLastCommentData; +    KeyedVector<uint32_t, AString> mMetaKeyMap; +      status_t readMetaData();      status_t parseChunk(off64_t *offset, int depth);      status_t parseITunesMetaData(off64_t offset, size_t size);      status_t parse3GPPMetaData(off64_t offset, size_t size, int depth);      void parseID3v2MetaData(off64_t offset); +    status_t parseQTMetaKey(off64_t data_offset, size_t data_size); +    status_t parseQTMetaVal(int32_t keyId, off64_t data_offset, size_t data_size);      status_t updateAudioTrackInfoFromESDS_MPEG4Audio(              const void *esds_data, size_t esds_size); diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h index e8c4970..d468dfc 100644 --- a/media/libstagefright/include/OMX.h +++ b/media/libstagefright/include/OMX.h @@ -24,7 +24,7 @@  namespace android {  struct OMXMaster; -class OMXNodeInstance; +struct OMXNodeInstance;  class OMX : public BnOMX,              public IBinder::DeathRecipient { @@ -69,7 +69,7 @@ public:              node_id node, OMX_U32 
port_index, OMX_U32* usage);      virtual status_t storeMetaDataInBuffers( -            node_id node, OMX_U32 port_index, OMX_BOOL enable); +            node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type);      virtual status_t prepareForAdaptivePlayback(              node_id node, OMX_U32 portIndex, OMX_BOOL enable, @@ -81,7 +81,7 @@ public:      virtual status_t useBuffer(              node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -            buffer_id *buffer); +            buffer_id *buffer, OMX_U32 allottedSize);      virtual status_t useGraphicBuffer(              node_id node, OMX_U32 port_index, @@ -93,7 +93,17 @@ public:      virtual status_t createInputSurface(              node_id node, OMX_U32 port_index, -            sp<IGraphicBufferProducer> *bufferProducer); +            sp<IGraphicBufferProducer> *bufferProducer, +            MetadataBufferType *type); + +    virtual status_t createPersistentInputSurface( +            sp<IGraphicBufferProducer> *bufferProducer, +            sp<IGraphicBufferConsumer> *bufferConsumer); + +    virtual status_t setInputSurface( +            node_id node, OMX_U32 port_index, +            const sp<IGraphicBufferConsumer> &bufferConsumer, +            MetadataBufferType *type);      virtual status_t signalEndOfInputStream(node_id node); @@ -103,18 +113,18 @@ public:      virtual status_t allocateBufferWithBackup(              node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -            buffer_id *buffer); +            buffer_id *buffer, OMX_U32 allottedSize);      virtual status_t freeBuffer(              node_id node, OMX_U32 port_index, buffer_id buffer); -    virtual status_t fillBuffer(node_id node, buffer_id buffer); +    virtual status_t fillBuffer(node_id node, buffer_id buffer, int fenceFd);      virtual status_t emptyBuffer(              node_id node,              buffer_id buffer,              OMX_U32 range_offset, OMX_U32 range_length, -            OMX_U32 flags, 
OMX_TICKS timestamp); +            OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);      virtual status_t getExtensionIndex(              node_id node, @@ -138,10 +148,10 @@ public:              OMX_IN OMX_PTR pEventData);      OMX_ERRORTYPE OnEmptyBufferDone( -            node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); +            node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd);      OMX_ERRORTYPE OnFillBufferDone( -            node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer); +            node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd);      void invalidateNodeID(node_id node); diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h index b26e940..f68e0a9 100644 --- a/media/libstagefright/include/OMXNodeInstance.h +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -27,7 +27,9 @@ namespace android {  class IOMXObserver;  struct OMXMaster; -struct GraphicBufferSource; +class GraphicBufferSource; + +status_t StatusFromOMXError(OMX_ERRORTYPE err);  struct OMXNodeInstance {      OMXNodeInstance( @@ -56,7 +58,8 @@ struct OMXNodeInstance {      status_t getGraphicBufferUsage(OMX_U32 portIndex, OMX_U32* usage); -    status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable); +    status_t storeMetaDataInBuffers( +            OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type);      status_t prepareForAdaptivePlayback(              OMX_U32 portIndex, OMX_BOOL enable, @@ -68,7 +71,7 @@ struct OMXNodeInstance {      status_t useBuffer(              OMX_U32 portIndex, const sp<IMemory> ¶ms, -            OMX::buffer_id *buffer); +            OMX::buffer_id *buffer, OMX_U32 allottedSize);      status_t useGraphicBuffer(              OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer, @@ -79,7 +82,16 @@ struct OMXNodeInstance {              OMX::buffer_id buffer);      status_t 
createInputSurface( -            OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer); +            OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer, +            MetadataBufferType *type); + +    static status_t createPersistentInputSurface( +            sp<IGraphicBufferProducer> *bufferProducer, +            sp<IGraphicBufferConsumer> *bufferConsumer); + +    status_t setInputSurface( +            OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer, +            MetadataBufferType *type);      status_t signalEndOfInputStream(); @@ -89,21 +101,20 @@ struct OMXNodeInstance {      status_t allocateBufferWithBackup(              OMX_U32 portIndex, const sp<IMemory> ¶ms, -            OMX::buffer_id *buffer); +            OMX::buffer_id *buffer, OMX_U32 allottedSize);      status_t freeBuffer(OMX_U32 portIndex, OMX::buffer_id buffer); -    status_t fillBuffer(OMX::buffer_id buffer); +    status_t fillBuffer(OMX::buffer_id buffer, int fenceFd);      status_t emptyBuffer(              OMX::buffer_id buffer,              OMX_U32 rangeOffset, OMX_U32 rangeLength, -            OMX_U32 flags, OMX_TICKS timestamp); +            OMX_U32 flags, OMX_TICKS timestamp, int fenceFd); -    status_t emptyDirectBuffer( -            OMX_BUFFERHEADERTYPE *header, -            OMX_U32 rangeOffset, OMX_U32 rangeLength, -            OMX_U32 flags, OMX_TICKS timestamp); +    status_t emptyGraphicBuffer( +            OMX_BUFFERHEADERTYPE *header, const sp<GraphicBuffer> &buffer, +            OMX_U32 flags, OMX_TICKS timestamp, int fenceFd);      status_t getExtensionIndex(              const char *parameterName, OMX_INDEXTYPE *index); @@ -114,6 +125,8 @@ struct OMXNodeInstance {              const void *data,              size_t size); +    // handles messages and removes them from the list +    void onMessages(std::list<omx_message> &messages);      void onMessage(const omx_message &msg);      void onObserverDied(OMXMaster *master);      void 
onGetHandleFailed(); @@ -147,6 +160,7 @@ private:      uint32_t mBufferIDCount;      KeyedVector<OMX::buffer_id, OMX_BUFFERHEADERTYPE *> mBufferIDToBufferHeader;      KeyedVector<OMX_BUFFERHEADERTYPE *, OMX::buffer_id> mBufferHeaderToBufferID; +    MetadataBufferType mMetadataType[2];      // For debug support      char *mName; @@ -194,16 +208,35 @@ private:              OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);      status_t storeMetaDataInBuffers_l( -            OMX_U32 portIndex, OMX_BOOL enable, -            OMX_BOOL useGraphicBuffer, OMX_BOOL *usingGraphicBufferInMeta); +            OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type); + +    // Stores fence into buffer if it is ANWBuffer type and has enough space. +    // otherwise, waits for the fence to signal.  Takes ownership of |fenceFd|. +    status_t storeFenceInMeta_l( +            OMX_BUFFERHEADERTYPE *header, int fenceFd, OMX_U32 portIndex); + +    // Retrieves the fence from buffer if ANWBuffer type and has enough space. Otherwise, returns -1 +    int retrieveFenceFromMeta_l( +            OMX_BUFFERHEADERTYPE *header, OMX_U32 portIndex);      status_t emptyBuffer_l(              OMX_BUFFERHEADERTYPE *header, -            OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr); +            OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr, int fenceFd); +    status_t updateGraphicBufferInMeta_l( +            OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer, +            OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header); + +    status_t createGraphicBufferSource( +            OMX_U32 portIndex, sp<IGraphicBufferConsumer> consumer /* nullable */, +            MetadataBufferType *type);      sp<GraphicBufferSource> getGraphicBufferSource();      void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource); +    // Handles |msg|, and may modify it. Returns true iff completely handled it and +    // |msg| does not need to be sent to the event listener. 
+    bool handleMessage(omx_message &msg); +      OMXNodeInstance(const OMXNodeInstance &);      OMXNodeInstance &operator=(const OMXNodeInstance &);  }; diff --git a/media/libstagefright/include/OggExtractor.h b/media/libstagefright/include/OggExtractor.h index e97c8cd..c647cbb 100644 --- a/media/libstagefright/include/OggExtractor.h +++ b/media/libstagefright/include/OggExtractor.h @@ -27,7 +27,7 @@ struct AMessage;  class DataSource;  class String8; -struct MyVorbisExtractor; +struct MyOggExtractor;  struct OggSource;  struct OggExtractor : public MediaExtractor { @@ -48,7 +48,7 @@ private:      sp<DataSource> mDataSource;      status_t mInitCheck; -    MyVorbisExtractor *mImpl; +    MyOggExtractor *mImpl;      OggExtractor(const OggExtractor &);      OggExtractor &operator=(const OggExtractor &); diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h index 60c9e7e..7053247 100644 --- a/media/libstagefright/include/SampleIterator.h +++ b/media/libstagefright/include/SampleIterator.h @@ -18,7 +18,7 @@  namespace android { -struct SampleTable; +class SampleTable;  struct SampleIterator {      SampleIterator(SampleTable *table); diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h index fa3ea89..757b308 100644 --- a/media/libstagefright/include/SoftwareRenderer.h +++ b/media/libstagefright/include/SoftwareRenderer.h @@ -19,22 +19,27 @@  #define SOFTWARE_RENDERER_H_  #include <media/stagefright/ColorConverter.h> +#include <media/stagefright/FrameRenderTracker.h>  #include <utils/RefBase.h>  #include <system/window.h> +#include <list> +  namespace android {  struct AMessage;  class SoftwareRenderer {  public: -    explicit SoftwareRenderer(const sp<ANativeWindow> &nativeWindow); +    explicit SoftwareRenderer( +            const sp<ANativeWindow> &nativeWindow, int32_t rotation = 0);      ~SoftwareRenderer(); -    void render( -            const void *data, 
size_t size, int64_t timestampNs, +    std::list<FrameRenderTracker::Info> render( +            const void *data, size_t size, int64_t mediaTimeUs, nsecs_t renderTimeNs,              void *platformPrivate, const sp<AMessage> &format); +    void clearTracker();  private:      enum YUVMode { @@ -48,6 +53,8 @@ private:      int32_t mWidth, mHeight;      int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;      int32_t mCropWidth, mCropHeight; +    int32_t mRotationDegrees; +    FrameRenderTracker mRenderTracker;      SoftwareRenderer(const SoftwareRenderer &);      SoftwareRenderer &operator=(const SoftwareRenderer &); diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h index 6632c27..fd739d0 100644 --- a/media/libstagefright/include/StagefrightMetadataRetriever.h +++ b/media/libstagefright/include/StagefrightMetadataRetriever.h @@ -25,7 +25,7 @@  namespace android { -struct DataSource; +class DataSource;  class MediaExtractor;  struct StagefrightMetadataRetriever : public MediaMetadataRetrieverInterface { @@ -38,6 +38,7 @@ struct StagefrightMetadataRetriever : public MediaMetadataRetrieverInterface {              const KeyedVector<String8, String8> *headers);      virtual status_t setDataSource(int fd, int64_t offset, int64_t length); +    virtual status_t setDataSource(const sp<DataSource>& source);      virtual VideoFrame *getFrameAtTime(int64_t timeUs, int option);      virtual MediaAlbumArt *extractAlbumArt(); @@ -53,6 +54,8 @@ private:      MediaAlbumArt *mAlbumArt;      void parseMetaData(); +    // Delete album art and clear metadata. 
+    void clearMetadata();      StagefrightMetadataRetriever(const StagefrightMetadataRetriever &); diff --git a/media/libstagefright/include/TimedEventQueue.h b/media/libstagefright/include/TimedEventQueue.h index 2963150..890f7e8 100644 --- a/media/libstagefright/include/TimedEventQueue.h +++ b/media/libstagefright/include/TimedEventQueue.h @@ -46,7 +46,7 @@ struct TimedEventQueue {          virtual void fire(TimedEventQueue *queue, int64_t now_us) = 0;      private: -        friend class TimedEventQueue; +        friend struct TimedEventQueue;          event_id mEventID; diff --git a/media/libstagefright/include/VBRISeeker.h b/media/libstagefright/include/VBRISeeker.h index 1a2bf9f..c57d571 100644 --- a/media/libstagefright/include/VBRISeeker.h +++ b/media/libstagefright/include/VBRISeeker.h @@ -24,7 +24,7 @@  namespace android { -struct DataSource; +class DataSource;  struct VBRISeeker : public MP3Seeker {      static sp<VBRISeeker> CreateFromSource( diff --git a/media/libstagefright/include/XINGSeeker.h b/media/libstagefright/include/XINGSeeker.h index c408576..cce04f0 100644 --- a/media/libstagefright/include/XINGSeeker.h +++ b/media/libstagefright/include/XINGSeeker.h @@ -22,7 +22,7 @@  namespace android { -struct DataSource; +class DataSource;  struct XINGSeeker : public MP3Seeker {      static sp<XINGSeeker> CreateFromSource( diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h index c270bc1..dafa07e 100644 --- a/media/libstagefright/include/avc_utils.h +++ b/media/libstagefright/include/avc_utils.h @@ -36,6 +36,11 @@ enum {      kAVCProfileCAVLC444Intra = 0x2c  }; +struct NALPosition { +    size_t nalOffset; +    size_t nalSize; +}; +  // Optionally returns sample aspect ratio as well.  
void FindAVCDimensions(          const sp<ABuffer> &seqParamSet, diff --git a/media/libstagefright/matroska/Android.mk b/media/libstagefright/matroska/Android.mk index 446ff8c..1e8c2b2 100644 --- a/media/libstagefright/matroska/Android.mk +++ b/media/libstagefright/matroska/Android.mk @@ -8,7 +8,8 @@ LOCAL_C_INCLUDES:= \          $(TOP)/external/libvpx/libwebm \          $(TOP)/frameworks/native/include/media/openmax \ -LOCAL_CFLAGS += -Wno-multichar -Werror +LOCAL_CFLAGS += -Wno-multichar -Werror -Wall +LOCAL_CLANG := true  LOCAL_MODULE:= libstagefright_matroska diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp index c30e807..ecc2573 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.cpp +++ b/media/libstagefright/matroska/MatroskaExtractor.cpp @@ -944,6 +944,11 @@ void MatroskaExtractor::addTracks() {          ALOGV("codec id = %s", codecID);          ALOGV("codec name = %s", track->GetCodecNameAsUTF8()); +        if (codecID == NULL) { +            ALOGW("unknown codecID is not supported."); +            continue; +        } +          size_t codecPrivateSize;          const unsigned char *codecPrivate =              track->GetCodecPrivate(codecPrivateSize); diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index 1eae6cf..e3c3e80 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -35,6 +35,7 @@  #include <media/stagefright/Utils.h>  #include <media/IStreamSource.h>  #include <utils/KeyedVector.h> +#include <utils/Vector.h>  #include <inttypes.h> @@ -47,15 +48,19 @@ namespace android {  static const size_t kTSPacketSize = 188;  struct ATSParser::Program : public RefBase { -    Program(ATSParser *parser, unsigned programNumber, unsigned programMapPID); +    Program(ATSParser *parser, unsigned programNumber, unsigned programMapPID, +            int64_t lastRecoveredPTS);      bool 
parsePSISection(              unsigned pid, ABitReader *br, status_t *err); +    // Pass to appropriate stream according to pid, and set event if it's a PES +    // with a sync frame. +    // Note that the method itself does not touch event.      bool parsePID(              unsigned pid, unsigned continuity_counter,              unsigned payload_unit_start_indicator, -            ABitReader *br, status_t *err); +            ABitReader *br, status_t *err, SyncEvent *event);      void signalDiscontinuity(              DiscontinuityType type, const sp<AMessage> &extra); @@ -86,14 +91,22 @@ struct ATSParser::Program : public RefBase {      }  private: +    struct StreamInfo { +        unsigned mType; +        unsigned mPID; +    }; +      ATSParser *mParser;      unsigned mProgramNumber;      unsigned mProgramMapPID;      KeyedVector<unsigned, sp<Stream> > mStreams;      bool mFirstPTSValid;      uint64_t mFirstPTS; +    int64_t mLastRecoveredPTS;      status_t parseProgramMap(ABitReader *br); +    int64_t recoverPTS(uint64_t PTS_33bit); +    bool switchPIDs(const Vector<StreamInfo> &infos);      DISALLOW_EVIL_CONSTRUCTORS(Program);  }; @@ -108,10 +121,14 @@ struct ATSParser::Stream : public RefBase {      unsigned pid() const { return mElementaryPID; }      void setPID(unsigned pid) { mElementaryPID = pid; } +    // Parse the payload and set event when PES with a sync frame is detected. +    // This method knows when a PES starts; so record mPesStartOffset in that +    // case.      
status_t parse(              unsigned continuity_counter,              unsigned payload_unit_start_indicator, -            ABitReader *br); +            ABitReader *br, +            SyncEvent *event);      void signalDiscontinuity(              DiscontinuityType type, const sp<AMessage> &extra); @@ -122,6 +139,7 @@ struct ATSParser::Stream : public RefBase {      bool isAudio() const;      bool isVideo() const; +    bool isMeta() const;  protected:      virtual ~Stream(); @@ -139,17 +157,24 @@ private:      bool mEOSReached;      uint64_t mPrevPTS; +    off64_t mPesStartOffset;      ElementaryStreamQueue *mQueue; -    status_t flush(); -    status_t parsePES(ABitReader *br); - +    // Flush accumulated payload if necessary --- i.e. at EOS or at the start of +    // another payload. event is set if the flushed payload is PES with a sync +    // frame. +    status_t flush(SyncEvent *event); +    // Strip and parse PES headers and pass remaining payload into onPayload +    // with parsed metadata. event is set if the PES contains a sync frame. +    status_t parsePES(ABitReader *br, SyncEvent *event); + +    // Feed the payload into mQueue and if a packet is identified, queue it +    // into mSource. If the packet is a sync frame. set event with start offset +    // and timestamp of the packet.      
void onPayloadData(              unsigned PTS_DTS_flags, uint64_t PTS, uint64_t DTS, -            const uint8_t *data, size_t size); - -    void extractAACFrames(const sp<ABuffer> &buffer); +            const uint8_t *data, size_t size, SyncEvent *event);      DISALLOW_EVIL_CONSTRUCTORS(Stream);  }; @@ -158,10 +183,12 @@ struct ATSParser::PSISection : public RefBase {      PSISection();      status_t append(const void *data, size_t size); +    void setSkipBytes(uint8_t skip);      void clear();      bool isComplete() const;      bool isEmpty() const; +    bool isCRCOkay() const;      const uint8_t *data() const;      size_t size() const; @@ -171,19 +198,34 @@ protected:  private:      sp<ABuffer> mBuffer; +    uint8_t mSkipBytes; +    static uint32_t CRC_TABLE[];      DISALLOW_EVIL_CONSTRUCTORS(PSISection);  }; +ATSParser::SyncEvent::SyncEvent(off64_t offset) +    : mInit(false), mOffset(offset), mTimeUs(0) {} + +void ATSParser::SyncEvent::init(off64_t offset, const sp<MediaSource> &source, +        int64_t timeUs) { +    mInit = true; +    mOffset = offset; +    mMediaSource = source; +    mTimeUs = timeUs; +} +  ////////////////////////////////////////////////////////////////////////////////  ATSParser::Program::Program( -        ATSParser *parser, unsigned programNumber, unsigned programMapPID) +        ATSParser *parser, unsigned programNumber, unsigned programMapPID, +        int64_t lastRecoveredPTS)      : mParser(parser),        mProgramNumber(programNumber),        mProgramMapPID(programMapPID),        mFirstPTSValid(false), -      mFirstPTS(0) { +      mFirstPTS(0), +      mLastRecoveredPTS(lastRecoveredPTS) {      ALOGV("new program number %u", programNumber);  } @@ -203,7 +245,7 @@ bool ATSParser::Program::parsePSISection(  bool ATSParser::Program::parsePID(          unsigned pid, unsigned continuity_counter,          unsigned payload_unit_start_indicator, -        ABitReader *br, status_t *err) { +        ABitReader *br, status_t *err, SyncEvent 
*event) {      *err = OK;      ssize_t index = mStreams.indexOfKey(pid); @@ -212,7 +254,7 @@ bool ATSParser::Program::parsePID(      }      *err = mStreams.editValueAt(index)->parse( -            continuity_counter, payload_unit_start_indicator, br); +            continuity_counter, payload_unit_start_indicator, br, event);      return true;  } @@ -238,10 +280,75 @@ void ATSParser::Program::signalEOS(status_t finalResult) {      }  } -struct StreamInfo { -    unsigned mType; -    unsigned mPID; -}; +bool ATSParser::Program::switchPIDs(const Vector<StreamInfo> &infos) { +    bool success = false; + +    if (mStreams.size() == infos.size()) { +        // build type->PIDs map for old and new mapping +        size_t i; +        KeyedVector<int32_t, Vector<int32_t> > oldType2PIDs, newType2PIDs; +        for (i = 0; i < mStreams.size(); ++i) { +            ssize_t index = oldType2PIDs.indexOfKey(mStreams[i]->type()); +            if (index < 0) { +                oldType2PIDs.add(mStreams[i]->type(), Vector<int32_t>()); +            } +            oldType2PIDs.editValueFor(mStreams[i]->type()).push_back(mStreams[i]->pid()); +        } +        for (i = 0; i < infos.size(); ++i) { +            ssize_t index = newType2PIDs.indexOfKey(infos[i].mType); +            if (index < 0) { +                newType2PIDs.add(infos[i].mType, Vector<int32_t>()); +            } +            newType2PIDs.editValueFor(infos[i].mType).push_back(infos[i].mPID); +        } + +        // we can recover if the number of streams for each type hasn't changed +        if (oldType2PIDs.size() == newType2PIDs.size()) { +            success = true; +            for (i = 0; i < oldType2PIDs.size(); ++i) { +                // KeyedVector is sorted, we just compare key and size of each index +                if (oldType2PIDs.keyAt(i) != newType2PIDs.keyAt(i) +                        || oldType2PIDs[i].size() != newType2PIDs[i].size()) { +                     success = false; +                     break; 
+                } +            } +        } + +        if (success) { +            // save current streams to temp +            KeyedVector<int32_t, sp<Stream> > temp; +            for (i = 0; i < mStreams.size(); ++i) { +                 temp.add(mStreams.keyAt(i), mStreams.editValueAt(i)); +            } + +            mStreams.clear(); +            for (i = 0; i < temp.size(); ++i) { +                // The two checks below shouldn't happen, +                // we already checked above the stream count matches +                ssize_t index = newType2PIDs.indexOfKey(temp[i]->type()); +                if (index < 0) { +                    return false; +                } +                Vector<int32_t> &newPIDs = newType2PIDs.editValueAt(index); +                if (newPIDs.isEmpty()) { +                    return false; +                } + +                // get the next PID for temp[i]->type() in the new PID map +                Vector<int32_t>::iterator it = newPIDs.begin(); + +                // change the PID of the stream, and add it back +                temp.editValueAt(i)->setPID(*it); +                mStreams.add(temp[i]->pid(), temp.editValueAt(i)); + +                // removed the used PID +                newPIDs.erase(it); +            } +        } +    } +    return success; +}  status_t ATSParser::Program::parseProgramMap(ABitReader *br) {      unsigned table_id = br->getBits(8); @@ -257,13 +364,11 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {          return ERROR_MALFORMED;      } -    CHECK_EQ(br->getBits(1), 0u); +    br->skipBits(1);  // '0'      MY_LOGV("  reserved = %u", br->getBits(2));      unsigned section_length = br->getBits(12);      ALOGV("  section_length = %u", section_length); -    CHECK_EQ(section_length & 0xc00, 0u); -    CHECK_LE(section_length, 1021u);      MY_LOGV("  program_number = %u", br->getBits(16));      MY_LOGV("  reserved = %u", br->getBits(2)); @@ -280,7 +385,6 @@ status_t 
ATSParser::Program::parseProgramMap(ABitReader *br) {      unsigned program_info_length = br->getBits(12);      ALOGV("  program_info_length = %u", program_info_length); -    CHECK_EQ(program_info_length & 0xc00, 0u);      br->skipBits(program_info_length * 8);  // skip descriptors @@ -291,8 +395,7 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {      // final CRC.      size_t infoBytesRemaining = section_length - 9 - program_info_length - 4; -    while (infoBytesRemaining > 0) { -        CHECK_GE(infoBytesRemaining, 5u); +    while (infoBytesRemaining >= 5) {          unsigned streamType = br->getBits(8);          ALOGV("    stream_type = 0x%02x", streamType); @@ -306,9 +409,6 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {          unsigned ES_info_length = br->getBits(12);          ALOGV("    ES_info_length = %u", ES_info_length); -        CHECK_EQ(ES_info_length & 0xc00, 0u); - -        CHECK_GE(infoBytesRemaining - 5, ES_info_length);  #if 0          br->skipBits(ES_info_length * 8);  // skip descriptors @@ -320,13 +420,13 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {              unsigned descLength = br->getBits(8);              ALOGV("      len = %u", descLength); -            CHECK_GE(info_bytes_remaining, 2 + descLength); - +            if (info_bytes_remaining < descLength) { +                return ERROR_MALFORMED; +            }              br->skipBits(descLength * 8);              info_bytes_remaining -= descLength + 2;          } -        CHECK_EQ(info_bytes_remaining, 0u);  #endif          StreamInfo info; @@ -337,7 +437,9 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {          infoBytesRemaining -= 5 + ES_info_length;      } -    CHECK_EQ(infoBytesRemaining, 0u); +    if (infoBytesRemaining != 0) { +        ALOGW("Section data remains unconsumed"); +    }      MY_LOGV("  CRC = 0x%08x", br->getBits(32));      bool PIDsChanged = false; @@ -370,39 +472,8 @@ status_t 
ATSParser::Program::parseProgramMap(ABitReader *br) {          }  #endif -        // The only case we can recover from is if we have two streams -        // and they switched PIDs. - -        bool success = false; - -        if (mStreams.size() == 2 && infos.size() == 2) { -            const StreamInfo &info1 = infos.itemAt(0); -            const StreamInfo &info2 = infos.itemAt(1); - -            sp<Stream> s1 = mStreams.editValueAt(0); -            sp<Stream> s2 = mStreams.editValueAt(1); - -            bool caseA = -                info1.mPID == s1->pid() && info1.mType == s2->type() -                    && info2.mPID == s2->pid() && info2.mType == s1->type(); - -            bool caseB = -                info1.mPID == s2->pid() && info1.mType == s1->type() -                    && info2.mPID == s1->pid() && info2.mType == s2->type(); - -            if (caseA || caseB) { -                unsigned pid1 = s1->pid(); -                unsigned pid2 = s2->pid(); -                s1->setPID(pid2); -                s2->setPID(pid1); - -                mStreams.clear(); -                mStreams.add(s1->pid(), s1); -                mStreams.add(s2->pid(), s2); - -                success = true; -            } -        } +        // we can recover if number of streams for each type remain the same +        bool success = switchPIDs(infos);          if (!success) {              ALOGI("Stream PIDs changed and we cannot recover."); @@ -426,6 +497,32 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) {      return OK;  } +int64_t ATSParser::Program::recoverPTS(uint64_t PTS_33bit) { +    // We only have the lower 33-bit of the PTS. It could overflow within a +    // reasonable amount of time. To handle the wrap-around, use fancy math +    // to get an extended PTS that is within [-0xffffffff, 0xffffffff] +    // of the latest recovered PTS. 
+    if (mLastRecoveredPTS < 0ll) { +        // Use the original 33bit number for 1st frame, the reason is that +        // if 1st frame wraps to negative that's far away from 0, we could +        // never start. Only start wrapping around from 2nd frame. +        mLastRecoveredPTS = static_cast<int64_t>(PTS_33bit); +    } else { +        mLastRecoveredPTS = static_cast<int64_t>( +                ((mLastRecoveredPTS - PTS_33bit + 0x100000000ll) +                & 0xfffffffe00000000ull) | PTS_33bit); +        // We start from 0, but recovered PTS could be slightly below 0. +        // Clamp it to 0 as rest of the pipeline doesn't take negative pts. +        // (eg. video is read first and starts at 0, but audio starts at 0xfffffff0) +        if (mLastRecoveredPTS < 0ll) { +            ALOGI("Clamping negative recovered PTS (%" PRId64 ") to 0", mLastRecoveredPTS); +            mLastRecoveredPTS = 0ll; +        } +    } + +    return mLastRecoveredPTS; +} +  sp<MediaSource> ATSParser::Program::getSource(SourceType type) {      size_t index = (type == AUDIO) ? 
0 : 0; @@ -456,6 +553,8 @@ bool ATSParser::Program::hasSource(SourceType type) const {  }  int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) { +    PTS = recoverPTS(PTS); +      if (!(mParser->mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)) {          if (!mFirstPTSValid) {              mFirstPTSValid = true; @@ -530,6 +629,11 @@ ATSParser::Stream::Stream(                      ElementaryStreamQueue::AC3);              break; +        case STREAMTYPE_METADATA: +            mQueue = new ElementaryStreamQueue( +                    ElementaryStreamQueue::METADATA); +            break; +          default:              break;      } @@ -549,7 +653,8 @@ ATSParser::Stream::~Stream() {  status_t ATSParser::Stream::parse(          unsigned continuity_counter, -        unsigned payload_unit_start_indicator, ABitReader *br) { +        unsigned payload_unit_start_indicator, ABitReader *br, +        SyncEvent *event) {      if (mQueue == NULL) {          return OK;      } @@ -580,19 +685,22 @@ status_t ATSParser::Stream::parse(      mExpectedContinuityCounter = (continuity_counter + 1) & 0x0f;      if (payload_unit_start_indicator) { +        off64_t offset = (event != NULL) ? event->getOffset() : 0;          if (mPayloadStarted) {              // Otherwise we run the danger of receiving the trailing bytes              // of a PES packet that we never saw the start of and assuming              // we have a a complete PES packet. 
-            status_t err = flush(); +            status_t err = flush(event);              if (err != OK) { -                return err; +                ALOGW("Error (%08x) happened while flushing; we simply discard " +                      "the PES packet and continue.", err);              }          }          mPayloadStarted = true; +        mPesStartOffset = offset;      }      if (!mPayloadStarted) { @@ -600,7 +708,10 @@ status_t ATSParser::Stream::parse(      }      size_t payloadSizeBits = br->numBitsLeft(); -    CHECK_EQ(payloadSizeBits % 8, 0u); +    if (payloadSizeBits % 8 != 0u) { +        ALOGE("Wrong value"); +        return BAD_VALUE; +    }      size_t neededSize = mBuffer->size() + payloadSizeBits / 8;      if (mBuffer->capacity() < neededSize) { @@ -648,6 +759,13 @@ bool ATSParser::Stream::isAudio() const {      }  } +bool ATSParser::Stream::isMeta() const { +    if (mStreamType == STREAMTYPE_METADATA) { +        return true; +    } +    return false; +} +  void ATSParser::Stream::signalDiscontinuity(          DiscontinuityType type, const sp<AMessage> &extra) {      mExpectedContinuityCounter = -1; @@ -657,6 +775,7 @@ void ATSParser::Stream::signalDiscontinuity(      }      mPayloadStarted = false; +    mEOSReached = false;      mBuffer->setRange(0, 0);      bool clearFormat = false; @@ -695,10 +814,10 @@ void ATSParser::Stream::signalEOS(status_t finalResult) {          mSource->signalEOS(finalResult);      }      mEOSReached = true; -    flush(); +    flush(NULL);  } -status_t ATSParser::Stream::parsePES(ABitReader *br) { +status_t ATSParser::Stream::parsePES(ABitReader *br, SyncEvent *event) {      unsigned packet_startcode_prefix = br->getBits(24);      ALOGV("packet_startcode_prefix = 0x%08x", packet_startcode_prefix); @@ -710,8 +829,6 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {          return ERROR_MALFORMED;      } -    CHECK_EQ(packet_startcode_prefix, 0x000001u); -      unsigned stream_id = br->getBits(8);      
ALOGV("stream_id = 0x%02x", stream_id); @@ -726,7 +843,9 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {              && stream_id != 0xff  // program_stream_directory              && stream_id != 0xf2  // DSMCC              && stream_id != 0xf8) {  // H.222.1 type E -        CHECK_EQ(br->getBits(2), 2u); +        if (br->getBits(2) != 2u) { +            return ERROR_MALFORMED; +        }          MY_LOGV("PES_scrambling_control = %u", br->getBits(2));          MY_LOGV("PES_priority = %u", br->getBits(1)); @@ -760,34 +879,51 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {          uint64_t PTS = 0, DTS = 0;          if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) { -            CHECK_GE(optional_bytes_remaining, 5u); +            if (optional_bytes_remaining < 5u) { +                return ERROR_MALFORMED; +            }              if (br->getBits(4) != PTS_DTS_flags) { -                ALOGE("PES data Error!");                  return ERROR_MALFORMED;              }              PTS = ((uint64_t)br->getBits(3)) << 30; -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              PTS |= ((uint64_t)br->getBits(15)) << 15; -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              PTS |= br->getBits(15); -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ALOGV("PTS = 0x%016" PRIx64 " (%.2f)", PTS, PTS / 90000.0);              optional_bytes_remaining -= 5;              if (PTS_DTS_flags == 3) { -                CHECK_GE(optional_bytes_remaining, 5u); +                if (optional_bytes_remaining < 5u) { +                    return ERROR_MALFORMED; +                } -                CHECK_EQ(br->getBits(4), 1u); +                if (br->getBits(4) != 1u) { +         
           return ERROR_MALFORMED; +                }                  DTS = ((uint64_t)br->getBits(3)) << 30; -                CHECK_EQ(br->getBits(1), 1u); +                if (br->getBits(1) != 1u) { +                    return ERROR_MALFORMED; +                }                  DTS |= ((uint64_t)br->getBits(15)) << 15; -                CHECK_EQ(br->getBits(1), 1u); +                if (br->getBits(1) != 1u) { +                    return ERROR_MALFORMED; +                }                  DTS |= br->getBits(15); -                CHECK_EQ(br->getBits(1), 1u); +                if (br->getBits(1) != 1u) { +                    return ERROR_MALFORMED; +                }                  ALOGV("DTS = %" PRIu64, DTS); @@ -796,31 +932,47 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {          }          if (ESCR_flag) { -            CHECK_GE(optional_bytes_remaining, 6u); +            if (optional_bytes_remaining < 6u) { +                return ERROR_MALFORMED; +            }              br->getBits(2);              uint64_t ESCR = ((uint64_t)br->getBits(3)) << 30; -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ESCR |= ((uint64_t)br->getBits(15)) << 15; -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ESCR |= br->getBits(15); -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ALOGV("ESCR = %" PRIu64, ESCR);              MY_LOGV("ESCR_extension = %u", br->getBits(9)); -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              optional_bytes_remaining -= 6;          }          if (ES_rate_flag) { -            CHECK_GE(optional_bytes_remaining, 3u); +           
 if (optional_bytes_remaining < 3u) { +                return ERROR_MALFORMED; +            } -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              MY_LOGV("ES_rate = %u", br->getBits(22)); -            CHECK_EQ(br->getBits(1), 1u); +            if (br->getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              optional_bytes_remaining -= 3;          } @@ -830,7 +982,9 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {          // ES data follows.          if (PES_packet_length != 0) { -            CHECK_GE(PES_packet_length, PES_header_data_length + 3); +            if (PES_packet_length < PES_header_data_length + 3) { +                return ERROR_MALFORMED; +            }              unsigned dataLength =                  PES_packet_length - 3 - PES_header_data_length; @@ -843,35 +997,39 @@ status_t ATSParser::Stream::parsePES(ABitReader *br) {                  return ERROR_MALFORMED;              } -            CHECK_GE(br->numBitsLeft(), dataLength * 8); -              onPayloadData( -                    PTS_DTS_flags, PTS, DTS, br->data(), dataLength); +                    PTS_DTS_flags, PTS, DTS, br->data(), dataLength, event);              br->skipBits(dataLength * 8);          } else {              onPayloadData(                      PTS_DTS_flags, PTS, DTS, -                    br->data(), br->numBitsLeft() / 8); +                    br->data(), br->numBitsLeft() / 8, event);              size_t payloadSizeBits = br->numBitsLeft(); -            CHECK_EQ(payloadSizeBits % 8, 0u); +            if (payloadSizeBits % 8 != 0u) { +                return ERROR_MALFORMED; +            }              ALOGV("There's %zu bytes of payload.", payloadSizeBits / 8);          }      } else if (stream_id == 0xbe) {  // padding_stream -        CHECK_NE(PES_packet_length, 0u); +        if (PES_packet_length == 0u) { +            return 
ERROR_MALFORMED; +        }          br->skipBits(PES_packet_length * 8);      } else { -        CHECK_NE(PES_packet_length, 0u); +        if (PES_packet_length == 0u) { +            return ERROR_MALFORMED; +        }          br->skipBits(PES_packet_length * 8);      }      return OK;  } -status_t ATSParser::Stream::flush() { -    if (mBuffer->size() == 0) { +status_t ATSParser::Stream::flush(SyncEvent *event) { +    if (mBuffer == NULL || mBuffer->size() == 0) {          return OK;      } @@ -879,7 +1037,7 @@ status_t ATSParser::Stream::flush() {      ABitReader br(mBuffer->data(), mBuffer->size()); -    status_t err = parsePES(&br); +    status_t err = parsePES(&br, event);      mBuffer->setRange(0, 0); @@ -888,7 +1046,7 @@ status_t ATSParser::Stream::flush() {  void ATSParser::Stream::onPayloadData(          unsigned PTS_DTS_flags, uint64_t PTS, uint64_t /* DTS */, -        const uint8_t *data, size_t size) { +        const uint8_t *data, size_t size, SyncEvent *event) {  #if 0      ALOGI("payload streamType 0x%02x, PTS = 0x%016llx, dPTS = %lld",            mStreamType, @@ -915,6 +1073,7 @@ void ATSParser::Stream::onPayloadData(      }      sp<ABuffer> accessUnit; +    bool found = false;      while ((accessUnit = mQueue->dequeueAccessUnit()) != NULL) {          if (mSource == NULL) {              sp<MetaData> meta = mQueue->getFormat(); @@ -942,6 +1101,17 @@ void ATSParser::Stream::onPayloadData(              }              mSource->queueAccessUnit(accessUnit);          } + +        if ((event != NULL) && !found && mQueue->getFormat() != NULL) { +            int32_t sync = 0; +            if (accessUnit->meta()->findInt32("isSync", &sync) && sync) { +                int64_t timeUs; +                if (accessUnit->meta()->findInt64("timeUs", &timeUs)) { +                    found = true; +                    event->init(mPesStartOffset, mSource, timeUs); +                } +            } +        }      }  } @@ -963,6 +1133,14 @@ sp<MediaSource> 
ATSParser::Stream::getSource(SourceType type) {              break;          } +        case META: +        { +            if (isMeta()) { +                return mSource; +            } +            break; +        } +          default:              break;      } @@ -977,6 +1155,7 @@ ATSParser::ATSParser(uint32_t flags)        mAbsoluteTimeAnchorUs(-1ll),        mTimeOffsetValid(false),        mTimeOffsetUs(0ll), +      mLastRecoveredPTS(-1ll),        mNumTSPacketsParsed(0),        mNumPCRs(0) {      mPSISections.add(0 /* PID */, new PSISection); @@ -985,31 +1164,54 @@ ATSParser::ATSParser(uint32_t flags)  ATSParser::~ATSParser() {  } -status_t ATSParser::feedTSPacket(const void *data, size_t size) { -    CHECK_EQ(size, kTSPacketSize); +status_t ATSParser::feedTSPacket(const void *data, size_t size, +        SyncEvent *event) { +    if (size != kTSPacketSize) { +        ALOGE("Wrong TS packet size"); +        return BAD_VALUE; +    }      ABitReader br((const uint8_t *)data, kTSPacketSize); -    return parseTS(&br); +    return parseTS(&br, event);  }  void ATSParser::signalDiscontinuity(          DiscontinuityType type, const sp<AMessage> &extra) {      int64_t mediaTimeUs; -    if ((type & DISCONTINUITY_TIME) -            && extra != NULL -            && extra->findInt64( -                IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) { -        mAbsoluteTimeAnchorUs = mediaTimeUs; +    if ((type & DISCONTINUITY_TIME) && extra != NULL) { +        if (extra->findInt64(IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) { +            mAbsoluteTimeAnchorUs = mediaTimeUs; +        } +        if ((mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE) +                && extra->findInt64( +                    IStreamListener::kKeyRecentMediaTimeUs, &mediaTimeUs)) { +            if (mAbsoluteTimeAnchorUs >= 0ll) { +                mediaTimeUs -= mAbsoluteTimeAnchorUs; +            } +            if (mTimeOffsetValid) { +                mediaTimeUs -= mTimeOffsetUs; +            } +     
       mLastRecoveredPTS = (mediaTimeUs * 9) / 100; +        }      } else if (type == DISCONTINUITY_ABSOLUTE_TIME) {          int64_t timeUs; -        CHECK(extra->findInt64("timeUs", &timeUs)); +        if (!extra->findInt64("timeUs", &timeUs)) { +            ALOGE("timeUs not found"); +            return; +        } -        CHECK(mPrograms.empty()); +        if (!mPrograms.empty()) { +            ALOGE("mPrograms is not empty"); +            return; +        }          mAbsoluteTimeAnchorUs = timeUs;          return;      } else if (type == DISCONTINUITY_TIME_OFFSET) {          int64_t offset; -        CHECK(extra->findInt64("offset", &offset)); +        if (!extra->findInt64("offset", &offset)) { +            ALOGE("offset not found"); +            return; +        }          mTimeOffsetValid = true;          mTimeOffsetUs = offset; @@ -1022,7 +1224,10 @@ void ATSParser::signalDiscontinuity(  }  void ATSParser::signalEOS(status_t finalResult) { -    CHECK_NE(finalResult, (status_t)OK); +    if (finalResult == (status_t) OK) { +        ALOGE("finalResult not OK"); +        return; +    }      for (size_t i = 0; i < mPrograms.size(); ++i) {          mPrograms.editItemAt(i)->signalEOS(finalResult); @@ -1038,14 +1243,12 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) {      }      unsigned section_syntax_indictor = br->getBits(1);      ALOGV("  section_syntax_indictor = %u", section_syntax_indictor); -    CHECK_EQ(section_syntax_indictor, 1u); -    CHECK_EQ(br->getBits(1), 0u); +    br->skipBits(1);  // '0'      MY_LOGV("  reserved = %u", br->getBits(2));      unsigned section_length = br->getBits(12);      ALOGV("  section_length = %u", section_length); -    CHECK_EQ(section_length & 0xc00, 0u);      MY_LOGV("  transport_stream_id = %u", br->getBits(16));      MY_LOGV("  reserved = %u", br->getBits(2)); @@ -1055,7 +1258,6 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) {      MY_LOGV("  last_section_number = %u", br->getBits(8));   
   size_t numProgramBytes = (section_length - 5 /* header */ - 4 /* crc */); -    CHECK_EQ((numProgramBytes % 4), 0u);      for (size_t i = 0; i < numProgramBytes / 4; ++i) {          unsigned program_number = br->getBits(16); @@ -1083,7 +1285,7 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) {              if (!found) {                  mPrograms.push( -                        new Program(this, program_number, programMapPID)); +                        new Program(this, program_number, programMapPID, mLastRecoveredPTS));              }              if (mPSISections.indexOfKey(programMapPID) < 0) { @@ -1098,7 +1300,8 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) {  status_t ATSParser::parsePID(          ABitReader *br, unsigned PID,          unsigned continuity_counter, -        unsigned payload_unit_start_indicator) { +        unsigned payload_unit_start_indicator, +        SyncEvent *event) {      ssize_t sectionIndex = mPSISections.indexOfKey(PID);      if (sectionIndex >= 0) { @@ -1106,14 +1309,18 @@ status_t ATSParser::parsePID(          if (payload_unit_start_indicator) {              if (!section->isEmpty()) { -                return ERROR_UNSUPPORTED; +                ALOGW("parsePID encounters payload_unit_start_indicator when section is not empty"); +                section->clear();              }              unsigned skip = br->getBits(8); +            section->setSkipBytes(skip + 1);  // skip filler bytes + pointer field itself              br->skipBits(skip * 8);          } -        CHECK((br->numBitsLeft() % 8) == 0); +        if (br->numBitsLeft() % 8 != 0) { +            return ERROR_MALFORMED; +        }          status_t err = section->append(br->data(), br->numBitsLeft() / 8);          if (err != OK) { @@ -1124,6 +1331,9 @@ status_t ATSParser::parsePID(              return OK;          } +        if (!section->isCRCOkay()) { +            return BAD_VALUE; +        }          ABitReader 
sectionBits(section->data(), section->size());          if (PID == 0) { @@ -1163,7 +1373,7 @@ status_t ATSParser::parsePID(          status_t err;          if (mPrograms.editItemAt(i)->parsePID(                      PID, continuity_counter, payload_unit_start_indicator, -                    br, &err)) { +                    br, &err, event)) {              if (err != OK) {                  return err;              } @@ -1180,10 +1390,15 @@ status_t ATSParser::parsePID(      return OK;  } -void ATSParser::parseAdaptationField(ABitReader *br, unsigned PID) { +status_t ATSParser::parseAdaptationField(ABitReader *br, unsigned PID) {      unsigned adaptation_field_length = br->getBits(8);      if (adaptation_field_length > 0) { +        if (adaptation_field_length * 8 > br->numBitsLeft()) { +            ALOGV("Adaptation field should be included in a single TS packet."); +            return ERROR_MALFORMED; +        } +          unsigned discontinuity_indicator = br->getBits(1);          if (discontinuity_indicator) { @@ -1196,6 +1411,9 @@ void ATSParser::parseAdaptationField(ABitReader *br, unsigned PID) {          size_t numBitsRead = 4;          if (PCR_flag) { +            if (adaptation_field_length * 8 < 52) { +                return ERROR_MALFORMED; +            }              br->skipBits(4);              uint64_t PCR_base = br->getBits(32);              PCR_base = (PCR_base << 1) | br->getBits(1); @@ -1226,13 +1444,12 @@ void ATSParser::parseAdaptationField(ABitReader *br, unsigned PID) {              numBitsRead += 52;          } -        CHECK_GE(adaptation_field_length * 8, numBitsRead); -          br->skipBits(adaptation_field_length * 8 - numBitsRead);      } +    return OK;  } -status_t ATSParser::parseTS(ABitReader *br) { +status_t ATSParser::parseTS(ABitReader *br, SyncEvent *event) {      ALOGV("---");      unsigned sync_byte = br->getBits(8); @@ -1264,15 +1481,16 @@ status_t ATSParser::parseTS(ABitReader *br) {      // ALOGI("PID = 0x%04x, 
continuity_counter = %u", PID, continuity_counter); -    if (adaptation_field_control == 2 || adaptation_field_control == 3) { -        parseAdaptationField(br, PID); -    } -      status_t err = OK; -    if (adaptation_field_control == 1 || adaptation_field_control == 3) { -        err = parsePID( -                br, PID, continuity_counter, payload_unit_start_indicator); +    if (adaptation_field_control == 2 || adaptation_field_control == 3) { +        err = parseAdaptationField(br, PID); +    } +    if (err == OK) { +        if (adaptation_field_control == 1 || adaptation_field_control == 3) { +            err = parsePID(br, PID, continuity_counter, +                    payload_unit_start_indicator, event); +        }      }      ++mNumTSPacketsParsed; @@ -1346,7 +1564,79 @@ void ATSParser::updatePCR(  //////////////////////////////////////////////////////////////////////////////// -ATSParser::PSISection::PSISection() { + +// CRC32 used for PSI section. The table was generated by following command: +// $ python pycrc.py --model crc-32-mpeg --algorithm table-driven --generate c +// Visit http://www.tty1.net/pycrc/index_en.html for more details. 
+uint32_t ATSParser::PSISection::CRC_TABLE[] = { +    0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, +    0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, +    0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, +    0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, +    0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, +    0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, +    0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, +    0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, +    0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, +    0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, +    0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, +    0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, +    0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, +    0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, +    0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, +    0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, +    0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, +    0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, +    0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, +    0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, +    0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, +    0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, +    0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, +    0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, +    0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, +    0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, +    0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, +    0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, +    0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, +    0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, +    0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, +    0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, +    0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, +    0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, +    0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, +    0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, +    0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 
0x285e1d47, +    0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, +    0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, +    0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, +    0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, +    0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, +    0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, +    0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, +    0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, +    0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, +    0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, +    0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, +    0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, +    0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, +    0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, +    0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, +    0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, +    0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, +    0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, +    0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, +    0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, +    0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, +    0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, +    0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, +    0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, +    0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, +    0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, +    0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 +    }; + +ATSParser::PSISection::PSISection() : +    mSkipBytes(0) {  }  ATSParser::PSISection::~PSISection() { @@ -1377,10 +1667,15 @@ status_t ATSParser::PSISection::append(const void *data, size_t size) {      return OK;  } +void ATSParser::PSISection::setSkipBytes(uint8_t skip) { +    mSkipBytes = skip; +} +  void ATSParser::PSISection::clear() {      if (mBuffer != NULL) {          mBuffer->setRange(0, 0);      } +    mSkipBytes = 0;  }  bool ATSParser::PSISection::isComplete() const { @@ -1404,4 +1699,30 @@ size_t ATSParser::PSISection::size() 
const {      return mBuffer == NULL ? 0 : mBuffer->size();  } +bool ATSParser::PSISection::isCRCOkay() const { +    if (!isComplete()) { +        return false; +    } +    uint8_t* data = mBuffer->data(); + +    // Return true if section_syntax_indicator says no section follows the field section_length. +    if ((data[1] & 0x80) == 0) { +        return true; +    } + +    unsigned sectionLength = U16_AT(data + 1) & 0xfff; +    ALOGV("sectionLength %u, skip %u", sectionLength, mSkipBytes); + +    // Skip the preceding field present when payload start indicator is on. +    sectionLength -= mSkipBytes; + +    uint32_t crc = 0xffffffff; +    for(unsigned i = 0; i < sectionLength + 4 /* crc */; i++) { +        uint8_t b = data[i]; +        int index = ((crc >> 24) ^ (b & 0xff)) & 0xff; +        crc = CRC_TABLE[index] ^ (crc << 8); +    } +    ALOGV("crc: %08x\n", crc); +    return (crc == 0); +}  }  // namespace android diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h index 75d76dc..430a8d5 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.h +++ b/media/libstagefright/mpeg2ts/ATSParser.h @@ -22,6 +22,7 @@  #include <media/stagefright/foundation/ABase.h>  #include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaSource.h>  #include <utils/KeyedVector.h>  #include <utils/Vector.h>  #include <utils/RefBase.h> @@ -30,7 +31,6 @@ namespace android {  class ABitReader;  struct ABuffer; -struct MediaSource;  struct ATSParser : public RefBase {      enum DiscontinuityType { @@ -46,6 +46,9 @@ struct ATSParser : public RefBase {              DISCONTINUITY_AUDIO_FORMAT                  | DISCONTINUITY_VIDEO_FORMAT                  | DISCONTINUITY_TIME, +        DISCONTINUITY_FORMAT_ONLY       = +            DISCONTINUITY_AUDIO_FORMAT +                | DISCONTINUITY_VIDEO_FORMAT,      };      enum Flags { @@ -59,9 +62,43 @@ struct ATSParser : public RefBase {          ALIGNED_VIDEO_DATA         = 2,      
}; +    // Event is used to signal sync point event at feedTSPacket(). +    struct SyncEvent { +        SyncEvent(off64_t offset); + +        void init(off64_t offset, const sp<MediaSource> &source, +                int64_t timeUs); + +        bool isInit() { return mInit; } +        off64_t getOffset() { return mOffset; } +        const sp<MediaSource> &getMediaSource() { return mMediaSource; } +        int64_t getTimeUs() { return mTimeUs; } + +    private: +        bool mInit; +        /* +         * mInit == false: the current offset +         * mInit == true: the start offset of sync payload +         */ +        off64_t mOffset; +        /* The media source object for this event. */ +        sp<MediaSource> mMediaSource; +        /* The timestamp of the sync frame. */ +        int64_t mTimeUs; +    }; +      ATSParser(uint32_t flags = 0); -    status_t feedTSPacket(const void *data, size_t size); +    // Feed a TS packet into the parser. uninitialized event with the start +    // offset of this TS packet goes in, and if the parser detects PES with +    // a sync frame, the event will be initiailzed with the start offset of the +    // PES. Note that the offset of the event can be different from what we fed, +    // as a PES may consist of multiple TS packets. +    // +    // Even in the case feedTSPacket() returns non-OK value, event still may be +    // initialized if the parsing failed after the detection. 
+    status_t feedTSPacket( +            const void *data, size_t size, SyncEvent *event = NULL);      void signalDiscontinuity(              DiscontinuityType type, const sp<AMessage> &extra); @@ -71,7 +108,8 @@ struct ATSParser : public RefBase {      enum SourceType {          VIDEO = 0,          AUDIO = 1, -        NUM_SOURCE_TYPES = 2 +        META  = 2, +        NUM_SOURCE_TYPES = 3      };      sp<MediaSource> getSource(SourceType type);      bool hasSource(SourceType type) const; @@ -87,6 +125,7 @@ struct ATSParser : public RefBase {          STREAMTYPE_MPEG2_AUDIO          = 0x04,          STREAMTYPE_MPEG2_AUDIO_ADTS     = 0x0f,          STREAMTYPE_MPEG4_VIDEO          = 0x10, +        STREAMTYPE_METADATA             = 0x15,          STREAMTYPE_H264                 = 0x1b,          // From ATSC A/53 Part 3:2009, 6.7.1 @@ -115,20 +154,31 @@ private:      bool mTimeOffsetValid;      int64_t mTimeOffsetUs; +    int64_t mLastRecoveredPTS;      size_t mNumTSPacketsParsed;      void parseProgramAssociationTable(ABitReader *br);      void parseProgramMap(ABitReader *br); -    void parsePES(ABitReader *br); - +    // Parse PES packet where br is pointing to. If the PES contains a sync +    // frame, set event with the time and the start offset of this PES. +    // Note that the method itself does not touch event. +    void parsePES(ABitReader *br, SyncEvent *event); + +    // Strip remaining packet headers and pass to appropriate program/stream +    // to parse the payload. If the payload turns out to be PES and contains +    // a sync frame, event shall be set with the time and start offset of the +    // PES. +    // Note that the method itself does not touch event.      
status_t parsePID(          ABitReader *br, unsigned PID,          unsigned continuity_counter, -        unsigned payload_unit_start_indicator); +        unsigned payload_unit_start_indicator, +        SyncEvent *event); -    void parseAdaptationField(ABitReader *br, unsigned PID); -    status_t parseTS(ABitReader *br); +    status_t parseAdaptationField(ABitReader *br, unsigned PID); +    // see feedTSPacket(). +    status_t parseTS(ABitReader *br, SyncEvent *event);      void updatePCR(unsigned PID, uint64_t PCR, size_t byteOffsetFromStart); diff --git a/media/libstagefright/mpeg2ts/Android.mk b/media/libstagefright/mpeg2ts/Android.mk index c17a0b7..16b0160 100644 --- a/media/libstagefright/mpeg2ts/Android.mk +++ b/media/libstagefright/mpeg2ts/Android.mk @@ -13,7 +13,8 @@ LOCAL_C_INCLUDES:= \  	$(TOP)/frameworks/av/media/libstagefright \  	$(TOP)/frameworks/native/include/media/openmax -LOCAL_CFLAGS += -Werror +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  LOCAL_MODULE:= libstagefright_mpeg2ts diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp index f266fe7..cabde32 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp @@ -19,6 +19,8 @@  #include "AnotherPacketSource.h" +#include "include/avc_utils.h" +  #include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AMessage.h> @@ -27,6 +29,7 @@  #include <media/stagefright/MediaBuffer.h>  #include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MetaData.h> +#include <media/stagefright/Utils.h>  #include <utils/Vector.h>  #include <inttypes.h> @@ -38,17 +41,22 @@ const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs  AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)      : mIsAudio(false),        mIsVideo(false), +      mEnabled(true),        mFormat(NULL),        
mLastQueuedTimeUs(0),        mEOSResult(OK),        mLatestEnqueuedMeta(NULL), -      mLatestDequeuedMeta(NULL), -      mQueuedDiscontinuityCount(0) { +      mLatestDequeuedMeta(NULL) {      setFormat(meta); + +    mDiscontinuitySegments.push_back(DiscontinuitySegment());  }  void AnotherPacketSource::setFormat(const sp<MetaData> &meta) { -    CHECK(mFormat == NULL); +    if (mFormat != NULL) { +        // Only allowed to be set once. Requires explicit clear to reset. +        return; +    }      mIsAudio = false;      mIsVideo = false; @@ -66,7 +74,7 @@ void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {      } else  if (!strncasecmp("video/", mime, 6)) {          mIsVideo = true;      } else { -        CHECK(!strncasecmp("text/", mime, 5)); +        CHECK(!strncasecmp("text/", mime, 5) || !strncasecmp("application/", mime, 12));      }  } @@ -91,13 +99,12 @@ sp<MetaData> AnotherPacketSource::getFormat() {      while (it != mBuffers.end()) {          sp<ABuffer> buffer = *it;          int32_t discontinuity; -        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { -            break; -        } - -        sp<RefBase> object; -        if (buffer->meta()->findObject("format", &object)) { -            return mFormat = static_cast<MetaData*>(object.get()); +        if (!buffer->meta()->findInt32("discontinuity", &discontinuity)) { +            sp<RefBase> object; +            if (buffer->meta()->findObject("format", &object)) { +                setFormat(static_cast<MetaData*>(object.get())); +                return mFormat; +            }          }          ++it; @@ -123,15 +130,24 @@ status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {                  mFormat.clear();              } -            --mQueuedDiscontinuityCount; +            mDiscontinuitySegments.erase(mDiscontinuitySegments.begin()); +            // CHECK(!mDiscontinuitySegments.empty());              return INFO_DISCONTINUITY;          } +        // 
CHECK(!mDiscontinuitySegments.empty()); +        DiscontinuitySegment &seg = *mDiscontinuitySegments.begin(); + +        int64_t timeUs;          mLatestDequeuedMeta = (*buffer)->meta()->dup(); +        CHECK(mLatestDequeuedMeta->findInt64("timeUs", &timeUs)); +        if (timeUs > seg.mMaxDequeTimeUs) { +            seg.mMaxDequeTimeUs = timeUs; +        }          sp<RefBase> object;          if ((*buffer)->meta()->findObject("format", &object)) { -            mFormat = static_cast<MetaData*>(object.get()); +            setFormat(static_cast<MetaData*>(object.get()));          }          return OK; @@ -140,6 +156,12 @@ status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) {      return mEOSResult;  } +void AnotherPacketSource::requeueAccessUnit(const sp<ABuffer> &buffer) { +    // TODO: update corresponding book keeping info. +    Mutex::Autolock autoLock(mLock); +    mBuffers.push_front(buffer); +} +  status_t AnotherPacketSource::read(          MediaBuffer **out, const ReadOptions *) {      *out = NULL; @@ -153,7 +175,6 @@ status_t AnotherPacketSource::read(          const sp<ABuffer> buffer = *mBuffers.begin();          mBuffers.erase(mBuffers.begin()); -        mLatestDequeuedMeta = buffer->meta()->dup();          int32_t discontinuity;          if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { @@ -161,21 +182,40 @@ status_t AnotherPacketSource::read(                  mFormat.clear();              } +            mDiscontinuitySegments.erase(mDiscontinuitySegments.begin()); +            // CHECK(!mDiscontinuitySegments.empty());              return INFO_DISCONTINUITY;          } +        mLatestDequeuedMeta = buffer->meta()->dup(); +          sp<RefBase> object;          if (buffer->meta()->findObject("format", &object)) { -            mFormat = static_cast<MetaData*>(object.get()); +            setFormat(static_cast<MetaData*>(object.get()));          }          int64_t timeUs;          CHECK(buffer->meta()->findInt64("timeUs", 
&timeUs)); +        // CHECK(!mDiscontinuitySegments.empty()); +        DiscontinuitySegment &seg = *mDiscontinuitySegments.begin(); +        if (timeUs > seg.mMaxDequeTimeUs) { +            seg.mMaxDequeTimeUs = timeUs; +        }          MediaBuffer *mediaBuffer = new MediaBuffer(buffer);          mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs); +        int32_t isSync; +        if (buffer->meta()->findInt32("isSync", &isSync)) { +            mediaBuffer->meta_data()->setInt32(kKeyIsSyncFrame, isSync); +        } + +        sp<ABuffer> sei; +        if (buffer->meta()->findBuffer("sei", &sei) && sei != NULL) { +            mediaBuffer->meta_data()->setData(kKeySEI, 0, sei->data(), sei->size()); +        } +          *out = mediaBuffer;          return OK;      } @@ -203,18 +243,35 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {          return;      } -    int64_t lastQueuedTimeUs; -    CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs)); -    mLastQueuedTimeUs = lastQueuedTimeUs; -    ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); -      Mutex::Autolock autoLock(mLock);      mBuffers.push_back(buffer);      mCondition.signal();      int32_t discontinuity; -    if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { -        ++mQueuedDiscontinuityCount; +    if (buffer->meta()->findInt32("discontinuity", &discontinuity)){ +        ALOGV("queueing a discontinuity with queueAccessUnit"); + +        mLastQueuedTimeUs = 0ll; +        mEOSResult = OK; +        mLatestEnqueuedMeta = NULL; + +        mDiscontinuitySegments.push_back(DiscontinuitySegment()); +        return; +    } + +    int64_t lastQueuedTimeUs; +    CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs)); +    mLastQueuedTimeUs = lastQueuedTimeUs; +    ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", +            mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); + +    // 
CHECK(!mDiscontinuitySegments.empty()); +    DiscontinuitySegment &tailSeg = *(--mDiscontinuitySegments.end()); +    if (lastQueuedTimeUs > tailSeg.mMaxEnqueTimeUs) { +        tailSeg.mMaxEnqueTimeUs = lastQueuedTimeUs; +    } +    if (tailSeg.mMaxDequeTimeUs == -1) { +        tailSeg.mMaxDequeTimeUs = lastQueuedTimeUs;      }      if (mLatestEnqueuedMeta == NULL) { @@ -240,7 +297,9 @@ void AnotherPacketSource::clear() {      mBuffers.clear();      mEOSResult = OK; -    mQueuedDiscontinuityCount = 0; + +    mDiscontinuitySegments.clear(); +    mDiscontinuitySegments.push_back(DiscontinuitySegment());      mFormat = NULL;      mLatestEnqueuedMeta = NULL; @@ -267,6 +326,14 @@ void AnotherPacketSource::queueDiscontinuity(              ++it;          } + +        for (List<DiscontinuitySegment>::iterator it2 = mDiscontinuitySegments.begin(); +                it2 != mDiscontinuitySegments.end(); +                ++it2) { +            DiscontinuitySegment &seg = *it2; +            seg.clear(); +        } +      }      mEOSResult = OK; @@ -277,7 +344,8 @@ void AnotherPacketSource::queueDiscontinuity(          return;      } -    ++mQueuedDiscontinuityCount; +    mDiscontinuitySegments.push_back(DiscontinuitySegment()); +      sp<ABuffer> buffer = new ABuffer(0);      buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));      buffer->meta()->setMessage("extra", extra); @@ -296,6 +364,10 @@ void AnotherPacketSource::signalEOS(status_t result) {  bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {      Mutex::Autolock autoLock(mLock); +    *finalResult = OK; +    if (!mEnabled) { +        return false; +    }      if (!mBuffers.empty()) {          return true;      } @@ -304,86 +376,53 @@ bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {      return false;  } -int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) { +bool AnotherPacketSource::hasDataBufferAvailable(status_t *finalResult) {      
Mutex::Autolock autoLock(mLock); -    return getBufferedDurationUs_l(finalResult); -} - -int64_t AnotherPacketSource::getBufferedDurationUs_l(status_t *finalResult) { -    *finalResult = mEOSResult; - -    if (mBuffers.empty()) { -        return 0; +    *finalResult = OK; +    if (!mEnabled) { +        return false;      } - -    int64_t time1 = -1; -    int64_t time2 = -1; -    int64_t durationUs = 0; - -    List<sp<ABuffer> >::iterator it = mBuffers.begin(); -    while (it != mBuffers.end()) { -        const sp<ABuffer> &buffer = *it; - -        int64_t timeUs; -        if (buffer->meta()->findInt64("timeUs", &timeUs)) { -            if (time1 < 0 || timeUs < time1) { -                time1 = timeUs; -            } - -            if (time2 < 0 || timeUs > time2) { -                time2 = timeUs; -            } -        } else { -            // This is a discontinuity, reset everything. -            durationUs += time2 - time1; -            time1 = time2 = -1; +    List<sp<ABuffer> >::iterator it; +    for (it = mBuffers.begin(); it != mBuffers.end(); it++) { +        int32_t discontinuity; +        if (!(*it)->meta()->findInt32("discontinuity", &discontinuity)) { +            return true;          } - -        ++it;      } -    return durationUs + (time2 - time1); +    *finalResult = mEOSResult; +    return false;  } -// A cheaper but less precise version of getBufferedDurationUs that we would like to use in -// LiveSession::dequeueAccessUnit to trigger downwards adaptation. 
-int64_t AnotherPacketSource::getEstimatedDurationUs() { +size_t AnotherPacketSource::getAvailableBufferCount(status_t *finalResult) {      Mutex::Autolock autoLock(mLock); -    if (mBuffers.empty()) { -        return 0; -    } - -    if (mQueuedDiscontinuityCount > 0) { -        status_t finalResult; -        return getBufferedDurationUs_l(&finalResult); -    } -    List<sp<ABuffer> >::iterator it = mBuffers.begin(); -    sp<ABuffer> buffer = *it; - -    int64_t startTimeUs; -    buffer->meta()->findInt64("timeUs", &startTimeUs); -    if (startTimeUs < 0) { +    *finalResult = OK; +    if (!mEnabled) {          return 0;      } +    if (!mBuffers.empty()) { +        return mBuffers.size(); +    } +    *finalResult = mEOSResult; +    return 0; +} -    it = mBuffers.end(); -    --it; -    buffer = *it; +int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) { +    Mutex::Autolock autoLock(mLock); +    *finalResult = mEOSResult; -    int64_t endTimeUs; -    buffer->meta()->findInt64("timeUs", &endTimeUs); -    if (endTimeUs < 0) { -        return 0; +    int64_t durationUs = 0; +    for (List<DiscontinuitySegment>::iterator it = mDiscontinuitySegments.begin(); +            it != mDiscontinuitySegments.end(); +            ++it) { +        const DiscontinuitySegment &seg = *it; +        // dequeued access units should be a subset of enqueued access units +        // CHECK(seg.maxEnqueTimeUs >= seg.mMaxDequeTimeUs); +        durationUs += (seg.mMaxEnqueTimeUs - seg.mMaxDequeTimeUs);      } -    int64_t diffUs; -    if (endTimeUs > startTimeUs) { -        diffUs = endTimeUs - startTimeUs; -    } else { -        diffUs = startTimeUs - endTimeUs; -    } -    return diffUs; +    return durationUs;  }  status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) { @@ -422,4 +461,171 @@ sp<AMessage> AnotherPacketSource::getLatestDequeuedMeta() {      return mLatestDequeuedMeta;  } +void AnotherPacketSource::enable(bool enable) { +    Mutex::Autolock 
autoLock(mLock); +    mEnabled = enable; +} + +/* + * returns the sample meta that's delayUs after queue head + * (NULL if such sample is unavailable) + */ +sp<AMessage> AnotherPacketSource::getMetaAfterLastDequeued(int64_t delayUs) { +    Mutex::Autolock autoLock(mLock); +    int64_t firstUs = -1; +    int64_t lastUs = -1; +    int64_t durationUs = 0; + +    List<sp<ABuffer> >::iterator it; +    for (it = mBuffers.begin(); it != mBuffers.end(); ++it) { +        const sp<ABuffer> &buffer = *it; +        int32_t discontinuity; +        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { +            durationUs += lastUs - firstUs; +            firstUs = -1; +            lastUs = -1; +            continue; +        } +        int64_t timeUs; +        if (buffer->meta()->findInt64("timeUs", &timeUs)) { +            if (firstUs < 0) { +                firstUs = timeUs; +            } +            if (lastUs < 0 || timeUs > lastUs) { +                lastUs = timeUs; +            } +            if (durationUs + (lastUs - firstUs) >= delayUs) { +                return buffer->meta(); +            } +        } +    } +    return NULL; +} + +/* + * removes samples with time equal or after meta + */ +void AnotherPacketSource::trimBuffersAfterMeta( +        const sp<AMessage> &meta) { +    if (meta == NULL) { +        ALOGW("trimming with NULL meta, ignoring"); +        return; +    } + +    Mutex::Autolock autoLock(mLock); +    if (mBuffers.empty()) { +        return; +    } + +    HLSTime stopTime(meta); +    ALOGV("trimBuffersAfterMeta: discontinuitySeq %d, timeUs %lld", +            stopTime.mSeq, (long long)stopTime.mTimeUs); + +    List<sp<ABuffer> >::iterator it; +    List<DiscontinuitySegment >::iterator it2; +    sp<AMessage> newLatestEnqueuedMeta = NULL; +    int64_t newLastQueuedTimeUs = 0; +    for (it = mBuffers.begin(), it2 = mDiscontinuitySegments.begin(); it != mBuffers.end(); ++it) { +        const sp<ABuffer> &buffer = *it; +        int32_t 
discontinuity; +        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { +            // CHECK(it2 != mDiscontinuitySegments.end()); +            ++it2; +            continue; +        } + +        HLSTime curTime(buffer->meta()); +        if (!(curTime < stopTime)) { +            ALOGV("trimming from %lld (inclusive) to end", +                    (long long)curTime.mTimeUs); +            break; +        } +        newLatestEnqueuedMeta = buffer->meta(); +        newLastQueuedTimeUs = curTime.mTimeUs; +    } + +    mBuffers.erase(it, mBuffers.end()); +    mLatestEnqueuedMeta = newLatestEnqueuedMeta; +    mLastQueuedTimeUs = newLastQueuedTimeUs; + +    DiscontinuitySegment &seg = *it2; +    if (newLatestEnqueuedMeta != NULL) { +        seg.mMaxEnqueTimeUs = newLastQueuedTimeUs; +    } else { +        seg.clear(); +    } +    mDiscontinuitySegments.erase(++it2, mDiscontinuitySegments.end()); +} + +/* + * removes samples with time equal or before meta; + * returns first sample left in the queue. + * + * (for AVC, if trim happens, the samples left will always start + * at next IDR.) 
+ */ +sp<AMessage> AnotherPacketSource::trimBuffersBeforeMeta( +        const sp<AMessage> &meta) { +    HLSTime startTime(meta); +    ALOGV("trimBuffersBeforeMeta: discontinuitySeq %d, timeUs %lld", +            startTime.mSeq, (long long)startTime.mTimeUs); + +    sp<AMessage> firstMeta; +    int64_t firstTimeUs = -1; +    Mutex::Autolock autoLock(mLock); +    if (mBuffers.empty()) { +        return NULL; +    } + +    sp<MetaData> format; +    bool isAvc = false; + +    List<sp<ABuffer> >::iterator it; +    for (it = mBuffers.begin(); it != mBuffers.end(); ++it) { +        const sp<ABuffer> &buffer = *it; +        int32_t discontinuity; +        if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { +            mDiscontinuitySegments.erase(mDiscontinuitySegments.begin()); +            // CHECK(!mDiscontinuitySegments.empty()); +            format = NULL; +            isAvc = false; +            continue; +        } +        if (format == NULL) { +            sp<RefBase> object; +            if (buffer->meta()->findObject("format", &object)) { +                const char* mime; +                format = static_cast<MetaData*>(object.get()); +                isAvc = format != NULL +                        && format->findCString(kKeyMIMEType, &mime) +                        && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC); +            } +        } +        if (isAvc && !IsIDR(buffer)) { +            continue; +        } + +        HLSTime curTime(buffer->meta()); +        if (startTime < curTime) { +            ALOGV("trimming from beginning to %lld (not inclusive)", +                    (long long)curTime.mTimeUs); +            firstMeta = buffer->meta(); +            firstTimeUs = curTime.mTimeUs; +            break; +        } +    } +    mBuffers.erase(mBuffers.begin(), it); +    mLatestDequeuedMeta = NULL; + +    // CHECK(!mDiscontinuitySegments.empty()); +    DiscontinuitySegment &seg = *mDiscontinuitySegments.begin(); +    if (firstTimeUs >= 0) { 
+        seg.mMaxDequeTimeUs = firstTimeUs; +    } else { +        seg.clear(); +    } + +    return firstMeta; +} +  }  // namespace android diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h index 809a858..28a0e89 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h @@ -43,14 +43,20 @@ struct AnotherPacketSource : public MediaSource {      void clear(); +    // Returns true if we have any packets including discontinuities      bool hasBufferAvailable(status_t *finalResult); +    // Returns true if we have packets that's not discontinuities +    bool hasDataBufferAvailable(status_t *finalResult); + +    // Returns the number of available buffers. finalResult is always OK +    // if this method returns non-0, or the final result if it returns 0. +    size_t getAvailableBufferCount(status_t *finalResult); +      // Returns the difference between the last and the first queued      // presentation timestamps since the last discontinuity (if any).      
int64_t getBufferedDurationUs(status_t *finalResult); -    int64_t getEstimatedDurationUs(); -      status_t nextBufferTime(int64_t *timeUs);      void queueAccessUnit(const sp<ABuffer> &buffer); @@ -63,21 +69,48 @@ struct AnotherPacketSource : public MediaSource {      void signalEOS(status_t result);      status_t dequeueAccessUnit(sp<ABuffer> *buffer); +    void requeueAccessUnit(const sp<ABuffer> &buffer);      bool isFinished(int64_t duration) const; +    void enable(bool enable); +      sp<AMessage> getLatestEnqueuedMeta();      sp<AMessage> getLatestDequeuedMeta(); +    sp<AMessage> getMetaAfterLastDequeued(int64_t delayUs); + +    void trimBuffersAfterMeta(const sp<AMessage> &meta); +    sp<AMessage> trimBuffersBeforeMeta(const sp<AMessage> &meta);  protected:      virtual ~AnotherPacketSource();  private: + +    struct DiscontinuitySegment { +        int64_t mMaxDequeTimeUs, mMaxEnqueTimeUs; +        DiscontinuitySegment() +            : mMaxDequeTimeUs(-1), +              mMaxEnqueTimeUs(-1) { +        }; + +        void clear() { +            mMaxDequeTimeUs = mMaxEnqueTimeUs = -1; +        } +    }; + +    // Discontinuity segments are consecutive access units between +    // discontinuity markers. There should always be at least _ONE_ +    // discontinuity segment, hence the various CHECKs in +    // AnotherPacketSource.cpp for non-empty()-ness. 
+    List<DiscontinuitySegment> mDiscontinuitySegments; +      Mutex mLock;      Condition mCondition;      bool mIsAudio;      bool mIsVideo; +    bool mEnabled;      sp<MetaData> mFormat;      int64_t mLastQueuedTimeUs;      List<sp<ABuffer> > mBuffers; @@ -85,10 +118,7 @@ private:      sp<AMessage> mLatestEnqueuedMeta;      sp<AMessage> mLatestDequeuedMeta; -    size_t  mQueuedDiscontinuityCount; -      bool wasFormatChange(int32_t discontinuityType) const; -    int64_t getBufferedDurationUs_l(status_t *finalResult);      DISALLOW_EVIL_CONSTRUCTORS(AnotherPacketSource);  }; diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp index 5527df0..36ec367 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -56,6 +56,8 @@ void ElementaryStreamQueue::clear(bool clearFormat) {      if (clearFormat) {          mFormat.clear();      } + +    mEOSReached = false;  }  // Parse AC3 header assuming the current ptr is start position of syncframe, @@ -415,13 +417,14 @@ status_t ElementaryStreamQueue::appendData(              }              case PCM_AUDIO: +            case METADATA:              {                  break;              }              default: -                TRESPASS(); -                break; +                ALOGE("Unknown mode: %d", mMode); +                return ERROR_MALFORMED;          }      } @@ -499,8 +502,13 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnit() {              return dequeueAccessUnitMPEG4Video();          case PCM_AUDIO:              return dequeueAccessUnitPCMAudio(); +        case METADATA: +            return dequeueAccessUnitMetadata();          default: -            CHECK_EQ((unsigned)mMode, (unsigned)MPEG_AUDIO); +            if (mMode != MPEG_AUDIO) { +                ALOGE("Unknown mode"); +                return NULL; +            }              return dequeueAccessUnitMPEGAudio();      }  } @@ -537,8 +545,12 @@ sp<ABuffer> 
ElementaryStreamQueue::dequeueAccessUnitAC3() {      memcpy(accessUnit->data(), mBuffer->data(), syncStartPos + payloadSize);      int64_t timeUs = fetchTimestamp(syncStartPos + payloadSize); -    CHECK_GE(timeUs, 0ll); +    if (timeUs < 0ll) { +        ALOGE("negative timeUs"); +        return NULL; +    }      accessUnit->meta()->setInt64("timeUs", timeUs); +    accessUnit->meta()->setInt32("isSync", 1);      memmove(              mBuffer->data(), @@ -556,15 +568,24 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitPCMAudio() {      }      ABitReader bits(mBuffer->data(), 4); -    CHECK_EQ(bits.getBits(8), 0xa0); +    if (bits.getBits(8) != 0xa0) { +        ALOGE("Unexpected bit values"); +        return NULL; +    }      unsigned numAUs = bits.getBits(8);      bits.skipBits(8);      unsigned quantization_word_length __unused = bits.getBits(2);      unsigned audio_sampling_frequency = bits.getBits(3);      unsigned num_channels = bits.getBits(3); -    CHECK_EQ(audio_sampling_frequency, 2);  // 48kHz -    CHECK_EQ(num_channels, 1u);  // stereo! 
+    if (audio_sampling_frequency != 2) { +        ALOGE("Wrong sampling freq"); +        return NULL; +    } +    if (num_channels != 1u) { +        ALOGE("Wrong channel #"); +        return NULL; +    }      if (mFormat == NULL) {          mFormat = new MetaData; @@ -586,8 +607,12 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitPCMAudio() {      memcpy(accessUnit->data(), mBuffer->data() + 4, payloadSize);      int64_t timeUs = fetchTimestamp(payloadSize + 4); -    CHECK_GE(timeUs, 0ll); +    if (timeUs < 0ll) { +        ALOGE("Negative timeUs"); +        return NULL; +    }      accessUnit->meta()->setInt64("timeUs", timeUs); +    accessUnit->meta()->setInt32("isSync", 1);      int16_t *ptr = (int16_t *)accessUnit->data();      for (size_t i = 0; i < payloadSize / sizeof(int16_t); ++i) { @@ -609,22 +634,25 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {          return NULL;      } -    CHECK(!mRangeInfos.empty()); +    if (mRangeInfos.empty()) { +        return NULL; +    }      const RangeInfo &info = *mRangeInfos.begin();      if (mBuffer->size() < info.mLength) {          return NULL;      } -    CHECK_GE(info.mTimestampUs, 0ll); +    if (info.mTimestampUs < 0ll) { +        ALOGE("Negative info.mTimestampUs"); +        return NULL; +    }      // The idea here is consume all AAC frames starting at offsets before      // info.mLength so we can assign a meaningful timestamp without      // having to interpolate.      // The final AAC frame may well extend into the next RangeInfo but      // that's ok. 
-    // TODO: the logic commented above is skipped because codec cannot take -    // arbitrary sized input buffers;      size_t offset = 0;      while (offset < info.mLength) {          if (offset + 7 > mBuffer->size()) { @@ -635,17 +663,26 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {          // adts_fixed_header -        CHECK_EQ(bits.getBits(12), 0xfffu); +        if (bits.getBits(12) != 0xfffu) { +            ALOGE("Wrong atds_fixed_header"); +            return NULL; +        }          bits.skipBits(3);  // ID, layer          bool protection_absent __unused = bits.getBits(1) != 0;          if (mFormat == NULL) {              unsigned profile = bits.getBits(2); -            CHECK_NE(profile, 3u); +            if (profile == 3u) { +                ALOGE("profile should not be 3"); +                return NULL; +            }              unsigned sampling_freq_index = bits.getBits(4);              bits.getBits(1);  // private_bit              unsigned channel_configuration = bits.getBits(3); -            CHECK_NE(channel_configuration, 0u); +            if (channel_configuration == 0u) { +                ALOGE("channel_config should not be 0"); +                return NULL; +            }              bits.skipBits(2);  // original_copy, home              mFormat = MakeAACCodecSpecificData( @@ -655,8 +692,14 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {              int32_t sampleRate;              int32_t numChannels; -            CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate)); -            CHECK(mFormat->findInt32(kKeyChannelCount, &numChannels)); +            if (!mFormat->findInt32(kKeySampleRate, &sampleRate)) { +                ALOGE("SampleRate not found"); +                return NULL; +            } +            if (!mFormat->findInt32(kKeyChannelCount, &numChannels)) { +                ALOGE("ChannelCount not found"); +                return NULL; +            }              ALOGI("found AAC codec config (%d 
Hz, %d channels)",                   sampleRate, numChannels); @@ -679,7 +722,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {          if (number_of_raw_data_blocks_in_frame != 0) {              // To be implemented. -            TRESPASS(); +            ALOGE("Should not reach here."); +            return NULL;          }          if (offset + aac_frame_length > mBuffer->size()) { @@ -689,12 +733,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {          size_t headerSize __unused = protection_absent ? 7 : 9;          offset += aac_frame_length; -        // TODO: move back to concatenation when codec can support arbitrary input buffers. -        // For now only queue a single buffer -        break;      } -    int64_t timeUs = fetchTimestampAAC(offset); +    int64_t timeUs = fetchTimestamp(offset);      sp<ABuffer> accessUnit = new ABuffer(offset);      memcpy(accessUnit->data(), mBuffer->data(), offset); @@ -704,6 +745,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {      mBuffer->setRange(0, mBuffer->size() - offset);      accessUnit->meta()->setInt64("timeUs", timeUs); +    accessUnit->meta()->setInt32("isSync", 1);      return accessUnit;  } @@ -713,43 +755,10 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {      bool first = true;      while (size > 0) { -        CHECK(!mRangeInfos.empty()); - -        RangeInfo *info = &*mRangeInfos.begin(); - -        if (first) { -            timeUs = info->mTimestampUs; -            first = false; -        } - -        if (info->mLength > size) { -            info->mLength -= size; -            size = 0; -        } else { -            size -= info->mLength; - -            mRangeInfos.erase(mRangeInfos.begin()); -            info = NULL; +        if (mRangeInfos.empty()) { +            return timeUs;          } -    } - -    if (timeUs == 0ll) { -        ALOGV("Returning 0 timestamp"); -    } - -    return timeUs; -} - -// TODO: avoid interpolating timestamps 
once codec supports arbitrary sized input buffers -int64_t ElementaryStreamQueue::fetchTimestampAAC(size_t size) { -    int64_t timeUs = -1; -    bool first = true; - -    size_t samplesize = size; -    while (size > 0) { -        CHECK(!mRangeInfos.empty()); -          RangeInfo *info = &*mRangeInfos.begin();          if (first) { @@ -758,11 +767,7 @@ int64_t ElementaryStreamQueue::fetchTimestampAAC(size_t size) {          }          if (info->mLength > size) { -            int32_t sampleRate; -            CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate));              info->mLength -= size; -            size_t numSamples = 1024 * size / samplesize; -            info->mTimestampUs += numSamples * 1000000ll / sampleRate;              size = 0;          } else {              size -= info->mLength; @@ -780,11 +785,6 @@ int64_t ElementaryStreamQueue::fetchTimestampAAC(size_t size) {      return timeUs;  } -struct NALPosition { -    size_t nalOffset; -    size_t nalSize; -}; -  sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {      const uint8_t *data = mBuffer->data(); @@ -792,11 +792,13 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {      Vector<NALPosition> nals;      size_t totalSize = 0; +    size_t seiCount = 0;      status_t err;      const uint8_t *nalStart;      size_t nalSize;      bool foundSlice = false; +    bool foundIDR = false;      while ((err = getNextNALUnit(&data, &size, &nalStart, &nalSize)) == OK) {          if (nalSize == 0) continue; @@ -804,6 +806,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {          bool flush = false;          if (nalType == 1 || nalType == 5) { +            if (nalType == 5) { +                foundIDR = true; +            }              if (foundSlice) {                  ABitReader br(nalStart + 1, nalSize);                  unsigned first_mb_in_slice = parseUE(&br); @@ -821,6 +826,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {              // next 
frame.              flush = true; +        } else if (nalType == 6 && nalSize > 0) { +            // found non-zero sized SEI +            ++seiCount;          }          if (flush) { @@ -829,21 +837,32 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {              size_t auSize = 4 * nals.size() + totalSize;              sp<ABuffer> accessUnit = new ABuffer(auSize); +            sp<ABuffer> sei; + +            if (seiCount > 0) { +                sei = new ABuffer(seiCount * sizeof(NALPosition)); +                accessUnit->meta()->setBuffer("sei", sei); +            }  #if !LOG_NDEBUG              AString out;  #endif              size_t dstOffset = 0; +            size_t seiIndex = 0;              for (size_t i = 0; i < nals.size(); ++i) {                  const NALPosition &pos = nals.itemAt(i);                  unsigned nalType = mBuffer->data()[pos.nalOffset] & 0x1f; -                if (nalType == 6) { -                    sp<ABuffer> sei = new ABuffer(pos.nalSize); -                    memcpy(sei->data(), mBuffer->data() + pos.nalOffset, pos.nalSize); -                    accessUnit->meta()->setBuffer("sei", sei); +                if (nalType == 6 && pos.nalSize > 0) { +                    if (seiIndex >= sei->size() / sizeof(NALPosition)) { +                        ALOGE("Wrong seiIndex"); +                        return NULL; +                    } +                    NALPosition &seiPos = ((NALPosition *)sei->data())[seiIndex++]; +                    seiPos.nalOffset = dstOffset + 4; +                    seiPos.nalSize = pos.nalSize;                  }  #if !LOG_NDEBUG @@ -878,9 +897,15 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {              mBuffer->setRange(0, mBuffer->size() - nextScan);              int64_t timeUs = fetchTimestamp(nextScan); -            CHECK_GE(timeUs, 0ll); +            if (timeUs < 0ll) { +                ALOGE("Negative timeUs"); +                return NULL; +            }              
accessUnit->meta()->setInt64("timeUs", timeUs); +            if (foundIDR) { +                accessUnit->meta()->setInt32("isSync", 1); +            }              if (mFormat == NULL) {                  mFormat = MakeAVCCodecSpecificData(accessUnit); @@ -897,7 +922,10 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {          totalSize += nalSize;      } -    CHECK_EQ(err, (status_t)-EAGAIN); +    if (err != (status_t)-EAGAIN) { +        ALOGE("Unexpeted err"); +        return NULL; +    }      return NULL;  } @@ -914,9 +942,12 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGAudio() {      size_t frameSize;      int samplingRate, numChannels, bitrate, numSamples; -    CHECK(GetMPEGAudioFrameSize( +    if (!GetMPEGAudioFrameSize(                  header, &frameSize, &samplingRate, &numChannels, -                &bitrate, &numSamples)); +                &bitrate, &numSamples)) { +        ALOGE("Failed to get audio frame size"); +        return NULL; +    }      if (size < frameSize) {          return NULL; @@ -934,9 +965,13 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGAudio() {      mBuffer->setRange(0, mBuffer->size() - frameSize);      int64_t timeUs = fetchTimestamp(frameSize); -    CHECK_GE(timeUs, 0ll); +    if (timeUs < 0ll) { +        ALOGE("Negative timeUs"); +        return NULL; +    }      accessUnit->meta()->setInt64("timeUs", timeUs); +    accessUnit->meta()->setInt32("isSync", 1);      if (mFormat == NULL) {          mFormat = new MetaData; @@ -955,7 +990,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGAudio() {                          kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);                  break;              default: -                TRESPASS(); +                return NULL;          }          mFormat->setInt32(kKeySampleRate, samplingRate); @@ -966,7 +1001,10 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGAudio() {  }  static void EncodeSize14(uint8_t **_ptr, size_t size) { -    
CHECK_LE(size, 0x3fff); +    if (size > 0x3fff) { +        ALOGE("Wrong size"); +        return; +    }      uint8_t *ptr = *_ptr; @@ -1013,6 +1051,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {      int pprevStartCode = -1;      int prevStartCode = -1;      int currentStartCode = -1; +    bool gopFound = false; +    bool isClosedGop = false; +    bool brokenLink = false;      size_t offset = 0;      while (offset + 3 < size) { @@ -1038,7 +1079,10 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {              // seqHeader without/with extension              if (mFormat == NULL) { -                CHECK_GE(size, 7u); +                if (size < 7u) { +                    ALOGE("Size too small"); +                    return NULL; +                }                  unsigned width =                      (data[4] << 4) | data[5] >> 4; @@ -1075,6 +1119,13 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {              }          } +        if (mFormat != NULL && currentStartCode == 0xb8) { +            // GOP layer +            gopFound = true; +            isClosedGop = (data[offset + 7] & 0x40) != 0; +            brokenLink = (data[offset + 7] & 0x20) != 0; +        } +          if (mFormat != NULL && currentStartCode == 0x00) {              // Picture start @@ -1091,11 +1142,17 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEGVideo() {                  mBuffer->setRange(0, mBuffer->size() - offset);                  int64_t timeUs = fetchTimestamp(offset); -                CHECK_GE(timeUs, 0ll); +                if (timeUs < 0ll) { +                    ALOGE("Negative timeUs"); +                    return NULL; +                }                  offset = 0;                  accessUnit->meta()->setInt64("timeUs", timeUs); +                if (gopFound && (!brokenLink || isClosedGop)) { +                    accessUnit->meta()->setInt32("isSync", 1); +                }                  
ALOGV("returning MPEG video access unit at time %" PRId64 " us",                        timeUs); @@ -1121,7 +1178,7 @@ static ssize_t getNextChunkSize(      }      if (memcmp(kStartCode, data, 3)) { -        TRESPASS(); +        return -EAGAIN;      }      size_t offset = 3; @@ -1181,25 +1238,37 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEG4Video() {              case EXPECT_VISUAL_OBJECT_START:              { -                CHECK_EQ(chunkType, 0xb5); +                if (chunkType != 0xb5) { +                    ALOGE("Unexpected chunkType"); +                    return NULL; +                }                  state = EXPECT_VO_START;                  break;              }              case EXPECT_VO_START:              { -                CHECK_LE(chunkType, 0x1f); +                if (chunkType > 0x1f) { +                    ALOGE("Unexpected chunkType"); +                    return NULL; +                }                  state = EXPECT_VOL_START;                  break;              }              case EXPECT_VOL_START:              { -                CHECK((chunkType & 0xf0) == 0x20); +                if ((chunkType & 0xf0) != 0x20) { +                    ALOGE("Wrong chunkType"); +                    return NULL; +                } -                CHECK(ExtractDimensionsFromVOLHeader( +                if (!ExtractDimensionsFromVOLHeader(                              &data[offset], chunkSize, -                            &width, &height)); +                            &width, &height)) { +                    ALOGE("Failed to get dimension"); +                    return NULL; +                }                  state = WAIT_FOR_VOP_START;                  break; @@ -1240,6 +1309,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEG4Video() {              case SKIP_TO_VOP_START:              {                  if (chunkType == 0xb6) { +                    int vopCodingType = (data[offset + 4] & 0xc0) >> 6; +                      offset 
+= chunkSize;                      sp<ABuffer> accessUnit = new ABuffer(offset); @@ -1250,11 +1321,17 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEG4Video() {                      mBuffer->setRange(0, size);                      int64_t timeUs = fetchTimestamp(offset); -                    CHECK_GE(timeUs, 0ll); +                    if (timeUs < 0ll) { +                        ALOGE("Negative timeus"); +                        return NULL; +                    }                      offset = 0;                      accessUnit->meta()->setInt64("timeUs", timeUs); +                    if (vopCodingType == 0) {  // intra-coded VOP +                        accessUnit->meta()->setInt32("isSync", 1); +                    }                      ALOGV("returning MPEG4 video access unit at time %" PRId64 " us",                           timeUs); @@ -1271,7 +1348,8 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMPEG4Video() {              }              default: -                TRESPASS(); +                ALOGE("Unknown state: %d", state); +                return NULL;          }          if (discard) { @@ -1300,5 +1378,25 @@ void ElementaryStreamQueue::signalEOS() {      }  } +sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitMetadata() { +    size_t size = mBuffer->size(); +    if (!size) { +        return NULL; +    } + +    sp<ABuffer> accessUnit = new ABuffer(size); +    int64_t timeUs = fetchTimestamp(size); +    accessUnit->meta()->setInt64("timeUs", timeUs); + +    memcpy(accessUnit->data(), mBuffer->data(), size); +    mBuffer->setRange(0, 0); + +    if (mFormat == NULL) { +        mFormat = new MetaData; +        mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_DATA_TIMED_ID3); +    } + +    return accessUnit; +}  }  // namespace android diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h index a6d812f..e9f96b7 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.h +++ 
b/media/libstagefright/mpeg2ts/ESQueue.h @@ -37,6 +37,7 @@ struct ElementaryStreamQueue {          MPEG_VIDEO,          MPEG4_VIDEO,          PCM_AUDIO, +        METADATA,      };      enum Flags { @@ -75,11 +76,11 @@ private:      sp<ABuffer> dequeueAccessUnitMPEGVideo();      sp<ABuffer> dequeueAccessUnitMPEG4Video();      sp<ABuffer> dequeueAccessUnitPCMAudio(); +    sp<ABuffer> dequeueAccessUnitMetadata();      // consume a logical (compressed) access unit of size "size",      // returns its timestamp in us (or -1 if no time information).      int64_t fetchTimestamp(size_t size); -    int64_t fetchTimestampAAC(size_t size);      DISALLOW_EVIL_CONSTRUCTORS(ElementaryStreamQueue);  }; diff --git a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp index 85859f7..6d9fe9d 100644 --- a/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp +++ b/media/libstagefright/mpeg2ts/MPEG2PSExtractor.cpp @@ -265,7 +265,10 @@ ssize_t MPEG2PSExtractor::dequeuePES() {      }      unsigned PES_packet_length = U16_AT(mBuffer->data() + 4); -    CHECK_NE(PES_packet_length, 0u); +    if (PES_packet_length == 0u) { +        ALOGE("PES_packet_length is 0"); +        return -EAGAIN; +    }      size_t n = PES_packet_length + 6; @@ -286,7 +289,10 @@ ssize_t MPEG2PSExtractor::dequeuePES() {          return ERROR_MALFORMED;      } -    CHECK_EQ(packet_startcode_prefix, 0x000001u); +    if (packet_startcode_prefix != 0x000001u) { +        ALOGE("Wrong PES prefix"); +        return ERROR_MALFORMED; +    }      unsigned stream_id = br.getBits(8);      ALOGV("stream_id = 0x%02x", stream_id); @@ -366,8 +372,7 @@ ssize_t MPEG2PSExtractor::dequeuePES() {              && stream_id != 0xff  // program_stream_directory              && stream_id != 0xf2  // DSMCC              && stream_id != 0xf8) {  // H.222.1 type E -        CHECK_EQ(br.getBits(2), 2u); - +        /* unsigned PES_marker_bits = */br.getBits(2);  // should be 0x2(hex)          /* unsigned 
PES_scrambling_control = */br.getBits(2);          /* unsigned PES_priority = */br.getBits(1);          /* unsigned data_alignment_indicator = */br.getBits(1); @@ -400,16 +405,26 @@ ssize_t MPEG2PSExtractor::dequeuePES() {          uint64_t PTS = 0, DTS = 0;          if (PTS_DTS_flags == 2 || PTS_DTS_flags == 3) { -            CHECK_GE(optional_bytes_remaining, 5u); +            if (optional_bytes_remaining < 5u) { +                return ERROR_MALFORMED; +            } -            CHECK_EQ(br.getBits(4), PTS_DTS_flags); +            if (br.getBits(4) != PTS_DTS_flags) { +                return ERROR_MALFORMED; +            }              PTS = ((uint64_t)br.getBits(3)) << 30; -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              PTS |= ((uint64_t)br.getBits(15)) << 15; -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              PTS |= br.getBits(15); -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ALOGV("PTS = %" PRIu64, PTS);              // ALOGI("PTS = %.2f secs", PTS / 90000.0f); @@ -417,16 +432,26 @@ ssize_t MPEG2PSExtractor::dequeuePES() {              optional_bytes_remaining -= 5;              if (PTS_DTS_flags == 3) { -                CHECK_GE(optional_bytes_remaining, 5u); +                if (optional_bytes_remaining < 5u) { +                    return ERROR_MALFORMED; +                } -                CHECK_EQ(br.getBits(4), 1u); +                if (br.getBits(4) != 1u) { +                    return ERROR_MALFORMED; +                }                  DTS = ((uint64_t)br.getBits(3)) << 30; -                CHECK_EQ(br.getBits(1), 1u); +                if (br.getBits(1) != 1u) { +                    return ERROR_MALFORMED; +                }                  DTS 
|= ((uint64_t)br.getBits(15)) << 15; -                CHECK_EQ(br.getBits(1), 1u); +                if (br.getBits(1) != 1u) { +                    return ERROR_MALFORMED; +                }                  DTS |= br.getBits(15); -                CHECK_EQ(br.getBits(1), 1u); +                if (br.getBits(1) != 1u) { +                    return ERROR_MALFORMED; +                }                  ALOGV("DTS = %" PRIu64, DTS); @@ -435,40 +460,62 @@ ssize_t MPEG2PSExtractor::dequeuePES() {          }          if (ESCR_flag) { -            CHECK_GE(optional_bytes_remaining, 6u); +            if (optional_bytes_remaining < 6u) { +                return ERROR_MALFORMED; +            }              br.getBits(2);              uint64_t ESCR = ((uint64_t)br.getBits(3)) << 30; -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ESCR |= ((uint64_t)br.getBits(15)) << 15; -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ESCR |= br.getBits(15); -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              ALOGV("ESCR = %" PRIu64, ESCR);              /* unsigned ESCR_extension = */br.getBits(9); -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              optional_bytes_remaining -= 6;          }          if (ES_rate_flag) { -            CHECK_GE(optional_bytes_remaining, 3u); +            if (optional_bytes_remaining < 3u) { +                return ERROR_MALFORMED; +            } -            CHECK_EQ(br.getBits(1), 1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              /* unsigned ES_rate = */br.getBits(22); -            CHECK_EQ(br.getBits(1), 
1u); +            if (br.getBits(1) != 1u) { +                return ERROR_MALFORMED; +            }              optional_bytes_remaining -= 3;          } +        if (br.numBitsLeft() < optional_bytes_remaining * 8) { +            return ERROR_MALFORMED; +        } +          br.skipBits(optional_bytes_remaining * 8);          // ES data follows. -        CHECK_GE(PES_packet_length, PES_header_data_length + 3); +        if (PES_packet_length < PES_header_data_length + 3) { +            return ERROR_MALFORMED; +        }          unsigned dataLength =              PES_packet_length - 3 - PES_header_data_length; @@ -481,7 +528,9 @@ ssize_t MPEG2PSExtractor::dequeuePES() {              return ERROR_MALFORMED;          } -        CHECK_GE(br.numBitsLeft(), dataLength * 8); +        if (br.numBitsLeft() < dataLength * 8) { +            return ERROR_MALFORMED; +        }          ssize_t index = mTracks.indexOfKey(stream_id);          if (index < 0 && mScanning) { @@ -521,10 +570,14 @@ ssize_t MPEG2PSExtractor::dequeuePES() {              return err;          }      } else if (stream_id == 0xbe) {  // padding_stream -        CHECK_NE(PES_packet_length, 0u); +        if (PES_packet_length == 0u) { +            return ERROR_MALFORMED; +        }          br.skipBits(PES_packet_length * 8);      } else { -        CHECK_NE(PES_packet_length, 0u); +        if (PES_packet_length == 0u) { +            return ERROR_MALFORMED; +        }          br.skipBits(PES_packet_length * 8);      } diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp index 74cb5d8..cbe9673 100644 --- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp +++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp @@ -16,17 +16,22 @@  //#define LOG_NDEBUG 0  #define LOG_TAG "MPEG2TSExtractor" + +#include <inttypes.h>  #include <utils/Log.h>  #include "include/MPEG2TSExtractor.h"  #include "include/NuCachedSource2.h" +#include 
<media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h>  #include <media/stagefright/DataSource.h>  #include <media/stagefright/MediaDefs.h>  #include <media/stagefright/MediaErrors.h>  #include <media/stagefright/MediaSource.h>  #include <media/stagefright/MetaData.h> +#include <media/IStreamSource.h>  #include <utils/String8.h>  #include "AnotherPacketSource.h" @@ -40,7 +45,7 @@ struct MPEG2TSSource : public MediaSource {      MPEG2TSSource(              const sp<MPEG2TSExtractor> &extractor,              const sp<AnotherPacketSource> &impl, -            bool seekable); +            bool doesSeek);      virtual status_t start(MetaData *params = NULL);      virtual status_t stop(); @@ -54,8 +59,8 @@ private:      sp<AnotherPacketSource> mImpl;      // If there are both audio and video streams, only the video stream -    // will be seekable, otherwise the single stream will be seekable. -    bool mSeekable; +    // will signal seek on the extractor; otherwise the single stream will seek. 
+    bool mDoesSeek;      DISALLOW_EVIL_CONSTRUCTORS(MPEG2TSSource);  }; @@ -63,10 +68,10 @@ private:  MPEG2TSSource::MPEG2TSSource(          const sp<MPEG2TSExtractor> &extractor,          const sp<AnotherPacketSource> &impl, -        bool seekable) +        bool doesSeek)      : mExtractor(extractor),        mImpl(impl), -      mSeekable(seekable) { +      mDoesSeek(doesSeek) {  }  status_t MPEG2TSSource::start(MetaData *params) { @@ -85,27 +90,18 @@ status_t MPEG2TSSource::read(          MediaBuffer **out, const ReadOptions *options) {      *out = NULL; -    status_t finalResult; -    while (!mImpl->hasBufferAvailable(&finalResult)) { -        if (finalResult != OK) { -            return ERROR_END_OF_STREAM; -        } - -        status_t err = mExtractor->feedMore(); +    int64_t seekTimeUs; +    ReadOptions::SeekMode seekMode; +    if (mDoesSeek && options && options->getSeekTo(&seekTimeUs, &seekMode)) { +        // seek is needed +        status_t err = mExtractor->seek(seekTimeUs, seekMode);          if (err != OK) { -            mImpl->signalEOS(err); +            return err;          }      } -    int64_t seekTimeUs; -    ReadOptions::SeekMode seekMode; -    if (mSeekable && options && options->getSeekTo(&seekTimeUs, &seekMode)) { -        // A seek was requested, but we don't actually support seeking and so can only "seek" to -        // the current position -        int64_t nextBufTimeUs; -        if (mImpl->nextBufferTime(&nextBufTimeUs) != OK || seekTimeUs != nextBufTimeUs) { -            return ERROR_UNSUPPORTED; -        } +    if (mExtractor->feedUntilBufferAvailable(mImpl) != OK) { +        return ERROR_END_OF_STREAM;      }      return mImpl->read(out, options); @@ -129,20 +125,10 @@ sp<MediaSource> MPEG2TSExtractor::getTrack(size_t index) {          return NULL;      } -    bool seekable = true; -    if (mSourceImpls.size() > 1) { -        CHECK_EQ(mSourceImpls.size(), 2u); - -        sp<MetaData> meta = 
mSourceImpls.editItemAt(index)->getFormat(); -        const char *mime; -        CHECK(meta->findCString(kKeyMIMEType, &mime)); - -        if (!strncasecmp("audio/", mime, 6)) { -            seekable = false; -        } -    } - -    return new MPEG2TSSource(this, mSourceImpls.editItemAt(index), seekable); +    // The seek reference track (video if present; audio otherwise) performs +    // seek requests, while other tracks ignore requests. +    return new MPEG2TSSource(this, mSourceImpls.editItemAt(index), +            (mSeekSyncPoints == &mSyncPoints.editItemAt(index)));  }  sp<MetaData> MPEG2TSExtractor::getTrackMetaData( @@ -161,7 +147,7 @@ sp<MetaData> MPEG2TSExtractor::getMetaData() {  void MPEG2TSExtractor::init() {      bool haveAudio = false;      bool haveVideo = false; -    int numPacketsParsed = 0; +    int64_t startTime = ALooper::GetNowUs();      while (feedMore() == OK) {          if (haveAudio && haveVideo) { @@ -175,6 +161,8 @@ void MPEG2TSExtractor::init() {              if (impl != NULL) {                  haveVideo = true;                  mSourceImpls.push(impl); +                mSyncPoints.push(); +                mSeekSyncPoints = &mSyncPoints.editTop();              }          } @@ -186,15 +174,75 @@ void MPEG2TSExtractor::init() {              if (impl != NULL) {                  haveAudio = true;                  mSourceImpls.push(impl); +                mSyncPoints.push(); +                if (!haveVideo) { +                    mSeekSyncPoints = &mSyncPoints.editTop(); +                }              }          } -        if (++numPacketsParsed > 10000) { +        // Wait only for 2 seconds to detect audio/video streams. 
+        if (ALooper::GetNowUs() - startTime > 2000000ll) {              break;          }      } -    ALOGI("haveAudio=%d, haveVideo=%d", haveAudio, haveVideo); +    off64_t size; +    if (mDataSource->getSize(&size) == OK && (haveAudio || haveVideo)) { +        sp<AnotherPacketSource> impl = haveVideo +                ? (AnotherPacketSource *)mParser->getSource( +                        ATSParser::VIDEO).get() +                : (AnotherPacketSource *)mParser->getSource( +                        ATSParser::AUDIO).get(); +        size_t prevSyncSize = 1; +        int64_t durationUs = -1; +        List<int64_t> durations; +        // Estimate duration --- stabilize until you get <500ms deviation. +        while (feedMore() == OK +                && ALooper::GetNowUs() - startTime <= 2000000ll) { +            if (mSeekSyncPoints->size() > prevSyncSize) { +                prevSyncSize = mSeekSyncPoints->size(); +                int64_t diffUs = mSeekSyncPoints->keyAt(prevSyncSize - 1) +                        - mSeekSyncPoints->keyAt(0); +                off64_t diffOffset = mSeekSyncPoints->valueAt(prevSyncSize - 1) +                        - mSeekSyncPoints->valueAt(0); +                durationUs = size * diffUs / diffOffset; +                durations.push_back(durationUs); +                if (durations.size() > 5) { +                    durations.erase(durations.begin()); +                    int64_t min = *durations.begin(); +                    int64_t max = *durations.begin(); +                    for (List<int64_t>::iterator i = durations.begin(); +                            i != durations.end(); +                            ++i) { +                        if (min > *i) { +                            min = *i; +                        } +                        if (max < *i) { +                            max = *i; +                        } +                    } +                    if (max - min < 500 * 1000) { +                        break; +          
          } +                } +            } +        } +        status_t err; +        int64_t bufferedDurationUs; +        bufferedDurationUs = impl->getBufferedDurationUs(&err); +        if (err == ERROR_END_OF_STREAM) { +            durationUs = bufferedDurationUs; +        } +        if (durationUs > 0) { +            const sp<MetaData> meta = impl->getFormat(); +            meta->setInt64(kKeyDuration, durationUs); +            impl->setFormat(meta); +        } +    } + +    ALOGI("haveAudio=%d, haveVideo=%d, elaspedTime=%" PRId64, +            haveAudio, haveVideo, ALooper::GetNowUs() - startTime);  }  status_t MPEG2TSExtractor::feedMore() { @@ -210,12 +258,206 @@ status_t MPEG2TSExtractor::feedMore() {          return (n < 0) ? (status_t)n : ERROR_END_OF_STREAM;      } +    ATSParser::SyncEvent event(mOffset);      mOffset += n; -    return mParser->feedTSPacket(packet, kTSPacketSize); +    status_t err = mParser->feedTSPacket(packet, kTSPacketSize, &event); +    if (event.isInit()) { +        for (size_t i = 0; i < mSourceImpls.size(); ++i) { +            if (mSourceImpls[i].get() == event.getMediaSource().get()) { +                KeyedVector<int64_t, off64_t> *syncPoints = &mSyncPoints.editItemAt(i); +                syncPoints->add(event.getTimeUs(), event.getOffset()); +                // We're keeping the size of the sync points at most 5mb per a track. 
+                size_t size = syncPoints->size(); +                if (size >= 327680) { +                    int64_t firstTimeUs = syncPoints->keyAt(0); +                    int64_t lastTimeUs = syncPoints->keyAt(size - 1); +                    if (event.getTimeUs() - firstTimeUs > lastTimeUs - event.getTimeUs()) { +                        syncPoints->removeItemsAt(0, 4096); +                    } else { +                        syncPoints->removeItemsAt(size - 4096, 4096); +                    } +                } +                break; +            } +        } +    } +    return err;  }  uint32_t MPEG2TSExtractor::flags() const { -    return CAN_PAUSE; +    return CAN_PAUSE | CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD; +} + +status_t MPEG2TSExtractor::seek(int64_t seekTimeUs, +        const MediaSource::ReadOptions::SeekMode &seekMode) { +    if (mSeekSyncPoints == NULL || mSeekSyncPoints->isEmpty()) { +        ALOGW("No sync point to seek to."); +        // ... and therefore we have nothing useful to do here. +        return OK; +    } + +    // Determine whether we're seeking beyond the known area. +    bool shouldSeekBeyond = +            (seekTimeUs > mSeekSyncPoints->keyAt(mSeekSyncPoints->size() - 1)); + +    // Determine the sync point to seek. 
+    size_t index = 0; +    for (; index < mSeekSyncPoints->size(); ++index) { +        int64_t timeUs = mSeekSyncPoints->keyAt(index); +        if (timeUs > seekTimeUs) { +            break; +        } +    } + +    switch (seekMode) { +        case MediaSource::ReadOptions::SEEK_NEXT_SYNC: +            if (index == mSeekSyncPoints->size()) { +                ALOGW("Next sync not found; starting from the latest sync."); +                --index; +            } +            break; +        case MediaSource::ReadOptions::SEEK_CLOSEST_SYNC: +        case MediaSource::ReadOptions::SEEK_CLOSEST: +            ALOGW("seekMode not supported: %d; falling back to PREVIOUS_SYNC", +                    seekMode); +            // fall-through +        case MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC: +            if (index == 0) { +                ALOGW("Previous sync not found; starting from the earliest " +                        "sync."); +            } else { +                --index; +            } +            break; +    } +    if (!shouldSeekBeyond || mOffset <= mSeekSyncPoints->valueAt(index)) { +        int64_t actualSeekTimeUs = mSeekSyncPoints->keyAt(index); +        mOffset = mSeekSyncPoints->valueAt(index); +        status_t err = queueDiscontinuityForSeek(actualSeekTimeUs); +        if (err != OK) { +            return err; +        } +    } + +    if (shouldSeekBeyond) { +        status_t err = seekBeyond(seekTimeUs); +        if (err != OK) { +            return err; +        } +    } + +    // Fast-forward to sync frame. 
+    for (size_t i = 0; i < mSourceImpls.size(); ++i) { +        const sp<AnotherPacketSource> &impl = mSourceImpls[i]; +        status_t err; +        feedUntilBufferAvailable(impl); +        while (impl->hasBufferAvailable(&err)) { +            sp<AMessage> meta = impl->getMetaAfterLastDequeued(0); +            sp<ABuffer> buffer; +            if (meta == NULL) { +                return UNKNOWN_ERROR; +            } +            int32_t sync; +            if (meta->findInt32("isSync", &sync) && sync) { +                break; +            } +            err = impl->dequeueAccessUnit(&buffer); +            if (err != OK) { +                return err; +            } +            feedUntilBufferAvailable(impl); +        } +    } + +    return OK; +} + +status_t MPEG2TSExtractor::queueDiscontinuityForSeek(int64_t actualSeekTimeUs) { +    // Signal discontinuity +    sp<AMessage> extra(new AMessage); +    extra->setInt64(IStreamListener::kKeyMediaTimeUs, actualSeekTimeUs); +    mParser->signalDiscontinuity(ATSParser::DISCONTINUITY_TIME, extra); + +    // After discontinuity, impl should only have discontinuities +    // with the last being what we queued. Dequeue them all here. +    for (size_t i = 0; i < mSourceImpls.size(); ++i) { +        const sp<AnotherPacketSource> &impl = mSourceImpls.itemAt(i); +        sp<ABuffer> buffer; +        status_t err; +        while (impl->hasBufferAvailable(&err)) { +            if (err != OK) { +                return err; +            } +            err = impl->dequeueAccessUnit(&buffer); +            // If the source contains anything but discontinuity, that's +            // a programming mistake. +            CHECK(err == INFO_DISCONTINUITY); +        } +    } + +    // Feed until we have a buffer for each source. 
+    for (size_t i = 0; i < mSourceImpls.size(); ++i) { +        const sp<AnotherPacketSource> &impl = mSourceImpls.itemAt(i); +        sp<ABuffer> buffer; +        status_t err = feedUntilBufferAvailable(impl); +        if (err != OK) { +            return err; +        } +    } + +    return OK; +} + +status_t MPEG2TSExtractor::seekBeyond(int64_t seekTimeUs) { +    // If we're seeking beyond where we know --- read until we reach there. +    size_t syncPointsSize = mSeekSyncPoints->size(); + +    while (seekTimeUs > mSeekSyncPoints->keyAt( +            mSeekSyncPoints->size() - 1)) { +        status_t err; +        if (syncPointsSize < mSeekSyncPoints->size()) { +            syncPointsSize = mSeekSyncPoints->size(); +            int64_t syncTimeUs = mSeekSyncPoints->keyAt(syncPointsSize - 1); +            // Dequeue buffers before sync point in order to avoid too much +            // cache building up. +            sp<ABuffer> buffer; +            for (size_t i = 0; i < mSourceImpls.size(); ++i) { +                const sp<AnotherPacketSource> &impl = mSourceImpls[i]; +                int64_t timeUs; +                while ((err = impl->nextBufferTime(&timeUs)) == OK) { +                    if (timeUs < syncTimeUs) { +                        impl->dequeueAccessUnit(&buffer); +                    } else { +                        break; +                    } +                } +                if (err != OK && err != -EWOULDBLOCK) { +                    return err; +                } +            } +        } +        if (feedMore() != OK) { +            return ERROR_END_OF_STREAM; +        } +    } + +    return OK; +} + +status_t MPEG2TSExtractor::feedUntilBufferAvailable( +        const sp<AnotherPacketSource> &impl) { +    status_t finalResult; +    while (!impl->hasBufferAvailable(&finalResult)) { +        if (finalResult != OK) { +            return finalResult; +        } + +        status_t err = feedMore(); +        if (err != OK) { +            
impl->signalEOS(err); +        } +    } +    return OK;  }  //////////////////////////////////////////////////////////////////////////////// diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk index aaa8334..5f0f567 100644 --- a/media/libstagefright/omx/Android.mk +++ b/media/libstagefright/omx/Android.mk @@ -1,11 +1,8 @@  LOCAL_PATH:= $(call my-dir)  include $(CLEAR_VARS) -ifeq ($(TARGET_DEVICE), manta) -    LOCAL_CFLAGS += -DSURFACE_IS_BGR32 -endif -  LOCAL_SRC_FILES:=                     \ +        FrameDropper.cpp              \          GraphicBufferSource.cpp       \          OMX.cpp                       \          OMXMaster.cpp                 \ @@ -34,6 +31,8 @@ LOCAL_SHARED_LIBRARIES :=               \          libdl  LOCAL_MODULE:= libstagefright_omx +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/omx/FrameDropper.cpp b/media/libstagefright/omx/FrameDropper.cpp new file mode 100644 index 0000000..9a4952e --- /dev/null +++ b/media/libstagefright/omx/FrameDropper.cpp @@ -0,0 +1,73 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "FrameDropper" +#include <utils/Log.h> + +#include "FrameDropper.h" + +#include <media/stagefright/foundation/ADebug.h> + +namespace android { + +static const int64_t kMaxJitterUs = 2000; + +FrameDropper::FrameDropper() +    : mDesiredMinTimeUs(-1), +      mMinIntervalUs(0) { +} + +FrameDropper::~FrameDropper() { +} + +status_t FrameDropper::setMaxFrameRate(float maxFrameRate) { +    if (maxFrameRate <= 0) { +        ALOGE("framerate should be positive but got %f.", maxFrameRate); +        return BAD_VALUE; +    } +    mMinIntervalUs = (int64_t) (1000000.0f / maxFrameRate); +    return OK; +} + +bool FrameDropper::shouldDrop(int64_t timeUs) { +    if (mMinIntervalUs <= 0) { +        return false; +    } + +    if (mDesiredMinTimeUs < 0) { +        mDesiredMinTimeUs = timeUs + mMinIntervalUs; +        ALOGV("first frame %lld, next desired frame %lld", +                (long long)timeUs, (long long)mDesiredMinTimeUs); +        return false; +    } + +    if (timeUs < (mDesiredMinTimeUs - kMaxJitterUs)) { +        ALOGV("drop frame %lld, desired frame %lld, diff %lld", +                (long long)timeUs, (long long)mDesiredMinTimeUs, +                (long long)(mDesiredMinTimeUs - timeUs)); +        return true; +    } + +    int64_t n = (timeUs - mDesiredMinTimeUs + kMaxJitterUs) / mMinIntervalUs; +    mDesiredMinTimeUs += (n + 1) * mMinIntervalUs; +    ALOGV("keep frame %lld, next desired frame %lld, diff %lld", +            (long long)timeUs, (long long)mDesiredMinTimeUs, +            (long long)(mDesiredMinTimeUs - timeUs)); +    return false; +} + +}  // namespace android diff --git a/media/libstagefright/omx/FrameDropper.h b/media/libstagefright/omx/FrameDropper.h new file mode 100644 index 0000000..c5a6d4b --- /dev/null +++ b/media/libstagefright/omx/FrameDropper.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + 
* you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAME_DROPPER_H_ + +#define FRAME_DROPPER_H_ + +#include <utils/Errors.h> +#include <utils/RefBase.h> + +#include <media/stagefright/foundation/ABase.h> + +namespace android { + +struct FrameDropper : public RefBase { +    // No frames will be dropped until a valid max frame rate is set. +    FrameDropper(); + +    // maxFrameRate required to be positive. +    status_t setMaxFrameRate(float maxFrameRate); + +    // Returns false if max frame rate has not been set via setMaxFrameRate. 
+    bool shouldDrop(int64_t timeUs); + +protected: +    virtual ~FrameDropper(); + +private: +    int64_t mDesiredMinTimeUs; +    int64_t mMinIntervalUs; + +    DISALLOW_EVIL_CONSTRUCTORS(FrameDropper); +}; + +}  // namespace android + +#endif  // FRAME_DROPPER_H_ diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index 2945644..1a7dc9d 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -23,28 +23,104 @@  #include "GraphicBufferSource.h"  #include <OMX_Core.h> +#include <OMX_IndexExt.h>  #include <media/stagefright/foundation/ADebug.h>  #include <media/stagefright/foundation/AMessage.h>  #include <media/hardware/MetadataBufferType.h>  #include <ui/GraphicBuffer.h>  #include <gui/BufferItem.h> +#include <HardwareAPI.h>  #include <inttypes.h> +#include "FrameDropper.h"  namespace android {  static const bool EXTRA_CHECK = true; +GraphicBufferSource::PersistentProxyListener::PersistentProxyListener( +        const wp<IGraphicBufferConsumer> &consumer, +        const wp<ConsumerListener>& consumerListener) : +    mConsumerListener(consumerListener), +    mConsumer(consumer) {} -GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, -        uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount, -        bool useGraphicBufferInMeta) : +GraphicBufferSource::PersistentProxyListener::~PersistentProxyListener() {} + +void GraphicBufferSource::PersistentProxyListener::onFrameAvailable( +        const BufferItem& item) { +    sp<ConsumerListener> listener(mConsumerListener.promote()); +    if (listener != NULL) { +        listener->onFrameAvailable(item); +    } else { +        sp<IGraphicBufferConsumer> consumer(mConsumer.promote()); +        if (consumer == NULL) { +            return; +        } +        BufferItem bi; +        status_t err = consumer->acquireBuffer(&bi, 0); +        if (err != OK) { +            
ALOGE("PersistentProxyListener: acquireBuffer failed (%d)", err); +            return; +        } + +        err = consumer->detachBuffer(bi.mBuf); +        if (err != OK) { +            ALOGE("PersistentProxyListener: detachBuffer failed (%d)", err); +            return; +        } + +        err = consumer->attachBuffer(&bi.mBuf, bi.mGraphicBuffer); +        if (err != OK) { +            ALOGE("PersistentProxyListener: attachBuffer failed (%d)", err); +            return; +        } + +        err = consumer->releaseBuffer(bi.mBuf, 0, +                EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, bi.mFence); +        if (err != OK) { +            ALOGE("PersistentProxyListener: releaseBuffer failed (%d)", err); +        } +    } +} + +void GraphicBufferSource::PersistentProxyListener::onFrameReplaced( +        const BufferItem& item) { +    sp<ConsumerListener> listener(mConsumerListener.promote()); +    if (listener != NULL) { +        listener->onFrameReplaced(item); +    } +} + +void GraphicBufferSource::PersistentProxyListener::onBuffersReleased() { +    sp<ConsumerListener> listener(mConsumerListener.promote()); +    if (listener != NULL) { +        listener->onBuffersReleased(); +    } +} + +void GraphicBufferSource::PersistentProxyListener::onSidebandStreamChanged() { +    sp<ConsumerListener> listener(mConsumerListener.promote()); +    if (listener != NULL) { +        listener->onSidebandStreamChanged(); +    } +} + +GraphicBufferSource::GraphicBufferSource( +        OMXNodeInstance* nodeInstance, +        uint32_t bufferWidth, +        uint32_t bufferHeight, +        uint32_t bufferCount, +        uint32_t consumerUsage, +        const sp<IGraphicBufferConsumer> &consumer) :      mInitCheck(UNKNOWN_ERROR),      mNodeInstance(nodeInstance),      mExecuting(false),      mSuspended(false), +    mIsPersistent(false), +    mConsumer(consumer),      mNumFramesAvailable(0), +    mNumBufferAcquired(0),      mEndOfStream(false),      mEndOfStreamSent(false),      
mMaxTimestampGapUs(-1ll), @@ -54,15 +130,15 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,      mRepeatAfterUs(-1ll),      mRepeatLastFrameGeneration(0),      mRepeatLastFrameTimestamp(-1ll), -    mLatestSubmittedBufferId(-1), -    mLatestSubmittedBufferFrameNum(0), -    mLatestSubmittedBufferUseCount(0), +    mLatestBufferId(-1), +    mLatestBufferFrameNum(0), +    mLatestBufferUseCount(0), +    mLatestBufferFence(Fence::NO_FENCE),      mRepeatBufferDeferred(false),      mTimePerCaptureUs(-1ll),      mTimePerFrameUs(-1ll),      mPrevCaptureUs(-1ll), -    mPrevFrameUs(-1ll), -    mUseGraphicBufferInMeta(useGraphicBufferInMeta) { +    mPrevFrameUs(-1ll) {      ALOGV("GraphicBufferSource w=%u h=%u c=%u",              bufferWidth, bufferHeight, bufferCount); @@ -73,26 +149,38 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,          return;      } -    String8 name("GraphicBufferSource"); +    if (mConsumer == NULL) { +        String8 name("GraphicBufferSource"); -    BufferQueue::createBufferQueue(&mProducer, &mConsumer); -    mConsumer->setConsumerName(name); -    mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight); -    mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER); +        BufferQueue::createBufferQueue(&mProducer, &mConsumer); +        mConsumer->setConsumerName(name); -    mInitCheck = mConsumer->setMaxAcquiredBufferCount(bufferCount); -    if (mInitCheck != NO_ERROR) { -        ALOGE("Unable to set BQ max acquired buffer count to %u: %d", -                bufferCount, mInitCheck); -        return; -    } +        // use consumer usage bits queried from encoder, but always add HW_VIDEO_ENCODER +        // for backward compatibility. 
+        consumerUsage |= GRALLOC_USAGE_HW_VIDEO_ENCODER; +        mConsumer->setConsumerUsageBits(consumerUsage); +        mInitCheck = mConsumer->setMaxAcquiredBufferCount(bufferCount); +        if (mInitCheck != NO_ERROR) { +            ALOGE("Unable to set BQ max acquired buffer count to %u: %d", +                    bufferCount, mInitCheck); +            return; +        } +    } else { +        mIsPersistent = true; +    } +    mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);      // Note that we can't create an sp<...>(this) in a ctor that will not keep a      // reference once the ctor ends, as that would cause the refcount of 'this'      // dropping to 0 at the end of the ctor.  Since all we need is a wp<...>      // that's what we create.      wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this); -    sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener); +    sp<IConsumerListener> proxy; +    if (!mIsPersistent) { +        proxy = new BufferQueue::ProxyConsumerListener(listener); +    } else { +        proxy = new PersistentProxyListener(mConsumer, listener); +    }      mInitCheck = mConsumer->consumerConnect(proxy, false);      if (mInitCheck != NO_ERROR) { @@ -105,8 +193,15 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,  }  GraphicBufferSource::~GraphicBufferSource() { -    ALOGV("~GraphicBufferSource"); -    if (mConsumer != NULL) { +    if (mLatestBufferId >= 0) { +        releaseBuffer( +                mLatestBufferId, mLatestBufferFrameNum, +                mBufferSlot[mLatestBufferId], mLatestBufferFence); +    } +    if (mNumBufferAcquired != 0) { +        ALOGW("potential buffer leak (acquired %d)", mNumBufferAcquired); +    } +    if (mConsumer != NULL && !mIsPersistent) {          status_t err = mConsumer->consumerDisconnect();          if (err != NO_ERROR) {              ALOGW("consumerDisconnect failed: %d", err); @@ 
-153,9 +248,9 @@ void GraphicBufferSource::omxExecuting() {          mLooper->registerHandler(mReflector);          mLooper->start(); -        if (mLatestSubmittedBufferId >= 0) { +        if (mLatestBufferId >= 0) {              sp<AMessage> msg = -                new AMessage(kWhatRepeatLastFrame, mReflector->id()); +                new AMessage(kWhatRepeatLastFrame, mReflector);              msg->setInt32("generation", ++mRepeatLastFrameGeneration);              msg->post(mRepeatAfterUs); @@ -218,9 +313,8 @@ void GraphicBufferSource::addCodecBuffer(OMX_BUFFERHEADERTYPE* header) {      mCodecBuffers.add(codecBuffer);  } -void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { +void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header, int fenceFd) {      Mutex::Autolock autoLock(mMutex); -      if (!mExecuting) {          return;      } @@ -229,6 +323,9 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {      if (cbi < 0) {          // This should never happen.          ALOGE("codecBufferEmptied: buffer not recognized (h=%p)", header); +        if (fenceFd >= 0) { +            ::close(fenceFd); +        }          return;      } @@ -250,30 +347,33 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {          }          // No GraphicBuffer to deal with, no additional input or output is          // expected, so just return. +        if (fenceFd >= 0) { +            ::close(fenceFd); +        }          return;      } -    if (EXTRA_CHECK) { +    if (EXTRA_CHECK && header->nAllocLen >= sizeof(MetadataBufferType)) {          // Pull the graphic buffer handle back out of the buffer, and confirm          // that it matches expectations.          
OMX_U8* data = header->pBuffer;          MetadataBufferType type = *(MetadataBufferType *)data; -        if (type == kMetadataBufferTypeGrallocSource) { -            buffer_handle_t bufferHandle; -            memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t)); -            if (bufferHandle != codecBuffer.mGraphicBuffer->handle) { +        if (type == kMetadataBufferTypeGrallocSource +                && header->nAllocLen >= sizeof(VideoGrallocMetadata)) { +            VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)data; +            if (grallocMeta.pHandle != codecBuffer.mGraphicBuffer->handle) {                  // should never happen                  ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p", -                        bufferHandle, codecBuffer.mGraphicBuffer->handle); +                        grallocMeta.pHandle, codecBuffer.mGraphicBuffer->handle);                  CHECK(!"codecBufferEmptied: mismatched buffer");              } -        } else if (type == kMetadataBufferTypeGraphicBuffer) { -            GraphicBuffer *buffer; -            memcpy(&buffer, data + 4, sizeof(buffer)); -            if (buffer != codecBuffer.mGraphicBuffer.get()) { +        } else if (type == kMetadataBufferTypeANWBuffer +                && header->nAllocLen >= sizeof(VideoNativeMetadata)) { +            VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)data; +            if (nativeMeta.pBuffer != codecBuffer.mGraphicBuffer->getNativeBuffer()) {                  // should never happen                  ALOGE("codecBufferEmptied: buffer is %p, expected %p", -                        buffer, codecBuffer.mGraphicBuffer.get()); +                        nativeMeta.pBuffer, codecBuffer.mGraphicBuffer->getNativeBuffer());                  CHECK(!"codecBufferEmptied: mismatched buffer");              }          } @@ -283,20 +383,21 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {      // If we find a match, release 
that slot.  If we don't, the BufferQueue      // has dropped that GraphicBuffer, and there's nothing for us to release.      int id = codecBuffer.mBuf; +    sp<Fence> fence = new Fence(fenceFd);      if (mBufferSlot[id] != NULL &&          mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) {          ALOGV("cbi %d matches bq slot %d, handle=%p",                  cbi, id, mBufferSlot[id]->handle); -        if (id == mLatestSubmittedBufferId) { -            CHECK_GT(mLatestSubmittedBufferUseCount--, 0); +        if (id == mLatestBufferId) { +            CHECK_GT(mLatestBufferUseCount--, 0);          } else { -            mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber, -                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); +            releaseBuffer(id, codecBuffer.mFrameNumber, mBufferSlot[id], fence);          }      } else {          ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d",                  cbi); +        // we will not reuse codec buffer, so there is no need to wait for fence      }      // Mark the codec buffer as available by clearing the GraphicBuffer ref. 
@@ -314,11 +415,11 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {          ALOGV("buffer freed, EOS pending");          submitEndOfInputStream_l();      } else if (mRepeatBufferDeferred) { -        bool success = repeatLatestSubmittedBuffer_l(); +        bool success = repeatLatestBuffer_l();          if (success) { -            ALOGV("deferred repeatLatestSubmittedBuffer_l SUCCESS"); +            ALOGV("deferred repeatLatestBuffer_l SUCCESS");          } else { -            ALOGV("deferred repeatLatestSubmittedBuffer_l FAILURE"); +            ALOGV("deferred repeatLatestBuffer_l FAILURE");          }          mRepeatBufferDeferred = false;      } @@ -372,10 +473,11 @@ void GraphicBufferSource::suspend(bool suspend) {                  break;              } +            ++mNumBufferAcquired;              --mNumFramesAvailable; -            mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, -                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence); +            releaseBuffer(item.mBuf, item.mFrameNumber, +                    item.mGraphicBuffer, item.mFence);          }          return;      } @@ -383,12 +485,12 @@ void GraphicBufferSource::suspend(bool suspend) {      mSuspended = false;      if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) { -        if (repeatLatestSubmittedBuffer_l()) { -            ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l SUCCESS"); +        if (repeatLatestBuffer_l()) { +            ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS");              mRepeatBufferDeferred = false;          } else { -            ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l FAILURE"); +            ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE");          }      }  } @@ -422,15 +524,9 @@ bool GraphicBufferSource::fillCodecBuffer_l() {          return false;      } +    mNumBufferAcquired++;      mNumFramesAvailable--; -    // Wait for it to become available. 
-    err = item.mFence->waitForever("GraphicBufferSource::fillCodecBuffer_l"); -    if (err != OK) { -        ALOGW("failed to wait for buffer fence: %d", err); -        // keep going -    } -      // If this is the first time we're seeing this buffer, add it to our      // slot table.      if (item.mGraphicBuffer != NULL) { @@ -442,61 +538,72 @@ bool GraphicBufferSource::fillCodecBuffer_l() {      // only submit sample if start time is unspecified, or sample      // is queued after the specified start time +    bool dropped = false;      if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) {          // if start time is set, offset time stamp by start time          if (mSkipFramesBeforeNs > 0) {              item.mTimestamp -= mSkipFramesBeforeNs;          } -        err = submitBuffer_l(item, cbi); + +        int64_t timeUs = item.mTimestamp / 1000; +        if (mFrameDropper != NULL && mFrameDropper->shouldDrop(timeUs)) { +            ALOGV("skipping frame (%lld) to meet max framerate", static_cast<long long>(timeUs)); +            // set err to OK so that the skipped frame can still be saved as the latest frame +            err = OK; +            dropped = true; +        } else { +            err = submitBuffer_l(item, cbi); +        }      }      if (err != OK) {          ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf); -        mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, -                EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); +        releaseBuffer(item.mBuf, item.mFrameNumber, item.mGraphicBuffer, item.mFence);      } else {          ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); -        setLatestSubmittedBuffer_l(item); +        setLatestBuffer_l(item, dropped);      }      return true;  } -bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() { +bool GraphicBufferSource::repeatLatestBuffer_l() {      CHECK(mExecuting && mNumFramesAvailable == 0); -    if (mLatestSubmittedBufferId < 
0 || mSuspended) { +    if (mLatestBufferId < 0 || mSuspended) {          return false;      } -    if (mBufferSlot[mLatestSubmittedBufferId] == NULL) { +    if (mBufferSlot[mLatestBufferId] == NULL) {          // This can happen if the remote side disconnects, causing          // onBuffersReleased() to NULL out our copy of the slots.  The          // buffer is gone, so we have nothing to show.          //          // To be on the safe side we try to release the buffer. -        ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL"); +        ALOGD("repeatLatestBuffer_l: slot was NULL");          mConsumer->releaseBuffer( -                mLatestSubmittedBufferId, -                mLatestSubmittedBufferFrameNum, +                mLatestBufferId, +                mLatestBufferFrameNum,                  EGL_NO_DISPLAY,                  EGL_NO_SYNC_KHR, -                Fence::NO_FENCE); -        mLatestSubmittedBufferId = -1; -        mLatestSubmittedBufferFrameNum = 0; +                mLatestBufferFence); +        mLatestBufferId = -1; +        mLatestBufferFrameNum = 0; +        mLatestBufferFence = Fence::NO_FENCE;          return false;      }      int cbi = findAvailableCodecBuffer_l();      if (cbi < 0) {          // No buffers available, bail. 
-        ALOGV("repeatLatestSubmittedBuffer_l: no codec buffers."); +        ALOGV("repeatLatestBuffer_l: no codec buffers.");          return false;      }      BufferItem item; -    item.mBuf = mLatestSubmittedBufferId; -    item.mFrameNumber = mLatestSubmittedBufferFrameNum; +    item.mBuf = mLatestBufferId; +    item.mFrameNumber = mLatestBufferFrameNum;      item.mTimestamp = mRepeatLastFrameTimestamp; +    item.mFence = mLatestBufferFence;      status_t err = submitBuffer_l(item, cbi); @@ -504,7 +611,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {          return false;      } -    ++mLatestSubmittedBufferUseCount; +    ++mLatestBufferUseCount;      /* repeat last frame up to kRepeatLastFrameCount times.       * in case of static scene, a single repeat might not get rid of encoder @@ -514,7 +621,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {          mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000;          if (mReflector != NULL) { -            sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id()); +            sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector);              msg->setInt32("generation", ++mRepeatLastFrameGeneration);              msg->post(mRepeatAfterUs);          } @@ -523,31 +630,29 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {      return true;  } -void GraphicBufferSource::setLatestSubmittedBuffer_l( -        const BufferItem &item) { -    ALOGV("setLatestSubmittedBuffer_l"); +void GraphicBufferSource::setLatestBuffer_l( +        const BufferItem &item, bool dropped) { +    ALOGV("setLatestBuffer_l"); -    if (mLatestSubmittedBufferId >= 0) { -        if (mLatestSubmittedBufferUseCount == 0) { -            mConsumer->releaseBuffer( -                    mLatestSubmittedBufferId, -                    mLatestSubmittedBufferFrameNum, -                    EGL_NO_DISPLAY, -                    EGL_NO_SYNC_KHR, -                    
Fence::NO_FENCE); +    if (mLatestBufferId >= 0) { +        if (mLatestBufferUseCount == 0) { +            releaseBuffer(mLatestBufferId, mLatestBufferFrameNum, +                    mBufferSlot[mLatestBufferId], mLatestBufferFence); +            // mLatestBufferFence will be set to new fence just below          }      } -    mLatestSubmittedBufferId = item.mBuf; -    mLatestSubmittedBufferFrameNum = item.mFrameNumber; +    mLatestBufferId = item.mBuf; +    mLatestBufferFrameNum = item.mFrameNumber;      mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000; -    mLatestSubmittedBufferUseCount = 1; +    mLatestBufferUseCount = dropped ? 0 : 1;      mRepeatBufferDeferred = false;      mRepeatLastFrameCount = kRepeatLastFrameCount; +    mLatestBufferFence = item.mFence;      if (mReflector != NULL) { -        sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id()); +        sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector);          msg->setInt32("generation", ++mRepeatLastFrameGeneration);          msg->post(mRepeatAfterUs);      } @@ -640,8 +745,7 @@ int64_t GraphicBufferSource::getTimestamp(const BufferItem &item) {      return timeUs;  } -status_t GraphicBufferSource::submitBuffer_l( -        const BufferItem &item, int cbi) { +status_t GraphicBufferSource::submitBuffer_l(const BufferItem &item, int cbi) {      ALOGV("submitBuffer_l cbi=%d", cbi);      int64_t timeUs = getTimestamp(item); @@ -655,36 +759,18 @@ status_t GraphicBufferSource::submitBuffer_l(      codecBuffer.mFrameNumber = item.mFrameNumber;      OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader; -    CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t)); -    OMX_U8* data = header->pBuffer; -    buffer_handle_t handle; -    if (!mUseGraphicBufferInMeta) { -        const OMX_U32 type = kMetadataBufferTypeGrallocSource; -        handle = codecBuffer.mGraphicBuffer->handle; -        memcpy(data, &type, 4); -        memcpy(data + 4, &handle, 
sizeof(buffer_handle_t)); -    } else { -        // codecBuffer holds a reference to the GraphicBuffer, so -        // it is valid while it is with the OMX component -        const OMX_U32 type = kMetadataBufferTypeGraphicBuffer; -        memcpy(data, &type, 4); -        // passing a non-reference-counted graphicBuffer -        GraphicBuffer *buffer = codecBuffer.mGraphicBuffer.get(); -        handle = buffer->handle; -        memcpy(data + 4, &buffer, sizeof(buffer)); -    } - -    status_t err = mNodeInstance->emptyDirectBuffer(header, 0, -            4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME, -            timeUs); +    sp<GraphicBuffer> buffer = codecBuffer.mGraphicBuffer; +    status_t err = mNodeInstance->emptyGraphicBuffer( +            header, buffer, OMX_BUFFERFLAG_ENDOFFRAME, timeUs, +            item.mFence->isValid() ? item.mFence->dup() : -1);      if (err != OK) { -        ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err); +        ALOGW("WARNING: emptyNativeWindowBuffer failed: 0x%x", err);          codecBuffer.mGraphicBuffer = NULL;          return err;      } -    ALOGV("emptyDirectBuffer succeeded, h=%p p=%p bufhandle=%p", -            header, header->pBuffer, handle); +    ALOGV("emptyNativeWindowBuffer succeeded, h=%p p=%p buf=%p bufhandle=%p", +            header, header->pBuffer, buffer->getNativeBuffer(), buffer->handle);      return OK;  } @@ -707,19 +793,9 @@ void GraphicBufferSource::submitEndOfInputStream_l() {      CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));      OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader; -    if (EXTRA_CHECK) { -        // Guard against implementations that don't check nFilledLen. -        size_t fillLen = 4 + sizeof(buffer_handle_t); -        CHECK(header->nAllocLen >= fillLen); -        OMX_U8* data = header->pBuffer; -        memset(data, 0xcd, fillLen); -    } - -    uint64_t timestamp = 0; // does this matter? 
- -    status_t err = mNodeInstance->emptyDirectBuffer(header, /*offset*/ 0, -            /*length*/ 0, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS, -            timestamp); +    status_t err = mNodeInstance->emptyGraphicBuffer( +            header, NULL /* buffer */, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS, +            0 /* timestamp */, -1 /* fenceFd */);      if (err != OK) {          ALOGW("emptyDirectBuffer EOS failed: 0x%x", err);      } else { @@ -750,6 +826,35 @@ int GraphicBufferSource::findMatchingCodecBuffer_l(      return -1;  } +/* + * Releases an acquired buffer back to the consumer for either persistent + * or non-persistent surfaces. + * + * id: buffer slot to release (in persistent case the id might be changed) + * frameNum: frame number of the frame being released + * buffer: GraphicBuffer pointer to release (note this must not be & as we + *         will clear the original mBufferSlot in persistent case) + * fence: fence of the frame being released + */ +void GraphicBufferSource::releaseBuffer( +        int &id, uint64_t frameNum, +        const sp<GraphicBuffer> buffer, const sp<Fence> &fence) { +    if (mIsPersistent) { +        mConsumer->detachBuffer(id); +        mBufferSlot[id] = NULL; + +        if (mConsumer->attachBuffer(&id, buffer) == OK) { +            mConsumer->releaseBuffer( +                    id, 0, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence); +        } +    } else { +        mConsumer->releaseBuffer( +                id, frameNum, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence); +    } +    id = -1; // invalidate id +    mNumBufferAcquired--; +} +  // BufferQueue::ConsumerListener callback  void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {      Mutex::Autolock autoLock(mMutex); @@ -770,14 +875,17 @@ void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {          BufferItem item;          status_t err = mConsumer->acquireBuffer(&item, 0);          if (err == OK) { +            
mNumBufferAcquired++; +              // If this is the first time we're seeing this buffer, add it to our              // slot table.              if (item.mGraphicBuffer != NULL) {                  ALOGV("onFrameAvailable: setting mBufferSlot %d", item.mBuf);                  mBufferSlot[item.mBuf] = item.mGraphicBuffer;              } -            mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, -                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence); + +            releaseBuffer(item.mBuf, item.mFrameNumber, +                    item.mGraphicBuffer, item.mFence);          }          return;      } @@ -842,6 +950,23 @@ status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) {      return OK;  } +status_t GraphicBufferSource::setMaxFps(float maxFps) { +    Mutex::Autolock autoLock(mMutex); + +    if (mExecuting) { +        return INVALID_OPERATION; +    } + +    mFrameDropper = new FrameDropper(); +    status_t err = mFrameDropper->setMaxFrameRate(maxFps); +    if (err != OK) { +        mFrameDropper.clear(); +        return err; +    } + +    return OK; +} +  void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) {      Mutex::Autolock autoLock(mMutex); @@ -880,12 +1005,12 @@ void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {                  break;              } -            bool success = repeatLatestSubmittedBuffer_l(); +            bool success = repeatLatestBuffer_l();              if (success) { -                ALOGV("repeatLatestSubmittedBuffer_l SUCCESS"); +                ALOGV("repeatLatestBuffer_l SUCCESS");              } else { -                ALOGV("repeatLatestSubmittedBuffer_l FAILURE"); +                ALOGV("repeatLatestBuffer_l FAILURE");                  mRepeatBufferDeferred = true;              }              break; diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index 401bbc3..2f929d9 100644 --- 
a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -30,6 +30,8 @@  namespace android { +struct FrameDropper; +  /*   * This class is used to feed OMX codecs from a Surface via BufferQueue.   * @@ -48,9 +50,15 @@ namespace android {   */  class GraphicBufferSource : public BufferQueue::ConsumerListener {  public: -    GraphicBufferSource(OMXNodeInstance* nodeInstance, -            uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount, -            bool useGraphicBufferInMeta = false); +    GraphicBufferSource( +            OMXNodeInstance* nodeInstance, +            uint32_t bufferWidth, +            uint32_t bufferHeight, +            uint32_t bufferCount, +            uint32_t consumerUsage, +            const sp<IGraphicBufferConsumer> &consumer = NULL +    ); +      virtual ~GraphicBufferSource();      // We can't throw an exception if the constructor fails, so we just set @@ -86,7 +94,7 @@ public:      // Called from OnEmptyBufferDone.  If we have a BQ buffer available,      // fill it with a new frame of data; otherwise, just mark it as available. -    void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header); +    void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header, int fenceFd);      // Called when omx_message::FILL_BUFFER_DONE is received. (Currently the      // buffer source will fix timestamp in the header if needed.) @@ -119,6 +127,9 @@ public:      // of suspension on input.      status_t setMaxTimestampGapUs(int64_t maxGapUs); +    // When set, the max frame rate fed to the encoder will be capped at maxFps. +    status_t setMaxFps(float maxFps); +      // Sets the time lapse (or slow motion) parameters.      
// data[0] is the time (us) between two frames for playback      // data[1] is the time (us) between two frames for capture @@ -150,6 +161,31 @@ protected:      virtual void onSidebandStreamChanged();  private: +    // PersistentProxyListener is similar to BufferQueue::ProxyConsumerListener +    // except that it returns (acquire/detach/re-attach/release) buffers +    // in onFrameAvailable() if the actual consumer object is no longer valid. +    // +    // This class is used in persistent input surface case to prevent buffer +    // loss when onFrameAvailable() is received while we don't have a valid +    // consumer around. +    class PersistentProxyListener : public BnConsumerListener {
        public: +            PersistentProxyListener( +                    const wp<IGraphicBufferConsumer> &consumer, +                    const wp<ConsumerListener>& consumerListener); +            virtual ~PersistentProxyListener(); +            virtual void onFrameAvailable(const BufferItem& item) override; +            virtual void onFrameReplaced(const BufferItem& item) override; +            virtual void onBuffersReleased() override; +            virtual void onSidebandStreamChanged() override; +         private: +            // mConsumerListener is a weak reference to the IConsumerListener. +            wp<ConsumerListener> mConsumerListener; +            // mConsumer is a weak reference to the IGraphicBufferConsumer, use +            // a weak ref to avoid circular ref between mConsumer and this class +            wp<IGraphicBufferConsumer> mConsumer; +    }; +      // Keep track of codec input buffers.  They may either be available      // (mGraphicBuffer == NULL) or in use by the codec.      struct CodecBuffer { @@ -193,8 +229,13 @@ private:      // doing anything if we don't have a codec buffer available.      
void submitEndOfInputStream_l(); -    void setLatestSubmittedBuffer_l(const BufferItem &item); -    bool repeatLatestSubmittedBuffer_l(); +    // Release buffer to the consumer +    void releaseBuffer( +            int &id, uint64_t frameNum, +            const sp<GraphicBuffer> buffer, const sp<Fence> &fence); + +    void setLatestBuffer_l(const BufferItem &item, bool dropped); +    bool repeatLatestBuffer_l();      int64_t getTimestamp(const BufferItem &item);      // Lock, covers all member variables. @@ -214,6 +255,7 @@ private:      // Our BufferQueue interfaces. mProducer is passed to the producer through      // getIGraphicBufferProducer, and mConsumer is used internally to retrieve      // the buffers queued by the producer. +    bool mIsPersistent;      sp<IGraphicBufferProducer> mProducer;      sp<IGraphicBufferConsumer> mConsumer; @@ -221,6 +263,9 @@ private:      // forwarded to the codec.      size_t mNumFramesAvailable; +    // Number of frames acquired from consumer (debug only) +    int32_t mNumBufferAcquired; +      // Set to true if we want to send end-of-stream after we run out of      // frames in BufferQueue.      
bool mEndOfStream; @@ -235,7 +280,7 @@ private:      Vector<CodecBuffer> mCodecBuffers;      //// -    friend class AHandlerReflector<GraphicBufferSource>; +    friend struct AHandlerReflector<GraphicBufferSource>;      enum {          kWhatRepeatLastFrame, @@ -250,6 +295,8 @@ private:      int64_t mPrevModifiedTimeUs;      int64_t mSkipFramesBeforeNs; +    sp<FrameDropper> mFrameDropper; +      sp<ALooper> mLooper;      sp<AHandlerReflector<GraphicBufferSource> > mReflector; @@ -258,11 +305,12 @@ private:      int64_t mRepeatLastFrameTimestamp;      int32_t mRepeatLastFrameCount; -    int mLatestSubmittedBufferId; -    uint64_t mLatestSubmittedBufferFrameNum; -    int32_t mLatestSubmittedBufferUseCount; +    int mLatestBufferId; +    uint64_t mLatestBufferFrameNum; +    int32_t mLatestBufferUseCount; +    sp<Fence> mLatestBufferFence; -    // The previously submitted buffer should've been repeated but +    // The previous buffer should've been repeated but      // no codec buffer was available at the time.      bool mRepeatBufferDeferred; @@ -272,7 +320,7 @@ private:      int64_t mPrevCaptureUs;      int64_t mPrevFrameUs; -    bool mUseGraphicBufferInMeta; +    MetadataBufferType mMetadataBufferType;      void onMessageReceived(const sp<AMessage> &msg); diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index f8d38ff..cb7ab5e 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -32,7 +32,9 @@  #include "OMXMaster.h" +#include <OMX_AsString.h>  #include <OMX_Component.h> +#include <OMX_VideoExt.h>  namespace android { @@ -60,7 +62,11 @@ private:  struct OMX::CallbackDispatcher : public RefBase {      CallbackDispatcher(OMXNodeInstance *owner); -    void post(const omx_message &msg); +    // Posts |msg| to the listener's queue. If |realTime| is true, the listener thread is notified +    // that a new message is available on the queue. 
Otherwise, the message stays on the queue, but +    // the listener is not notified of it. It will process this message when a subsequent message +    // is posted with |realTime| set to true. +    void post(const omx_message &msg, bool realTime = true);      bool loop(); @@ -73,11 +79,11 @@ private:      OMXNodeInstance *mOwner;      bool mDone;      Condition mQueueChanged; -    List<omx_message> mQueue; +    std::list<omx_message> mQueue;      sp<CallbackDispatcherThread> mThread; -    void dispatch(const omx_message &msg); +    void dispatch(std::list<omx_message> &messages);      CallbackDispatcher(const CallbackDispatcher &);      CallbackDispatcher &operator=(const CallbackDispatcher &); @@ -108,24 +114,26 @@ OMX::CallbackDispatcher::~CallbackDispatcher() {      }  } -void OMX::CallbackDispatcher::post(const omx_message &msg) { +void OMX::CallbackDispatcher::post(const omx_message &msg, bool realTime) {      Mutex::Autolock autoLock(mLock);      mQueue.push_back(msg); -    mQueueChanged.signal(); +    if (realTime) { +        mQueueChanged.signal(); +    }  } -void OMX::CallbackDispatcher::dispatch(const omx_message &msg) { +void OMX::CallbackDispatcher::dispatch(std::list<omx_message> &messages) {      if (mOwner == NULL) {          ALOGV("Would have dispatched a message to a node that's already gone.");          return;      } -    mOwner->onMessage(msg); +    mOwner->onMessages(messages);  }  bool OMX::CallbackDispatcher::loop() {      for (;;) { -        omx_message msg; +        std::list<omx_message> messages;          {              Mutex::Autolock autoLock(mLock); @@ -137,11 +145,10 @@ bool OMX::CallbackDispatcher::loop() {                  break;              } -            msg = *mQueue.begin(); -            mQueue.erase(mQueue.begin()); +            messages.swap(mQueue);          } -        dispatch(msg); +        dispatch(messages);      }      return false; @@ -233,11 +240,11 @@ status_t OMX::allocateNode(              instance, &handle);      
if (err != OMX_ErrorNone) { -        ALOGE("FAILED to allocate omx component '%s'", name); +        ALOGE("FAILED to allocate omx component '%s' err=%s(%#x)", name, asString(err), err);          instance->onGetHandleFailed(); -        return UNKNOWN_ERROR; +        return StatusFromOMXError(err);      }      *node = makeNodeID(instance); @@ -331,8 +338,8 @@ status_t OMX::getGraphicBufferUsage(  }  status_t OMX::storeMetaDataInBuffers( -        node_id node, OMX_U32 port_index, OMX_BOOL enable) { -    return findInstance(node)->storeMetaDataInBuffers(port_index, enable); +        node_id node, OMX_U32 port_index, OMX_BOOL enable, MetadataBufferType *type) { +    return findInstance(node)->storeMetaDataInBuffers(port_index, enable, type);  }  status_t OMX::prepareForAdaptivePlayback( @@ -351,9 +358,9 @@ status_t OMX::configureVideoTunnelMode(  status_t OMX::useBuffer(          node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -        buffer_id *buffer) { +        buffer_id *buffer, OMX_U32 allottedSize) {      return findInstance(node)->useBuffer( -            port_index, params, buffer); +            port_index, params, buffer, allottedSize);  }  status_t OMX::useGraphicBuffer( @@ -372,11 +379,25 @@ status_t OMX::updateGraphicBufferInMeta(  status_t OMX::createInputSurface(          node_id node, OMX_U32 port_index, -        sp<IGraphicBufferProducer> *bufferProducer) { +        sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {      return findInstance(node)->createInputSurface( -            port_index, bufferProducer); +            port_index, bufferProducer, type); +} + +status_t OMX::createPersistentInputSurface( +        sp<IGraphicBufferProducer> *bufferProducer, +        sp<IGraphicBufferConsumer> *bufferConsumer) { +    return OMXNodeInstance::createPersistentInputSurface( +            bufferProducer, bufferConsumer); +} + +status_t OMX::setInputSurface( +        node_id node, OMX_U32 port_index, +        const 
sp<IGraphicBufferConsumer> &bufferConsumer, MetadataBufferType *type) { +    return findInstance(node)->setInputSurface(port_index, bufferConsumer, type);  } +  status_t OMX::signalEndOfInputStream(node_id node) {      return findInstance(node)->signalEndOfInputStream();  } @@ -390,9 +411,9 @@ status_t OMX::allocateBuffer(  status_t OMX::allocateBufferWithBackup(          node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, -        buffer_id *buffer) { +        buffer_id *buffer, OMX_U32 allottedSize) {      return findInstance(node)->allocateBufferWithBackup( -            port_index, params, buffer); +            port_index, params, buffer, allottedSize);  }  status_t OMX::freeBuffer(node_id node, OMX_U32 port_index, buffer_id buffer) { @@ -400,17 +421,17 @@ status_t OMX::freeBuffer(node_id node, OMX_U32 port_index, buffer_id buffer) {              port_index, buffer);  } -status_t OMX::fillBuffer(node_id node, buffer_id buffer) { -    return findInstance(node)->fillBuffer(buffer); +status_t OMX::fillBuffer(node_id node, buffer_id buffer, int fenceFd) { +    return findInstance(node)->fillBuffer(buffer, fenceFd);  }  status_t OMX::emptyBuffer(          node_id node,          buffer_id buffer,          OMX_U32 range_offset, OMX_U32 range_length, -        OMX_U32 flags, OMX_TICKS timestamp) { +        OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {      return findInstance(node)->emptyBuffer( -            buffer, range_offset, range_length, flags, timestamp); +            buffer, range_offset, range_length, flags, timestamp, fenceFd);  }  status_t OMX::getExtensionIndex( @@ -435,31 +456,56 @@ OMX_ERRORTYPE OMX::OnEvent(          OMX_IN OMX_EVENTTYPE eEvent,          OMX_IN OMX_U32 nData1,          OMX_IN OMX_U32 nData2, -        OMX_IN OMX_PTR /* pEventData */) { +        OMX_IN OMX_PTR pEventData) {      ALOGV("OnEvent(%d, %" PRIu32", %" PRIu32 ")", eEvent, nData1, nData2);      // Forward to OMXNodeInstance.      
findInstance(node)->onEvent(eEvent, nData1, nData2); +    sp<OMX::CallbackDispatcher> dispatcher = findDispatcher(node); + +    // output rendered events are not processed as regular events until they hit the observer +    if (eEvent == OMX_EventOutputRendered) { +        if (pEventData == NULL) { +            return OMX_ErrorBadParameter; +        } + +        // process data from array +        OMX_VIDEO_RENDEREVENTTYPE *renderData = (OMX_VIDEO_RENDEREVENTTYPE *)pEventData; +        for (size_t i = 0; i < nData1; ++i) { +            omx_message msg; +            msg.type = omx_message::FRAME_RENDERED; +            msg.node = node; +            msg.fenceFd = -1; +            msg.u.render_data.timestamp = renderData[i].nMediaTimeUs; +            msg.u.render_data.nanoTime = renderData[i].nSystemTimeNs; + +            dispatcher->post(msg, false /* realTime */); +        } +        return OMX_ErrorNone; +    } +      omx_message msg;      msg.type = omx_message::EVENT;      msg.node = node; +    msg.fenceFd = -1;      msg.u.event_data.event = eEvent;      msg.u.event_data.data1 = nData1;      msg.u.event_data.data2 = nData2; -    findDispatcher(node)->post(msg); +    dispatcher->post(msg, true /* realTime */);      return OMX_ErrorNone;  }  OMX_ERRORTYPE OMX::OnEmptyBufferDone( -        node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) { +        node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd) {      ALOGV("OnEmptyBufferDone buffer=%p", pBuffer);      omx_message msg;      msg.type = omx_message::EMPTY_BUFFER_DONE;      msg.node = node; +    msg.fenceFd = fenceFd;      msg.u.buffer_data.buffer = buffer;      findDispatcher(node)->post(msg); @@ -468,12 +514,13 @@ OMX_ERRORTYPE OMX::OnEmptyBufferDone(  }  OMX_ERRORTYPE OMX::OnFillBufferDone( -        node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) { +        node_id node, buffer_id buffer, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer, int fenceFd) {  
    ALOGV("OnFillBufferDone buffer=%p", pBuffer);      omx_message msg;      msg.type = omx_message::FILL_BUFFER_DONE;      msg.node = node; +    msg.fenceFd = fenceFd;      msg.u.extended_buffer_data.buffer = buffer;      msg.u.extended_buffer_data.range_offset = pBuffer->nOffset;      msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen; diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 63c8f54..9f1c5d8 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -32,6 +32,7 @@  #include <gui/BufferQueue.h>  #include <HardwareAPI.h>  #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ABuffer.h>  #include <media/stagefright/MediaErrors.h>  #include <utils/misc.h> @@ -75,11 +76,11 @@ static const OMX_U32 kPortIndexOutput = 1;  #define SIMPLE_NEW_BUFFER(buffer_id, port, size, data) \      NEW_BUFFER_FMT(buffer_id, port, "%zu@%p", (size), (data)) -#define EMPTY_BUFFER(addr, header) "%#x [%u@%p]", \ -    (addr), (header)->nAllocLen, (header)->pBuffer -#define FULL_BUFFER(addr, header) "%#" PRIxPTR " [%u@%p (%u..+%u) f=%x ts=%lld]", \ +#define EMPTY_BUFFER(addr, header, fenceFd) "%#x [%u@%p fc=%d]", \ +    (addr), (header)->nAllocLen, (header)->pBuffer, (fenceFd) +#define FULL_BUFFER(addr, header, fenceFd) "%#" PRIxPTR " [%u@%p (%u..+%u) f=%x ts=%lld fc=%d]", \      (intptr_t)(addr), (header)->nAllocLen, (header)->pBuffer, \ -    (header)->nOffset, (header)->nFilledLen, (header)->nFlags, (header)->nTimeStamp +    (header)->nOffset, (header)->nFilledLen, (header)->nFlags, (header)->nTimeStamp, (fenceFd)  #define WITH_STATS_WRAPPER(fmt, ...) 
fmt " { IN=%zu/%zu OUT=%zu/%zu }", ##__VA_ARGS__, \      mInputBuffersWithCodec.size(), mNumPortBuffers[kPortIndexInput], \ @@ -120,9 +121,10 @@ struct BufferMeta {              return;          } -        memcpy((OMX_U8 *)mMem->pointer() + header->nOffset, -                header->pBuffer + header->nOffset, -                header->nFilledLen); +        // check component returns proper range +        sp<ABuffer> codec = getBuffer(header, false /* backup */, true /* limit */); + +        memcpy((OMX_U8 *)mMem->pointer() + header->nOffset, codec->data(), codec->size());      }      void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) { @@ -135,6 +137,25 @@ struct BufferMeta {                  header->nFilledLen);      } +    // return either the codec or the backup buffer +    sp<ABuffer> getBuffer(const OMX_BUFFERHEADERTYPE *header, bool backup, bool limit) { +        sp<ABuffer> buf; +        if (backup && mMem != NULL) { +            buf = new ABuffer(mMem->pointer(), mMem->size()); +        } else { +            buf = new ABuffer(header->pBuffer, header->nAllocLen); +        } +        if (limit) { +            if (header->nOffset + header->nFilledLen > header->nOffset +                    && header->nOffset + header->nFilledLen <= header->nAllocLen) { +                buf->setRange(header->nOffset, header->nFilledLen); +            } else { +                buf->setRange(0, 0); +            } +        } +        return buf; +    } +      void setGraphicBuffer(const sp<GraphicBuffer> &graphicBuffer) {          mGraphicBuffer = graphicBuffer;      } @@ -180,6 +201,8 @@ OMXNodeInstance::OMXNodeInstance(      mNumPortBuffers[1] = 0;      mDebugLevelBumpPendingBuffers[0] = 0;      mDebugLevelBumpPendingBuffers[1] = 0; +    mMetadataType[0] = kMetadataBufferTypeInvalid; +    mMetadataType[1] = kMetadataBufferTypeInvalid;  }  OMXNodeInstance::~OMXNodeInstance() { @@ -218,13 +241,15 @@ OMX::node_id OMXNodeInstance::nodeID() {      return mNodeID;  } -static status_t 
StatusFromOMXError(OMX_ERRORTYPE err) { +status_t StatusFromOMXError(OMX_ERRORTYPE err) {      switch (err) {          case OMX_ErrorNone:              return OK;          case OMX_ErrorUnsupportedSetting:          case OMX_ErrorUnsupportedIndex:              return ERROR_UNSUPPORTED; +        case OMX_ErrorInsufficientResources: +            return NO_MEMORY;          default:              return UNKNOWN_ERROR;      } @@ -484,63 +509,73 @@ status_t OMXNodeInstance::getGraphicBufferUsage(  }  status_t OMXNodeInstance::storeMetaDataInBuffers( -        OMX_U32 portIndex, -        OMX_BOOL enable) { +        OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type) {      Mutex::Autolock autolock(mLock);      CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u en:%d", portString(portIndex), portIndex, enable); -    return storeMetaDataInBuffers_l( -            portIndex, enable, -            OMX_FALSE /* useGraphicBuffer */, NULL /* usingGraphicBufferInMetadata */); +    return storeMetaDataInBuffers_l(portIndex, enable, type);  }  status_t OMXNodeInstance::storeMetaDataInBuffers_l( -        OMX_U32 portIndex, -        OMX_BOOL enable, -        OMX_BOOL useGraphicBuffer, -        OMX_BOOL *usingGraphicBufferInMetadata) { +        OMX_U32 portIndex, OMX_BOOL enable, MetadataBufferType *type) { +    if (portIndex != kPortIndexInput && portIndex != kPortIndexOutput) { +        return BAD_VALUE; +    } +      OMX_INDEXTYPE index;      OMX_STRING name = const_cast<OMX_STRING>(              "OMX.google.android.index.storeMetaDataInBuffers"); -    OMX_STRING graphicBufferName = const_cast<OMX_STRING>( -            "OMX.google.android.index.storeGraphicBufferInMetaData"); -    if (usingGraphicBufferInMetadata == NULL) { -        usingGraphicBufferInMetadata = &useGraphicBuffer; -    } +    OMX_STRING nativeBufferName = const_cast<OMX_STRING>( +            "OMX.google.android.index.storeANWBufferInMetadata"); +    MetadataBufferType negotiatedType; -    OMX_ERRORTYPE err = -        
(useGraphicBuffer && portIndex == kPortIndexInput) -                ? OMX_GetExtensionIndex(mHandle, graphicBufferName, &index) -                : OMX_ErrorBadParameter; -    if (err == OMX_ErrorNone) { -        *usingGraphicBufferInMetadata = OMX_TRUE; -        name = graphicBufferName; -    } else { -        err = OMX_GetExtensionIndex(mHandle, name, &index); -    } +    StoreMetaDataInBuffersParams params; +    InitOMXParams(¶ms); +    params.nPortIndex = portIndex; +    params.bStoreMetaData = enable; +    OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, nativeBufferName, &index);      OMX_ERRORTYPE xerr = err;      if (err == OMX_ErrorNone) { -        StoreMetaDataInBuffersParams params; -        InitOMXParams(¶ms); -        params.nPortIndex = portIndex; -        params.bStoreMetaData = enable; -          err = OMX_SetParameter(mHandle, index, ¶ms); +        if (err == OMX_ErrorNone) { +            name = nativeBufferName; // set name for debugging +            negotiatedType = kMetadataBufferTypeANWBuffer; +        } +    } +    if (err != OMX_ErrorNone) { +        err = OMX_GetExtensionIndex(mHandle, name, &index); +        xerr = err; +        if (err == OMX_ErrorNone) { +            negotiatedType = kMetadataBufferTypeGrallocSource; +            err = OMX_SetParameter(mHandle, index, ¶ms); +        }      }      // don't log loud error if component does not support metadata mode on the output      if (err != OMX_ErrorNone) { -        *usingGraphicBufferInMetadata = OMX_FALSE;          if (err == OMX_ErrorUnsupportedIndex && portIndex == kPortIndexOutput) {              CLOGW("component does not support metadata mode; using fallback");          } else if (xerr != OMX_ErrorNone) {              CLOG_ERROR(getExtensionIndex, xerr, "%s", name);          } else { -            CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d GB=%d", name, index, -                    portString(portIndex), portIndex, enable, useGraphicBuffer); +            
CLOG_ERROR(setParameter, err, "%s(%#x): %s:%u en=%d type=%d", name, index, +                    portString(portIndex), portIndex, enable, negotiatedType); +        } +        negotiatedType = mMetadataType[portIndex]; +    } else { +        if (!enable) { +            negotiatedType = kMetadataBufferTypeInvalid;          } +        mMetadataType[portIndex] = negotiatedType; +    } +    CLOG_CONFIG(storeMetaDataInBuffers, "%s:%u negotiated %s:%d", +            portString(portIndex), portIndex, asString(negotiatedType), negotiatedType); + +    if (type != NULL) { +        *type = negotiatedType;      } +      return StatusFromOMXError(err);  } @@ -618,8 +653,11 @@ status_t OMXNodeInstance::configureVideoTunnelMode(  status_t OMXNodeInstance::useBuffer(          OMX_U32 portIndex, const sp<IMemory> ¶ms, -        OMX::buffer_id *buffer) { +        OMX::buffer_id *buffer, OMX_U32 allottedSize) {      Mutex::Autolock autoLock(mLock); +    if (allottedSize > params->size()) { +        return BAD_VALUE; +    }      BufferMeta *buffer_meta = new BufferMeta(params); @@ -627,10 +665,11 @@ status_t OMXNodeInstance::useBuffer(      OMX_ERRORTYPE err = OMX_UseBuffer(              mHandle, &header, portIndex, buffer_meta, -            params->size(), static_cast<OMX_U8 *>(params->pointer())); +            allottedSize, static_cast<OMX_U8 *>(params->pointer()));      if (err != OMX_ErrorNone) { -        CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER(portIndex, params->size(), params->pointer())); +        CLOG_ERROR(useBuffer, err, SIMPLE_BUFFER( +                portIndex, (size_t)allottedSize, params->pointer()));          delete buffer_meta;          buffer_meta = NULL; @@ -652,7 +691,7 @@ status_t OMXNodeInstance::useBuffer(      }      CLOG_BUFFER(useBuffer, NEW_BUFFER_FMT( -            *buffer, portIndex, "%zu@%p", params->size(), params->pointer())); +            *buffer, portIndex, "%u(%zu)@%p", allottedSize, params->size(), params->pointer()));      return OK;  } @@ -768,38 
+807,60 @@ status_t OMXNodeInstance::useGraphicBuffer(      return OK;  } -status_t OMXNodeInstance::updateGraphicBufferInMeta( +status_t OMXNodeInstance::updateGraphicBufferInMeta_l(          OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer, -        OMX::buffer_id buffer) { -    Mutex::Autolock autoLock(mLock); +        OMX::buffer_id buffer, OMX_BUFFERHEADERTYPE *header) { +    if (portIndex != kPortIndexInput && portIndex != kPortIndexOutput) { +        return BAD_VALUE; +    } -    OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer); -    VideoDecoderOutputMetaData *metadata = -        (VideoDecoderOutputMetaData *)(header->pBuffer);      BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);      bufferMeta->setGraphicBuffer(graphicBuffer); -    metadata->eType = kMetadataBufferTypeGrallocSource; -    metadata->pHandle = graphicBuffer->handle; +    if (mMetadataType[portIndex] == kMetadataBufferTypeGrallocSource +            && header->nAllocLen >= sizeof(VideoGrallocMetadata)) { +        VideoGrallocMetadata &metadata = *(VideoGrallocMetadata *)(header->pBuffer); +        metadata.eType = kMetadataBufferTypeGrallocSource; +        metadata.pHandle = graphicBuffer == NULL ? NULL : graphicBuffer->handle; +    } else if (mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer +            && header->nAllocLen >= sizeof(VideoNativeMetadata)) { +        VideoNativeMetadata &metadata = *(VideoNativeMetadata *)(header->pBuffer); +        metadata.eType = kMetadataBufferTypeANWBuffer; +        metadata.pBuffer = graphicBuffer == NULL ? 
NULL : graphicBuffer->getNativeBuffer(); +        metadata.nFenceFd = -1; +    } else { +        CLOG_BUFFER(updateGraphicBufferInMeta, "%s:%u, %#x bad type (%d) or size (%u)", +            portString(portIndex), portIndex, buffer, mMetadataType[portIndex], header->nAllocLen); +        return BAD_VALUE; +    } +      CLOG_BUFFER(updateGraphicBufferInMeta, "%s:%u, %#x := %p", -            portString(portIndex), portIndex, buffer, graphicBuffer->handle); +            portString(portIndex), portIndex, buffer, +            graphicBuffer == NULL ? NULL : graphicBuffer->handle);      return OK;  } -status_t OMXNodeInstance::createInputSurface( -        OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer) { -    Mutex::Autolock autolock(mLock); +status_t OMXNodeInstance::updateGraphicBufferInMeta( +        OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer, +        OMX::buffer_id buffer) { +    Mutex::Autolock autoLock(mLock); +    OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer); +    return updateGraphicBufferInMeta_l(portIndex, graphicBuffer, buffer, header); +} + +status_t OMXNodeInstance::createGraphicBufferSource( +        OMX_U32 portIndex, sp<IGraphicBufferConsumer> bufferConsumer, MetadataBufferType *type) {      status_t err;      const sp<GraphicBufferSource>& surfaceCheck = getGraphicBufferSource();      if (surfaceCheck != NULL) { +        if (portIndex < NELEM(mMetadataType) && type != NULL) { +            *type = mMetadataType[portIndex]; +        }          return ALREADY_EXISTS;      } -    // Input buffers will hold meta-data (gralloc references). -    OMX_BOOL usingGraphicBuffer = OMX_FALSE; -    err = storeMetaDataInBuffers_l( -            portIndex, OMX_TRUE, -            OMX_TRUE /* useGraphicBuffer */, &usingGraphicBuffer); +    // Input buffers will hold meta-data (ANativeWindowBuffer references). 
+    err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE, type);      if (err != OK) {          return err;      } @@ -825,19 +886,75 @@ status_t OMXNodeInstance::createInputSurface(          return INVALID_OPERATION;      } -    GraphicBufferSource* bufferSource = new GraphicBufferSource( -            this, def.format.video.nFrameWidth, def.format.video.nFrameHeight, -            def.nBufferCountActual, usingGraphicBuffer); +    uint32_t usageBits; +    oerr = OMX_GetParameter( +            mHandle, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, &usageBits); +    if (oerr != OMX_ErrorNone) { +        usageBits = 0; +    } + +    sp<GraphicBufferSource> bufferSource = new GraphicBufferSource(this, +            def.format.video.nFrameWidth, +            def.format.video.nFrameHeight, +            def.nBufferCountActual, +            usageBits, +            bufferConsumer); +      if ((err = bufferSource->initCheck()) != OK) { -        delete bufferSource;          return err;      }      setGraphicBufferSource(bufferSource); -    *bufferProducer = bufferSource->getIGraphicBufferProducer();      return OK;  } +status_t OMXNodeInstance::createInputSurface( +        OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) { +    Mutex::Autolock autolock(mLock); +    status_t err = createGraphicBufferSource(portIndex, NULL /* bufferConsumer */, type); + +    if (err != OK) { +        return err; +    } + +    *bufferProducer = mGraphicBufferSource->getIGraphicBufferProducer(); +    return OK; +} + +//static +status_t OMXNodeInstance::createPersistentInputSurface( +        sp<IGraphicBufferProducer> *bufferProducer, +        sp<IGraphicBufferConsumer> *bufferConsumer) { +    String8 name("GraphicBufferSource"); + +    sp<IGraphicBufferProducer> producer; +    sp<IGraphicBufferConsumer> consumer; +    BufferQueue::createBufferQueue(&producer, &consumer); +    consumer->setConsumerName(name); +    
consumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER); + +    sp<BufferQueue::ProxyConsumerListener> proxy = +        new BufferQueue::ProxyConsumerListener(NULL); +    status_t err = consumer->consumerConnect(proxy, false); +    if (err != NO_ERROR) { +        ALOGE("Error connecting to BufferQueue: %s (%d)", +                strerror(-err), err); +        return err; +    } + +    *bufferProducer = producer; +    *bufferConsumer = consumer; + +    return OK; +} + +status_t OMXNodeInstance::setInputSurface( +        OMX_U32 portIndex, const sp<IGraphicBufferConsumer> &bufferConsumer, +        MetadataBufferType *type) { +    Mutex::Autolock autolock(mLock); +    return createGraphicBufferSource(portIndex, bufferConsumer, type); +} +  status_t OMXNodeInstance::signalEndOfInputStream() {      // For non-Surface input, the MediaCodec should convert the call to a      // pair of requests (dequeue input buffer, queue input buffer with EOS @@ -890,19 +1007,21 @@ status_t OMXNodeInstance::allocateBuffer(  status_t OMXNodeInstance::allocateBufferWithBackup(          OMX_U32 portIndex, const sp<IMemory> ¶ms, -        OMX::buffer_id *buffer) { +        OMX::buffer_id *buffer, OMX_U32 allottedSize) {      Mutex::Autolock autoLock(mLock); +    if (allottedSize > params->size()) { +        return BAD_VALUE; +    }      BufferMeta *buffer_meta = new BufferMeta(params, true);      OMX_BUFFERHEADERTYPE *header;      OMX_ERRORTYPE err = OMX_AllocateBuffer( -            mHandle, &header, portIndex, buffer_meta, params->size()); - +            mHandle, &header, portIndex, buffer_meta, allottedSize);      if (err != OMX_ErrorNone) {          CLOG_ERROR(allocateBufferWithBackup, err, -                SIMPLE_BUFFER(portIndex, params->size(), params->pointer())); +                SIMPLE_BUFFER(portIndex, (size_t)allottedSize, params->pointer()));          delete buffer_meta;          buffer_meta = NULL; @@ -922,8 +1041,8 @@ status_t OMXNodeInstance::allocateBufferWithBackup(    
      bufferSource->addCodecBuffer(header);      } -    CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %p", -            params->size(), params->pointer(), header->pBuffer)); +    CLOG_BUFFER(allocateBufferWithBackup, NEW_BUFFER_FMT(*buffer, portIndex, "%zu@%p :> %u@%p", +            params->size(), params->pointer(), allottedSize, header->pBuffer));      return OK;  } @@ -948,7 +1067,7 @@ status_t OMXNodeInstance::freeBuffer(      return StatusFromOMXError(err);  } -status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) { +status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer, int fenceFd) {      Mutex::Autolock autoLock(mLock);      OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer); @@ -956,15 +1075,22 @@ status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) {      header->nOffset = 0;      header->nFlags = 0; +    // meta now owns fenceFd +    status_t res = storeFenceInMeta_l(header, fenceFd, kPortIndexOutput); +    if (res != OK) { +        CLOG_ERROR(fillBuffer::storeFenceInMeta, res, EMPTY_BUFFER(buffer, header, fenceFd)); +        return res; +    } +      {          Mutex::Autolock _l(mDebugLock);          mOutputBuffersWithCodec.add(header); -        CLOG_BUMPED_BUFFER(fillBuffer, WITH_STATS(EMPTY_BUFFER(buffer, header))); +        CLOG_BUMPED_BUFFER(fillBuffer, WITH_STATS(EMPTY_BUFFER(buffer, header, fenceFd)));      }      OMX_ERRORTYPE err = OMX_FillThisBuffer(mHandle, header);      if (err != OMX_ErrorNone) { -        CLOG_ERROR(fillBuffer, err, EMPTY_BUFFER(buffer, header)); +        CLOG_ERROR(fillBuffer, err, EMPTY_BUFFER(buffer, header, fenceFd));          Mutex::Autolock _l(mDebugLock);          mOutputBuffersWithCodec.remove(header);      } @@ -974,24 +1100,48 @@ status_t OMXNodeInstance::fillBuffer(OMX::buffer_id buffer) {  status_t OMXNodeInstance::emptyBuffer(          OMX::buffer_id buffer,          OMX_U32 rangeOffset, OMX_U32 rangeLength, -        OMX_U32 flags, OMX_TICKS 
timestamp) { +        OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {      Mutex::Autolock autoLock(mLock);      OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer); -    // rangeLength and rangeOffset must be a subset of the allocated data in the buffer. -    // corner case: we permit rangeOffset == end-of-buffer with rangeLength == 0. -    if (rangeOffset > header->nAllocLen -            || rangeLength > header->nAllocLen - rangeOffset) { -        return BAD_VALUE; -    } -    header->nFilledLen = rangeLength; -    header->nOffset = rangeOffset; -      BufferMeta *buffer_meta =          static_cast<BufferMeta *>(header->pAppPrivate); -    buffer_meta->CopyToOMX(header); +    sp<ABuffer> backup = buffer_meta->getBuffer(header, true /* backup */, false /* limit */); +    sp<ABuffer> codec = buffer_meta->getBuffer(header, false /* backup */, false /* limit */); + +    // convert incoming ANW meta buffers if component is configured for gralloc metadata mode +    // ignore rangeOffset in this case +    if (mMetadataType[kPortIndexInput] == kMetadataBufferTypeGrallocSource +            && backup->capacity() >= sizeof(VideoNativeMetadata) +            && codec->capacity() >= sizeof(VideoGrallocMetadata) +            && ((VideoNativeMetadata *)backup->base())->eType +                    == kMetadataBufferTypeANWBuffer) { +        VideoNativeMetadata &backupMeta = *(VideoNativeMetadata *)backup->base(); +        VideoGrallocMetadata &codecMeta = *(VideoGrallocMetadata *)codec->base(); +        CLOG_BUFFER(emptyBuffer, "converting ANWB %p to handle %p", +                backupMeta.pBuffer, backupMeta.pBuffer->handle); +        codecMeta.pHandle = backupMeta.pBuffer != NULL ? backupMeta.pBuffer->handle : NULL; +        codecMeta.eType = kMetadataBufferTypeGrallocSource; +        header->nFilledLen = rangeLength ? 
sizeof(codecMeta) : 0; +        header->nOffset = 0; +    } else { +        // rangeLength and rangeOffset must be a subset of the allocated data in the buffer. +        // corner case: we permit rangeOffset == end-of-buffer with rangeLength == 0. +        if (rangeOffset > header->nAllocLen +                || rangeLength > header->nAllocLen - rangeOffset) { +            CLOG_ERROR(emptyBuffer, OMX_ErrorBadParameter, FULL_BUFFER(NULL, header, fenceFd)); +            if (fenceFd >= 0) { +                ::close(fenceFd); +            } +            return BAD_VALUE; +        } +        header->nFilledLen = rangeLength; +        header->nOffset = rangeOffset; -    return emptyBuffer_l(header, flags, timestamp, (intptr_t)buffer); +        buffer_meta->CopyToOMX(header); +    } + +    return emptyBuffer_l(header, flags, timestamp, (intptr_t)buffer, fenceFd);  }  // log queued buffer activity for the next few input and/or output frames @@ -1018,11 +1168,62 @@ void OMXNodeInstance::unbumpDebugLevel_l(size_t portIndex) {      }  } +status_t OMXNodeInstance::storeFenceInMeta_l( +        OMX_BUFFERHEADERTYPE *header, int fenceFd, OMX_U32 portIndex) { +    // propagate fence if component supports it; wait for it otherwise +    OMX_U32 metaSize = portIndex == kPortIndexInput ? 
header->nFilledLen : header->nAllocLen; +    if (mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer +            && metaSize >= sizeof(VideoNativeMetadata)) { +        VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)(header->pBuffer); +        if (nativeMeta.nFenceFd >= 0) { +            ALOGE("fence (%d) already exists in meta", nativeMeta.nFenceFd); +            if (fenceFd >= 0) { +                ::close(fenceFd); +            } +            return ALREADY_EXISTS; +        } +        nativeMeta.nFenceFd = fenceFd; +    } else if (fenceFd >= 0) { +        CLOG_BUFFER(storeFenceInMeta, "waiting for fence %d", fenceFd); +        sp<Fence> fence = new Fence(fenceFd); +        return fence->wait(IOMX::kFenceTimeoutMs); +    } +    return OK; +} + +int OMXNodeInstance::retrieveFenceFromMeta_l( +        OMX_BUFFERHEADERTYPE *header, OMX_U32 portIndex) { +    OMX_U32 metaSize = portIndex == kPortIndexInput ? header->nAllocLen : header->nFilledLen; +    int fenceFd = -1; +    if (mMetadataType[portIndex] == kMetadataBufferTypeANWBuffer +            && header->nAllocLen >= sizeof(VideoNativeMetadata)) { +        VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)(header->pBuffer); +        if (nativeMeta.eType == kMetadataBufferTypeANWBuffer) { +            fenceFd = nativeMeta.nFenceFd; +            nativeMeta.nFenceFd = -1; +        } +        if (metaSize < sizeof(nativeMeta) && fenceFd >= 0) { +            CLOG_ERROR(foundFenceInEmptyMeta, BAD_VALUE, FULL_BUFFER( +                    NULL, header, nativeMeta.nFenceFd)); +            fenceFd = -1; +        } +    } +    return fenceFd; +} +  status_t OMXNodeInstance::emptyBuffer_l( -        OMX_BUFFERHEADERTYPE *header, OMX_U32 flags, OMX_TICKS timestamp, intptr_t debugAddr) { +        OMX_BUFFERHEADERTYPE *header, OMX_U32 flags, OMX_TICKS timestamp, +        intptr_t debugAddr, int fenceFd) {      header->nFlags = flags;      header->nTimeStamp = timestamp; +    status_t res = 
storeFenceInMeta_l(header, fenceFd, kPortIndexInput); +    if (res != OK) { +        CLOG_ERROR(emptyBuffer::storeFenceInMeta, res, WITH_STATS( +                FULL_BUFFER(debugAddr, header, fenceFd))); +        return res; +    } +      {          Mutex::Autolock _l(mDebugLock);          mInputBuffersWithCodec.add(header); @@ -1032,11 +1233,11 @@ status_t OMXNodeInstance::emptyBuffer_l(              bumpDebugLevel_l(2 /* numInputBuffers */, 0 /* numOutputBuffers */);          } -        CLOG_BUMPED_BUFFER(emptyBuffer, WITH_STATS(FULL_BUFFER(debugAddr, header))); +        CLOG_BUMPED_BUFFER(emptyBuffer, WITH_STATS(FULL_BUFFER(debugAddr, header, fenceFd)));      }      OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header); -    CLOG_IF_ERROR(emptyBuffer, err, FULL_BUFFER(debugAddr, header)); +    CLOG_IF_ERROR(emptyBuffer, err, FULL_BUFFER(debugAddr, header, fenceFd));      {          Mutex::Autolock _l(mDebugLock); @@ -1051,16 +1252,21 @@ status_t OMXNodeInstance::emptyBuffer_l(  }  // like emptyBuffer, but the data is already in header->pBuffer -status_t OMXNodeInstance::emptyDirectBuffer( -        OMX_BUFFERHEADERTYPE *header, -        OMX_U32 rangeOffset, OMX_U32 rangeLength, -        OMX_U32 flags, OMX_TICKS timestamp) { +status_t OMXNodeInstance::emptyGraphicBuffer( +        OMX_BUFFERHEADERTYPE *header, const sp<GraphicBuffer> &graphicBuffer, +        OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {      Mutex::Autolock autoLock(mLock); +    OMX::buffer_id buffer = findBufferID(header); +    status_t err = updateGraphicBufferInMeta_l(kPortIndexInput, graphicBuffer, buffer, header); +    if (err != OK) { +        CLOG_ERROR(emptyGraphicBuffer, err, FULL_BUFFER( +                (intptr_t)header->pBuffer, header, fenceFd)); +        return err; +    } -    header->nFilledLen = rangeLength; -    header->nOffset = rangeOffset; - -    return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer); +    header->nOffset = 0; +    header->nFilledLen 
= graphicBuffer == NULL ? 0 : header->nAllocLen; +    return emptyBuffer_l(header, flags, timestamp, (intptr_t)header->pBuffer, fenceFd);  }  status_t OMXNodeInstance::getExtensionIndex( @@ -1079,6 +1285,7 @@ inline static const char *asString(IOMX::InternalOptionType i, const char *def =          case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:              return "REPEAT_PREVIOUS_FRAME_DELAY";          case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: return "MAX_TIMESTAMP_GAP"; +        case IOMX::INTERNAL_OPTION_MAX_FPS:           return "MAX_FPS";          case IOMX::INTERNAL_OPTION_START_TIME:        return "START_TIME";          case IOMX::INTERNAL_OPTION_TIME_LAPSE:        return "TIME_LAPSE";          default:                                      return def; @@ -1096,6 +1303,7 @@ status_t OMXNodeInstance::setInternalOption(          case IOMX::INTERNAL_OPTION_SUSPEND:          case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:          case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: +        case IOMX::INTERNAL_OPTION_MAX_FPS:          case IOMX::INTERNAL_OPTION_START_TIME:          case IOMX::INTERNAL_OPTION_TIME_LAPSE:          { @@ -1133,6 +1341,14 @@ status_t OMXNodeInstance::setInternalOption(                  int64_t maxGapUs = *(int64_t *)data;                  CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs);                  return bufferSource->setMaxTimestampGapUs(maxGapUs); +            } else if (type == IOMX::INTERNAL_OPTION_MAX_FPS) { +                if (size != sizeof(float)) { +                    return INVALID_OPERATION; +                } + +                float maxFps = *(float *)data; +                CLOG_CONFIG(setInternalOption, "maxFps=%f", maxFps); +                return bufferSource->setMaxFps(maxFps);              } else if (type == IOMX::INTERNAL_OPTION_START_TIME) {                  if (size != sizeof(int64_t)) {                      return INVALID_OPERATION; @@ -1162,7 +1378,7 @@ status_t 
OMXNodeInstance::setInternalOption(      }  } -void OMXNodeInstance::onMessage(const omx_message &msg) { +bool OMXNodeInstance::handleMessage(omx_message &msg) {      const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());      if (msg.type == omx_message::FILL_BUFFER_DONE) { @@ -1174,7 +1390,8 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {              mOutputBuffersWithCodec.remove(buffer);              CLOG_BUMPED_BUFFER( -                    FBD, WITH_STATS(FULL_BUFFER(msg.u.extended_buffer_data.buffer, buffer))); +                    FBD, WITH_STATS(FULL_BUFFER( +                            msg.u.extended_buffer_data.buffer, buffer, msg.fenceFd)));              unbumpDebugLevel_l(kPortIndexOutput);          } @@ -1182,16 +1399,18 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {          BufferMeta *buffer_meta =              static_cast<BufferMeta *>(buffer->pAppPrivate); +        if (buffer->nOffset + buffer->nFilledLen < buffer->nOffset +                || buffer->nOffset + buffer->nFilledLen > buffer->nAllocLen) { +            CLOG_ERROR(onFillBufferDone, OMX_ErrorBadParameter, +                    FULL_BUFFER(NULL, buffer, msg.fenceFd)); +        }          buffer_meta->CopyFromOMX(buffer);          if (bufferSource != NULL) {              // fix up the buffer info (especially timestamp) if needed              bufferSource->codecBufferFilled(buffer); -            omx_message newMsg = msg; -            newMsg.u.extended_buffer_data.timestamp = buffer->nTimeStamp; -            mObserver->onMessage(newMsg); -            return; +            msg.u.extended_buffer_data.timestamp = buffer->nTimeStamp;          }      } else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {          OMX_BUFFERHEADERTYPE *buffer = @@ -1202,7 +1421,7 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {              mInputBuffersWithCodec.remove(buffer);              CLOG_BUMPED_BUFFER( -                    EBD, 
WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer))); +                    EBD, WITH_STATS(EMPTY_BUFFER(msg.u.buffer_data.buffer, buffer, msg.fenceFd)));          }          if (bufferSource != NULL) { @@ -1211,12 +1430,26 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {              // Don't dispatch a message back to ACodec, since it doesn't              // know that anyone asked to have the buffer emptied and will              // be very confused. -            bufferSource->codecBufferEmptied(buffer); -            return; +            bufferSource->codecBufferEmptied(buffer, msg.fenceFd); +            return true; +        } +    } + +    return false; +} + +void OMXNodeInstance::onMessages(std::list<omx_message> &messages) { +    for (std::list<omx_message>::iterator it = messages.begin(); it != messages.end(); ) { +        if (handleMessage(*it)) { +            messages.erase(it++); +        } else { +            ++it;          }      } -    mObserver->onMessage(msg); +    if (!messages.empty()) { +        mObserver->onMessages(messages); +    }  }  void OMXNodeInstance::onObserverDied(OMXMaster *master) { @@ -1306,8 +1539,9 @@ OMX_ERRORTYPE OMXNodeInstance::OnEmptyBufferDone(      if (instance->mDying) {          return OMX_ErrorNone;      } +    int fenceFd = instance->retrieveFenceFromMeta_l(pBuffer, kPortIndexOutput);      return instance->owner()->OnEmptyBufferDone(instance->nodeID(), -            instance->findBufferID(pBuffer), pBuffer); +            instance->findBufferID(pBuffer), pBuffer, fenceFd);  }  // static @@ -1319,8 +1553,9 @@ OMX_ERRORTYPE OMXNodeInstance::OnFillBufferDone(      if (instance->mDying) {          return OMX_ErrorNone;      } +    int fenceFd = instance->retrieveFenceFromMeta_l(pBuffer, kPortIndexOutput);      return instance->owner()->OnFillBufferDone(instance->nodeID(), -            instance->findBufferID(pBuffer), pBuffer); +            instance->findBufferID(pBuffer), pBuffer, fenceFd);  }  void 
OMXNodeInstance::addActiveBuffer(OMX_U32 portIndex, OMX::buffer_id id) { @@ -1381,7 +1616,12 @@ OMX_BUFFERHEADERTYPE *OMXNodeInstance::findBufferHeader(OMX::buffer_id buffer) {          return NULL;      }      Mutex::Autolock autoLock(mBufferIDLock); -    return mBufferIDToBufferHeader.valueFor(buffer); +    ssize_t index = mBufferIDToBufferHeader.indexOfKey(buffer); +    if (index < 0) { +        CLOGW("findBufferHeader: buffer %u not found", buffer); +        return NULL; +    } +    return mBufferIDToBufferHeader.valueAt(index);  }  OMX::buffer_id OMXNodeInstance::findBufferID(OMX_BUFFERHEADERTYPE *bufferHeader) { @@ -1389,7 +1629,12 @@ OMX::buffer_id OMXNodeInstance::findBufferID(OMX_BUFFERHEADERTYPE *bufferHeader)          return 0;      }      Mutex::Autolock autoLock(mBufferIDLock); -    return mBufferHeaderToBufferID.valueFor(bufferHeader); +    ssize_t index = mBufferHeaderToBufferID.indexOfKey(bufferHeader); +    if (index < 0) { +        CLOGW("findBufferID: bufferHeader %p not found", bufferHeader); +        return 0; +    } +    return mBufferHeaderToBufferID.valueAt(index);  }  void OMXNodeInstance::invalidateBufferID(OMX::buffer_id buffer) { @@ -1397,8 +1642,13 @@ void OMXNodeInstance::invalidateBufferID(OMX::buffer_id buffer) {          return;      }      Mutex::Autolock autoLock(mBufferIDLock); -    mBufferHeaderToBufferID.removeItem(mBufferIDToBufferHeader.valueFor(buffer)); -    mBufferIDToBufferHeader.removeItem(buffer); +    ssize_t index = mBufferIDToBufferHeader.indexOfKey(buffer); +    if (index < 0) { +        CLOGW("invalidateBufferID: buffer %u not found", buffer); +        return; +    } +    mBufferHeaderToBufferID.removeItem(mBufferIDToBufferHeader.valueAt(index)); +    mBufferIDToBufferHeader.removeItemsAt(index);  }  }  // namespace android diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp index 7f99dcd..e6a0c49 100644 --- 
a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp +++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp @@ -58,7 +58,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::sendCommand(          OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data) {      CHECK(data == NULL); -    sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler->id()); +    sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler);      msg->setInt32("cmd", cmd);      msg->setInt32("param", param);      msg->post(); @@ -307,7 +307,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::freeBuffer(  OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer(          OMX_BUFFERHEADERTYPE *buffer) { -    sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler->id()); +    sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler);      msg->setPointer("header", buffer);      msg->post(); @@ -316,7 +316,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer(  OMX_ERRORTYPE SimpleSoftOMXComponent::fillThisBuffer(          OMX_BUFFERHEADERTYPE *buffer) { -    sp<AMessage> msg = new AMessage(kWhatFillThisBuffer, mHandler->id()); +    sp<AMessage> msg = new AMessage(kWhatFillThisBuffer, mHandler);      msg->setPointer("header", buffer);      msg->post(); @@ -505,7 +505,15 @@ void SimpleSoftOMXComponent::onPortFlush(      CHECK_LT(portIndex, mPorts.size());      PortInfo *port = &mPorts.editItemAt(portIndex); -    CHECK_EQ((int)port->mTransition, (int)PortInfo::NONE); +    // Ideally, the port should not in transitioning state when flushing. +    // However, in error handling case, e.g., the client can't allocate buffers +    // when it tries to re-enable the port, the port will be stuck in ENABLING. +    // The client will then transition the component from Executing to Idle, +    // which leads to flushing ports. At this time, it should be ok to notify +    // the client of the error and still clear all buffers on the port. 
+    if (port->mTransition != PortInfo::NONE) { +        notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); +    }      for (size_t i = 0; i < port->mBuffers.size(); ++i) {          BufferInfo *buffer = &port->mBuffers.editItemAt(i); @@ -598,7 +606,7 @@ void SimpleSoftOMXComponent::checkTransitions() {          if (port->mTransition == PortInfo::DISABLING) {              if (port->mBuffers.empty()) { -                ALOGV("Port %d now disabled.", i); +                ALOGV("Port %zu now disabled.", i);                  port->mTransition = PortInfo::NONE;                  notify(OMX_EventCmdComplete, OMX_CommandPortDisable, i, NULL); @@ -607,7 +615,7 @@ void SimpleSoftOMXComponent::checkTransitions() {              }          } else if (port->mTransition == PortInfo::ENABLING) {              if (port->mDef.bPopulated == OMX_TRUE) { -                ALOGV("Port %d now enabled.", i); +                ALOGV("Port %zu now enabled.", i);                  port->mTransition = PortInfo::NONE;                  port->mDef.bEnabled = OMX_TRUE; @@ -628,14 +636,14 @@ void SimpleSoftOMXComponent::addPort(const OMX_PARAM_PORTDEFINITIONTYPE &def) {      info->mTransition = PortInfo::NONE;  } -void SimpleSoftOMXComponent::onQueueFilled(OMX_U32 portIndex) { +void SimpleSoftOMXComponent::onQueueFilled(OMX_U32 portIndex __unused) {  } -void SimpleSoftOMXComponent::onPortFlushCompleted(OMX_U32 portIndex) { +void SimpleSoftOMXComponent::onPortFlushCompleted(OMX_U32 portIndex __unused) {  }  void SimpleSoftOMXComponent::onPortEnableCompleted( -        OMX_U32 portIndex, bool enabled) { +        OMX_U32 portIndex __unused, bool enabled __unused) {  }  List<SimpleSoftOMXComponent::BufferInfo *> & diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp index d4d6217..8ea7a6e 100644 --- a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp +++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp @@ -34,6 
+34,8 @@  #include <ui/GraphicBuffer.h>  #include <ui/GraphicBufferMapper.h> +#include <OMX_IndexExt.h> +  namespace android {  const static OMX_COLOR_FORMATTYPE kSupportedColorFormats[] = { @@ -155,7 +157,7 @@ void SoftVideoEncoderOMXComponent::updatePortParams() {      uint32_t rawBufferSize =          inDef->format.video.nStride * inDef->format.video.nSliceHeight * 3 / 2;      if (inDef->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) { -        inDef->nBufferSize = 4 + max(sizeof(buffer_handle_t), sizeof(GraphicBuffer *)); +        inDef->nBufferSize = max(sizeof(VideoNativeMetadata), sizeof(VideoGrallocMetadata));      } else {          inDef->nBufferSize = rawBufferSize;      } @@ -293,7 +295,7 @@ OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalSetParameter(  OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalGetParameter(          OMX_INDEXTYPE index, OMX_PTR param) { -    switch (index) { +    switch ((int)index) {          case OMX_IndexParamVideoErrorCorrection:          {              return OMX_ErrorNotImplemented; @@ -343,6 +345,13 @@ OMX_ERRORTYPE SoftVideoEncoderOMXComponent::internalGetParameter(              return OMX_ErrorNone;          } +        case OMX_IndexParamConsumerUsageBits: +        { +            OMX_U32 *usageBits = (OMX_U32 *)param; +            *usageBits = GRALLOC_USAGE_SW_READ_OFTEN; +            return OMX_ErrorNone; +        } +          default:              return SimpleSoftOMXComponent::internalGetParameter(index, param);      } @@ -482,8 +491,8 @@ const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(      size_t dstVStride = height;      MetadataBufferType bufferType = *(MetadataBufferType *)src; -    bool usingGraphicBuffer = bufferType == kMetadataBufferTypeGraphicBuffer; -    if (!usingGraphicBuffer && bufferType != kMetadataBufferTypeGrallocSource) { +    bool usingANWBuffer = bufferType == kMetadataBufferTypeANWBuffer; +    if (!usingANWBuffer && bufferType != 
kMetadataBufferTypeGrallocSource) {          ALOGE("Unsupported metadata type (%d)", bufferType);          return NULL;      } @@ -499,13 +508,14 @@ const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(      int format;      size_t srcStride;      size_t srcVStride; -    if (usingGraphicBuffer) { -        if (srcSize < sizeof(OMX_U32) + sizeof(GraphicBuffer *)) { -            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(OMX_U32) + sizeof(GraphicBuffer *)); +    if (usingANWBuffer) { +        if (srcSize < sizeof(VideoNativeMetadata)) { +            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoNativeMetadata));              return NULL;          } -        GraphicBuffer *buffer = *(GraphicBuffer **)(src + sizeof(OMX_U32)); +        VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)src; +        ANativeWindowBuffer *buffer = nativeMeta.pBuffer;          handle = buffer->handle;          format = buffer->format;          srcStride = buffer->stride; @@ -516,15 +526,26 @@ const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(              // TODO do we need to support other formats?              srcStride *= 4;          } + +        if (nativeMeta.nFenceFd >= 0) { +            sp<Fence> fence = new Fence(nativeMeta.nFenceFd); +            nativeMeta.nFenceFd = -1; +            status_t err = fence->wait(IOMX::kFenceTimeoutMs); +            if (err != OK) { +                ALOGE("Timed out waiting on input fence"); +                return NULL; +            } +        }      } else {          // TODO: remove this part.  Check if anyone uses this. 
-        if (srcSize < sizeof(OMX_U32) + sizeof(buffer_handle_t)) { -            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(OMX_U32) + sizeof(buffer_handle_t)); +        if (srcSize < sizeof(VideoGrallocMetadata)) { +            ALOGE("Metadata is too small (%zu vs %zu)", srcSize, sizeof(VideoGrallocMetadata));              return NULL;          } -        handle = *(buffer_handle_t *)(src + sizeof(OMX_U32)); +        VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)(src); +        handle = grallocMeta.pHandle;          // assume HAL_PIXEL_FORMAT_RGBA_8888          // there is no way to get the src stride without the graphic buffer          format = HAL_PIXEL_FORMAT_RGBA_8888; @@ -606,7 +627,7 @@ const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(  OMX_ERRORTYPE SoftVideoEncoderOMXComponent::getExtensionIndex(          const char *name, OMX_INDEXTYPE *index) {      if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") || -        !strcmp(name, "OMX.google.android.index.storeGraphicBufferInMetaData")) { +        !strcmp(name, "OMX.google.android.index.storeANWBufferInMetadata")) {          *(int32_t*)index = kStoreMetaDataExtensionIndex;          return OMX_ErrorNone;      } diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk index 447b29e..02e97f1 100644 --- a/media/libstagefright/omx/tests/Android.mk +++ b/media/libstagefright/omx/tests/Android.mk @@ -11,7 +11,8 @@ LOCAL_C_INCLUDES := \  	$(TOP)/frameworks/av/media/libstagefright \  	$(TOP)/frameworks/native/include/media/openmax -LOCAL_CFLAGS += -Werror +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  LOCAL_MODULE := omx_tests @@ -20,3 +21,24 @@ LOCAL_MODULE_TAGS := tests  LOCAL_32_BIT_ONLY := true  include $(BUILD_EXECUTABLE) + +include $(CLEAR_VARS) + +LOCAL_MODULE := FrameDropper_test + +LOCAL_MODULE_TAGS := tests + +LOCAL_SRC_FILES := \ +	FrameDropper_test.cpp \ + +LOCAL_SHARED_LIBRARIES := \ +	
libstagefright_omx \ +	libutils \ + +LOCAL_C_INCLUDES := \ +	frameworks/av/media/libstagefright/omx \ + +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true + +include $(BUILD_NATIVE_TEST) diff --git a/media/libstagefright/omx/tests/FrameDropper_test.cpp b/media/libstagefright/omx/tests/FrameDropper_test.cpp new file mode 100644 index 0000000..f966b5e --- /dev/null +++ b/media/libstagefright/omx/tests/FrameDropper_test.cpp @@ -0,0 +1,137 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "FrameDropper_test" +#include <utils/Log.h> + +#include <gtest/gtest.h> + +#include "FrameDropper.h" +#include <media/stagefright/foundation/ADebug.h> + +namespace android { + +struct TestFrame { +  int64_t timeUs; +  bool shouldDrop; +}; + +static const TestFrame testFrames20Fps[] = { +    {1000000, false}, {1050000, false}, {1100000, false}, {1150000, false}, +    {1200000, false}, {1250000, false}, {1300000, false}, {1350000, false}, +    {1400000, false}, {1450000, false}, {1500000, false}, {1550000, false}, +    {1600000, false}, {1650000, false}, {1700000, false}, {1750000, false}, +    {1800000, false}, {1850000, false}, {1900000, false}, {1950000, false}, +}; + +static const TestFrame testFrames30Fps[] = { +    {1000000, false}, {1033333, false}, {1066667, false}, {1100000, false}, +    {1133333, false}, {1166667, false}, {1200000, false}, {1233333, false}, +    {1266667, false}, {1300000, false}, {1333333, false}, {1366667, false}, +    {1400000, false}, {1433333, false}, {1466667, false}, {1500000, false}, +    {1533333, false}, {1566667, false}, {1600000, false}, {1633333, false}, +}; + +static const TestFrame testFrames40Fps[] = { +    {1000000, false}, {1025000, true}, {1050000, false}, {1075000, false}, +    {1100000, false}, {1125000, true}, {1150000, false}, {1175000, false}, +    {1200000, false}, {1225000, true}, {1250000, false}, {1275000, false}, +    {1300000, false}, {1325000, true}, {1350000, false}, {1375000, false}, +    {1400000, false}, {1425000, true}, {1450000, false}, {1475000, false}, +}; + +static const TestFrame testFrames60Fps[] = { +    {1000000, false}, {1016667, true}, {1033333, false}, {1050000, true}, +    {1066667, false}, {1083333, true}, {1100000, false}, {1116667, true}, +    {1133333, false}, {1150000, true}, {1166667, false}, {1183333, true}, +    {1200000, false}, {1216667, true}, {1233333, false}, {1250000, true}, +    {1266667, false}, {1283333, true}, {1300000, false}, 
{1316667, true}, +}; + +static const TestFrame testFramesVariableFps[] = { +    // 40fps +    {1000000, false}, {1025000, true}, {1050000, false}, {1075000, false}, +    {1100000, false}, {1125000, true}, {1150000, false}, {1175000, false}, +    {1200000, false}, {1225000, true}, {1250000, false}, {1275000, false}, +    {1300000, false}, {1325000, true}, {1350000, false}, {1375000, false}, +    {1400000, false}, {1425000, true}, {1450000, false}, {1475000, false}, +    // a timestamp jump plus switch to 20fps +    {2000000, false}, {2050000, false}, {2100000, false}, {2150000, false}, +    {2200000, false}, {2250000, false}, {2300000, false}, {2350000, false}, +    {2400000, false}, {2450000, false}, {2500000, false}, {2550000, false}, +    {2600000, false}, {2650000, false}, {2700000, false}, {2750000, false}, +    {2800000, false}, {2850000, false}, {2900000, false}, {2950000, false}, +    // 60fps +    {2966667, false}, {2983333, true}, {3000000, false}, {3016667, true}, +    {3033333, false}, {3050000, true}, {3066667, false}, {3083333, true}, +    {3100000, false}, {3116667, true}, {3133333, false}, {3150000, true}, +    {3166667, false}, {3183333, true}, {3200000, false}, {3216667, true}, +    {3233333, false}, {3250000, true}, {3266667, false}, {3283333, true}, +}; + +static const int kMaxTestJitterUs = 2000; +// return one of 1000, 0, -1000 as jitter. 
+static int GetJitter(size_t i) { +    return (1 - (i % 3)) * (kMaxTestJitterUs / 2); +} + +class FrameDropperTest : public ::testing::Test { +public: +    FrameDropperTest() : mFrameDropper(new FrameDropper()) { +        EXPECT_EQ(OK, mFrameDropper->setMaxFrameRate(30.0)); +    } + +protected: +    void RunTest(const TestFrame* frames, size_t size) { +        for (size_t i = 0; i < size; ++i) { +            int jitter = GetJitter(i); +            int64_t testTimeUs = frames[i].timeUs + jitter; +            printf("time %lld, testTime %lld, jitter %d\n", +                    (long long)frames[i].timeUs, (long long)testTimeUs, jitter); +            EXPECT_EQ(frames[i].shouldDrop, mFrameDropper->shouldDrop(testTimeUs)); +        } +    } + +    sp<FrameDropper> mFrameDropper; +}; + +TEST_F(FrameDropperTest, TestInvalidMaxFrameRate) { +    EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(-1.0)); +    EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(0)); +} + +TEST_F(FrameDropperTest, Test20Fps) { +    RunTest(testFrames20Fps, ARRAY_SIZE(testFrames20Fps)); +} + +TEST_F(FrameDropperTest, Test30Fps) { +    RunTest(testFrames30Fps, ARRAY_SIZE(testFrames30Fps)); +} + +TEST_F(FrameDropperTest, Test40Fps) { +    RunTest(testFrames40Fps, ARRAY_SIZE(testFrames40Fps)); +} + +TEST_F(FrameDropperTest, Test60Fps) { +    RunTest(testFrames60Fps, ARRAY_SIZE(testFrames60Fps)); +} + +TEST_F(FrameDropperTest, TestVariableFps) { +    RunTest(testFramesVariableFps, ARRAY_SIZE(testFramesVariableFps)); +} + +} // namespace android diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp index 67ff145..644b6ed 100644 --- a/media/libstagefright/omx/tests/OMXHarness.cpp +++ b/media/libstagefright/omx/tests/OMXHarness.cpp @@ -64,9 +64,11 @@ status_t Harness::initOMX() {      return mOMX != 0 ? 
OK : NO_INIT;  } -void Harness::onMessage(const omx_message &msg) { +void Harness::onMessages(const std::list<omx_message> &messages) {      Mutex::Autolock autoLock(mLock); -    mMessageQueue.push_back(msg); +    for (std::list<omx_message>::const_iterator it = messages.cbegin(); it != messages.cend(); ) { +        mMessageQueue.push_back(*it++); +    }      mMessageAddedCondition.signal();  } @@ -193,7 +195,7 @@ status_t Harness::allocatePortBuffers(          CHECK(buffer.mMemory != NULL);          err = mOMX->allocateBufferWithBackup( -                node, portIndex, buffer.mMemory, &buffer.mID); +                node, portIndex, buffer.mMemory, &buffer.mID, buffer.mMemory->size());          EXPECT_SUCCESS(err, "allocateBuffer");          buffers->push(buffer); diff --git a/media/libstagefright/omx/tests/OMXHarness.h b/media/libstagefright/omx/tests/OMXHarness.h index bb8fd0c..1ebf3aa 100644 --- a/media/libstagefright/omx/tests/OMXHarness.h +++ b/media/libstagefright/omx/tests/OMXHarness.h @@ -74,7 +74,7 @@ struct Harness : public BnOMXObserver {      status_t testAll(); -    virtual void onMessage(const omx_message &msg); +    virtual void onMessages(const std::list<omx_message> &messages);  protected:      virtual ~Harness(); diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp index a6bd824..a86ab74 100644 --- a/media/libstagefright/rtsp/ARTPConnection.cpp +++ b/media/libstagefright/rtsp/ARTPConnection.cpp @@ -82,7 +82,7 @@ void ARTPConnection::addStream(          size_t index,          const sp<AMessage> ¬ify,          bool injected) { -    sp<AMessage> msg = new AMessage(kWhatAddStream, id()); +    sp<AMessage> msg = new AMessage(kWhatAddStream, this);      msg->setInt32("rtp-socket", rtpSocket);      msg->setInt32("rtcp-socket", rtcpSocket);      msg->setObject("session-desc", sessionDesc); @@ -93,7 +93,7 @@ void ARTPConnection::addStream(  }  void ARTPConnection::removeStream(int rtpSocket, int 
rtcpSocket) { -    sp<AMessage> msg = new AMessage(kWhatRemoveStream, id()); +    sp<AMessage> msg = new AMessage(kWhatRemoveStream, this);      msg->setInt32("rtp-socket", rtpSocket);      msg->setInt32("rtcp-socket", rtcpSocket);      msg->post(); @@ -233,7 +233,7 @@ void ARTPConnection::postPollEvent() {          return;      } -    sp<AMessage> msg = new AMessage(kWhatPollStreams, id()); +    sp<AMessage> msg = new AMessage(kWhatPollStreams, this);      msg->post();      mPollEventPending = true; @@ -639,7 +639,7 @@ sp<ARTPSource> ARTPConnection::findSource(StreamInfo *info, uint32_t srcId) {  }  void ARTPConnection::injectPacket(int index, const sp<ABuffer> &buffer) { -    sp<AMessage> msg = new AMessage(kWhatInjectPacket, id()); +    sp<AMessage> msg = new AMessage(kWhatInjectPacket, this);      msg->setInt32("index", index);      msg->setBuffer("buffer", buffer);      msg->post(); diff --git a/media/libstagefright/rtsp/ARTPSession.cpp b/media/libstagefright/rtsp/ARTPSession.cpp index ba4e33c..e5acb06 100644 --- a/media/libstagefright/rtsp/ARTPSession.cpp +++ b/media/libstagefright/rtsp/ARTPSession.cpp @@ -82,7 +82,7 @@ status_t ARTPSession::setup(const sp<ASessionDescription> &desc) {          info->mRTPSocket = rtpSocket;          info->mRTCPSocket = rtcpSocket; -        sp<AMessage> notify = new AMessage(kWhatAccessUnitComplete, id()); +        sp<AMessage> notify = new AMessage(kWhatAccessUnitComplete, this);          notify->setSize("track-index", mTracks.size() - 1);          mRTPConn->addStream( diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp index e1607bf..56c4aa6 100644 --- a/media/libstagefright/rtsp/ARTPWriter.cpp +++ b/media/libstagefright/rtsp/ARTPWriter.cpp @@ -146,7 +146,7 @@ status_t ARTPWriter::start(MetaData * /* params */) {          TRESPASS();      } -    (new AMessage(kWhatStart, mReflector->id()))->post(); +    (new AMessage(kWhatStart, mReflector))->post();      while (!(mFlags & 
kFlagStarted)) {          mCondition.wait(mLock); @@ -161,7 +161,7 @@ status_t ARTPWriter::stop() {          return OK;      } -    (new AMessage(kWhatStop, mReflector->id()))->post(); +    (new AMessage(kWhatStop, mReflector))->post();      while (mFlags & kFlagStarted) {          mCondition.wait(mLock); @@ -213,8 +213,8 @@ void ARTPWriter::onMessageReceived(const sp<AMessage> &msg) {                  mCondition.signal();              } -            (new AMessage(kWhatRead, mReflector->id()))->post(); -            (new AMessage(kWhatSendSR, mReflector->id()))->post(); +            (new AMessage(kWhatRead, mReflector))->post(); +            (new AMessage(kWhatSendSR, mReflector))->post();              break;          } diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h index fdc8d23..be8bc13 100644 --- a/media/libstagefright/rtsp/ARTPWriter.h +++ b/media/libstagefright/rtsp/ARTPWriter.h @@ -32,7 +32,7 @@  namespace android {  struct ABuffer; -struct MediaBuffer; +class MediaBuffer;  struct ARTPWriter : public MediaWriter {      ARTPWriter(int fd); diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp index 60b3aaf..855ffdc 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.cpp +++ b/media/libstagefright/rtsp/ARTSPConnection.cpp @@ -68,28 +68,28 @@ ARTSPConnection::~ARTSPConnection() {  }  void ARTSPConnection::connect(const char *url, const sp<AMessage> &reply) { -    sp<AMessage> msg = new AMessage(kWhatConnect, id()); +    sp<AMessage> msg = new AMessage(kWhatConnect, this);      msg->setString("url", url);      msg->setMessage("reply", reply);      msg->post();  }  void ARTSPConnection::disconnect(const sp<AMessage> &reply) { -    sp<AMessage> msg = new AMessage(kWhatDisconnect, id()); +    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);      msg->setMessage("reply", reply);      msg->post();  }  void ARTSPConnection::sendRequest(          const char 
*request, const sp<AMessage> &reply) { -    sp<AMessage> msg = new AMessage(kWhatSendRequest, id()); +    sp<AMessage> msg = new AMessage(kWhatSendRequest, this);      msg->setString("request", request);      msg->setMessage("reply", reply);      msg->post();  }  void ARTSPConnection::observeBinaryData(const sp<AMessage> &reply) { -    sp<AMessage> msg = new AMessage(kWhatObserveBinaryData, id()); +    sp<AMessage> msg = new AMessage(kWhatObserveBinaryData, this);      msg->setMessage("reply", reply);      msg->post();  } @@ -286,7 +286,7 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {      if (err < 0) {          if (errno == EINPROGRESS) { -            sp<AMessage> msg = new AMessage(kWhatCompleteConnection, id()); +            sp<AMessage> msg = new AMessage(kWhatCompleteConnection, this);              msg->setMessage("reply", reply);              msg->setInt32("connection-id", mConnectionID);              msg->post(); @@ -523,7 +523,7 @@ void ARTSPConnection::postReceiveReponseEvent() {          return;      } -    sp<AMessage> msg = new AMessage(kWhatReceiveResponse, id()); +    sp<AMessage> msg = new AMessage(kWhatReceiveResponse, this);      msg->post();      mReceiveResponseEventPending = true; @@ -746,7 +746,7 @@ bool ARTSPConnection::receiveRTSPReponse() {              AString request;              CHECK(reply->findString("original-request", &request)); -            sp<AMessage> msg = new AMessage(kWhatSendRequest, id()); +            sp<AMessage> msg = new AMessage(kWhatSendRequest, this);              msg->setMessage("reply", reply);              msg->setString("request", request.c_str(), request.size()); diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk index 9fedb71..c5e8c35 100644 --- a/media/libstagefright/rtsp/Android.mk +++ b/media/libstagefright/rtsp/Android.mk @@ -31,7 +31,8 @@ ifeq ($(TARGET_ARCH),arm)      LOCAL_CFLAGS += -Wno-psabi  endif -LOCAL_CFLAGS += -Werror +LOCAL_CFLAGS += -Werror 
-Wall +LOCAL_CLANG := true  LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk @@ -54,7 +55,8 @@ LOCAL_C_INCLUDES:= \  	frameworks/av/media/libstagefright \  	$(TOP)/frameworks/native/include/media/openmax -LOCAL_CFLAGS += -Wno-multichar +LOCAL_CFLAGS += -Wno-multichar -Werror -Wall +LOCAL_CLANG := true  LOCAL_MODULE_TAGS := optional diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 3bf489b..0d0baf3 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -98,6 +98,7 @@ struct MyHandler : public AHandler {      enum {          kWhatConnected                  = 'conn',          kWhatDisconnected               = 'disc', +        kWhatSeekPaused                 = 'spau',          kWhatSeekDone                   = 'sdon',          kWhatAccessUnit                 = 'accU', @@ -169,10 +170,10 @@ struct MyHandler : public AHandler {          looper()->registerHandler(mConn);          (1 ? mNetLooper : looper())->registerHandler(mRTPConn); -        sp<AMessage> notify = new AMessage('biny', id()); +        sp<AMessage> notify = new AMessage('biny', this);          mConn->observeBinaryData(notify); -        sp<AMessage> reply = new AMessage('conn', id()); +        sp<AMessage> reply = new AMessage('conn', this);          mConn->connect(mOriginalSessionURL.c_str(), reply);      } @@ -180,10 +181,10 @@ struct MyHandler : public AHandler {          looper()->registerHandler(mConn);          (1 ? 
mNetLooper : looper())->registerHandler(mRTPConn); -        sp<AMessage> notify = new AMessage('biny', id()); +        sp<AMessage> notify = new AMessage('biny', this);          mConn->observeBinaryData(notify); -        sp<AMessage> reply = new AMessage('sdpl', id()); +        sp<AMessage> reply = new AMessage('sdpl', this);          reply->setObject("description", desc);          mConn->connect(mOriginalSessionURL.c_str(), reply);      } @@ -210,29 +211,35 @@ struct MyHandler : public AHandler {      }      void disconnect() { -        (new AMessage('abor', id()))->post(); +        (new AMessage('abor', this))->post();      }      void seek(int64_t timeUs) { -        sp<AMessage> msg = new AMessage('seek', id()); +        sp<AMessage> msg = new AMessage('seek', this);          msg->setInt64("time", timeUs);          mPauseGeneration++;          msg->post();      } +    void continueSeekAfterPause(int64_t timeUs) { +        sp<AMessage> msg = new AMessage('see1', this); +        msg->setInt64("time", timeUs); +        msg->post(); +    } +      bool isSeekable() const {          return mSeekable;      }      void pause() { -        sp<AMessage> msg = new AMessage('paus', id()); +        sp<AMessage> msg = new AMessage('paus', this);          mPauseGeneration++;          msg->setInt32("pausecheck", mPauseGeneration);          msg->post(kPauseDelayUs);      }      void resume() { -        sp<AMessage> msg = new AMessage('resu', id()); +        sp<AMessage> msg = new AMessage('resu', this);          mPauseGeneration++;          msg->post();      } @@ -454,10 +461,10 @@ struct MyHandler : public AHandler {                      request.append("Accept: application/sdp\r\n");                      request.append("\r\n"); -                    sp<AMessage> reply = new AMessage('desc', id()); +                    sp<AMessage> reply = new AMessage('desc', this);                      mConn->sendRequest(request.c_str(), reply);                  } else { -                    
(new AMessage('disc', id()))->post(); +                    (new AMessage('disc', this))->post();                  }                  break;              } @@ -468,10 +475,10 @@ struct MyHandler : public AHandler {                  int32_t reconnect;                  if (msg->findInt32("reconnect", &reconnect) && reconnect) { -                    sp<AMessage> reply = new AMessage('conn', id()); +                    sp<AMessage> reply = new AMessage('conn', this);                      mConn->connect(mOriginalSessionURL.c_str(), reply);                  } else { -                    (new AMessage('quit', id()))->post(); +                    (new AMessage('quit', this))->post();                  }                  break;              } @@ -514,7 +521,7 @@ struct MyHandler : public AHandler {                              ALOGI("rewritten session url: '%s'", mSessionURL.c_str());                          } -                        sp<AMessage> reply = new AMessage('conn', id()); +                        sp<AMessage> reply = new AMessage('conn', this);                          mConn->connect(mOriginalSessionURL.c_str(), reply);                          break;                      } @@ -586,7 +593,7 @@ struct MyHandler : public AHandler {                  }                  if (result != OK) { -                    sp<AMessage> reply = new AMessage('disc', id()); +                    sp<AMessage> reply = new AMessage('disc', this);                      mConn->disconnect(reply);                  }                  break; @@ -631,7 +638,7 @@ struct MyHandler : public AHandler {                  }                  if (result != OK) { -                    sp<AMessage> reply = new AMessage('disc', id()); +                    sp<AMessage> reply = new AMessage('disc', this);                      mConn->disconnect(reply);                  }                  break; @@ -651,7 +658,7 @@ struct MyHandler : public AHandler {                  int32_t result;                  
CHECK(msg->findInt32("result", &result)); -                ALOGI("SETUP(%d) completed with result %d (%s)", +                ALOGI("SETUP(%zu) completed with result %d (%s)",                       index, result, strerror(-result));                  if (result == OK) { @@ -703,7 +710,7 @@ struct MyHandler : public AHandler {                              mSessionID.erase(i, mSessionID.size() - i);                          } -                        sp<AMessage> notify = new AMessage('accu', id()); +                        sp<AMessage> notify = new AMessage('accu', this);                          notify->setSize("track-index", trackIndex);                          i = response->mHeaders.indexOfKey("transport"); @@ -769,10 +776,10 @@ struct MyHandler : public AHandler {                      request.append("\r\n"); -                    sp<AMessage> reply = new AMessage('play', id()); +                    sp<AMessage> reply = new AMessage('play', this);                      mConn->sendRequest(request.c_str(), reply);                  } else { -                    sp<AMessage> reply = new AMessage('disc', id()); +                    sp<AMessage> reply = new AMessage('disc', this);                      mConn->disconnect(reply);                  }                  break; @@ -797,7 +804,7 @@ struct MyHandler : public AHandler {                      } else {                          parsePlayResponse(response); -                        sp<AMessage> timeout = new AMessage('tiou', id()); +                        sp<AMessage> timeout = new AMessage('tiou', this);                          mCheckTimeoutGeneration++;                          timeout->setInt32("tioucheck", mCheckTimeoutGeneration);                          timeout->post(kStartupTimeoutUs); @@ -805,7 +812,7 @@ struct MyHandler : public AHandler {                  }                  if (result != OK) { -                    sp<AMessage> reply = new AMessage('disc', id()); +                    sp<AMessage> reply = new 
AMessage('disc', this);                      mConn->disconnect(reply);                  } @@ -831,7 +838,7 @@ struct MyHandler : public AHandler {                  request.append("\r\n");                  request.append("\r\n"); -                sp<AMessage> reply = new AMessage('opts', id()); +                sp<AMessage> reply = new AMessage('opts', this);                  reply->setInt32("generation", mKeepAliveGeneration);                  mConn->sendRequest(request.c_str(), reply);                  break; @@ -894,7 +901,7 @@ struct MyHandler : public AHandler {                  mPausing = false;                  mSeekable = true; -                sp<AMessage> reply = new AMessage('tear', id()); +                sp<AMessage> reply = new AMessage('tear', this);                  int32_t reconnect;                  if (msg->findInt32("reconnect", &reconnect) && reconnect) { @@ -926,7 +933,7 @@ struct MyHandler : public AHandler {                  ALOGI("TEARDOWN completed with result %d (%s)",                       result, strerror(-result)); -                sp<AMessage> reply = new AMessage('disc', id()); +                sp<AMessage> reply = new AMessage('disc', this);                  int32_t reconnect;                  if (msg->findInt32("reconnect", &reconnect) && reconnect) { @@ -958,7 +965,7 @@ struct MyHandler : public AHandler {                  if (mNumAccessUnitsReceived == 0) {  #if 1                      ALOGI("stream ended? 
aborting."); -                    (new AMessage('abor', id()))->post(); +                    (new AMessage('abor', this))->post();                      break;  #else                      ALOGI("haven't seen an AU in a looong time."); @@ -1012,7 +1019,7 @@ struct MyHandler : public AHandler {                  int32_t eos;                  if (msg->findInt32("eos", &eos)) { -                    ALOGI("received BYE on track index %d", trackIndex); +                    ALOGI("received BYE on track index %zu", trackIndex);                      if (!mAllTracksHaveTime && dataReceivedOnAllChannels()) {                          ALOGI("No time established => fake existing data"); @@ -1077,7 +1084,7 @@ struct MyHandler : public AHandler {                  request.append("\r\n"); -                sp<AMessage> reply = new AMessage('pau2', id()); +                sp<AMessage> reply = new AMessage('pau2', this);                  mConn->sendRequest(request.c_str(), reply);                  break;              } @@ -1114,7 +1121,7 @@ struct MyHandler : public AHandler {                  request.append("\r\n"); -                sp<AMessage> reply = new AMessage('res2', id()); +                sp<AMessage> reply = new AMessage('res2', this);                  mConn->sendRequest(request.c_str(), reply);                  break;              } @@ -1143,7 +1150,7 @@ struct MyHandler : public AHandler {                          // Post new timeout in order to make sure to use                          // fake timestamps if no new Sender Reports arrive -                        sp<AMessage> timeout = new AMessage('tiou', id()); +                        sp<AMessage> timeout = new AMessage('tiou', this);                          mCheckTimeoutGeneration++;                          timeout->setInt32("tioucheck", mCheckTimeoutGeneration);                          timeout->post(kStartupTimeoutUs); @@ -1152,7 +1159,7 @@ struct MyHandler : public AHandler {                  if (result != OK) {       
               ALOGE("resume failed, aborting."); -                    (new AMessage('abor', id()))->post(); +                    (new AMessage('abor', this))->post();                  }                  mPausing = false; @@ -1180,7 +1187,7 @@ struct MyHandler : public AHandler {                  mCheckPending = true;                  ++mCheckGeneration; -                sp<AMessage> reply = new AMessage('see1', id()); +                sp<AMessage> reply = new AMessage('see0', this);                  reply->setInt64("time", timeUs);                  if (mPausing) { @@ -1203,9 +1210,26 @@ struct MyHandler : public AHandler {                  break;              } -            case 'see1': +            case 'see0':              {                  // Session is paused now. +                status_t err = OK; +                msg->findInt32("result", &err); + +                int64_t timeUs; +                CHECK(msg->findInt64("time", &timeUs)); + +                sp<AMessage> notify = mNotify->dup(); +                notify->setInt32("what", kWhatSeekPaused); +                notify->setInt32("err", err); +                notify->setInt64("time", timeUs); +                notify->post(); +                break; + +            } + +            case 'see1': +            {                  for (size_t i = 0; i < mTracks.size(); ++i) {                      TrackInfo *info = &mTracks.editItemAt(i); @@ -1221,7 +1245,7 @@ struct MyHandler : public AHandler {                  // Start new timeoutgeneration to avoid getting timeout                  // before PLAY response arrive -                sp<AMessage> timeout = new AMessage('tiou', id()); +                sp<AMessage> timeout = new AMessage('tiou', this);                  mCheckTimeoutGeneration++;                  timeout->setInt32("tioucheck", mCheckTimeoutGeneration);                  timeout->post(kStartupTimeoutUs); @@ -1243,7 +1267,7 @@ struct MyHandler : public AHandler {                  
request.append("\r\n"); -                sp<AMessage> reply = new AMessage('see2', id()); +                sp<AMessage> reply = new AMessage('see2', this);                  mConn->sendRequest(request.c_str(), reply);                  break;              } @@ -1277,7 +1301,7 @@ struct MyHandler : public AHandler {                          // Post new timeout in order to make sure to use                          // fake timestamps if no new Sender Reports arrive -                        sp<AMessage> timeout = new AMessage('tiou', id()); +                        sp<AMessage> timeout = new AMessage('tiou', this);                          mCheckTimeoutGeneration++;                          timeout->setInt32("tioucheck", mCheckTimeoutGeneration);                          timeout->post(kStartupTimeoutUs); @@ -1293,7 +1317,7 @@ struct MyHandler : public AHandler {                  if (result != OK) {                      ALOGE("seek failed, aborting."); -                    (new AMessage('abor', id()))->post(); +                    (new AMessage('abor', this))->post();                  }                  mPausing = false; @@ -1343,12 +1367,12 @@ struct MyHandler : public AHandler {                          mTryTCPInterleaving = true; -                        sp<AMessage> msg = new AMessage('abor', id()); +                        sp<AMessage> msg = new AMessage('abor', this);                          msg->setInt32("reconnect", true);                          msg->post();                      } else {                          ALOGW("Never received any data, disconnecting."); -                        (new AMessage('abor', id()))->post(); +                        (new AMessage('abor', this))->post();                      }                  } else {                      if (!mAllTracksHaveTime) { @@ -1369,7 +1393,7 @@ struct MyHandler : public AHandler {      }      void postKeepAlive() { -        sp<AMessage> msg = new AMessage('aliv', id()); +        sp<AMessage> msg = new 
AMessage('aliv', this);          msg->setInt32("generation", mKeepAliveGeneration);          msg->post((mKeepAliveTimeoutUs * 9) / 10);      } @@ -1380,7 +1404,7 @@ struct MyHandler : public AHandler {          }          mCheckPending = true; -        sp<AMessage> check = new AMessage('chek', id()); +        sp<AMessage> check = new AMessage('chek', this);          check->setInt32("generation", mCheckGeneration);          check->post(kAccessUnitTimeoutUs);      } @@ -1564,9 +1588,9 @@ private:              new APacketSource(mSessionDesc, index);          if (source->initCheck() != OK) { -            ALOGW("Unsupported format. Ignoring track #%d.", index); +            ALOGW("Unsupported format. Ignoring track #%zu.", index); -            sp<AMessage> reply = new AMessage('setu', id()); +            sp<AMessage> reply = new AMessage('setu', this);              reply->setSize("index", index);              reply->setInt32("result", ERROR_UNSUPPORTED);              reply->post(); @@ -1606,7 +1630,7 @@ private:          info->mTimeScale = timescale;          info->mEOSReceived = false; -        ALOGV("track #%d URL=%s", mTracks.size(), trackURL.c_str()); +        ALOGV("track #%zu URL=%s", mTracks.size(), trackURL.c_str());          AString request = "SETUP ";          request.append(trackURL); @@ -1652,7 +1676,7 @@ private:          request.append("\r\n"); -        sp<AMessage> reply = new AMessage('setu', id()); +        sp<AMessage> reply = new AMessage('setu', this);          reply->setSize("index", index);          reply->setSize("track-index", mTracks.size() - 1);          mConn->sendRequest(request.c_str(), reply); @@ -1673,21 +1697,11 @@ private:          }          size_t n = strlen(baseURL); -        if (baseURL[n - 1] == '/') { -            out->setTo(baseURL); -            out->append(url); -        } else { -            const char *slashPos = strrchr(baseURL, '/'); - -            if (slashPos > &baseURL[6]) { -                out->setTo(baseURL, slashPos - 
baseURL); -            } else { -                out->setTo(baseURL); -            } - +        out->setTo(baseURL); +        if (baseURL[n - 1] != '/') {              out->append("/"); -            out->append(url);          } +        out->append(url);          return true;      } @@ -1731,8 +1745,8 @@ private:      }      void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) { -        ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx", -             trackIndex, rtpTime, ntpTime); +        ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx", +             trackIndex, rtpTime, (long long)ntpTime);          int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32)); @@ -1747,7 +1761,7 @@ private:          }          if (!mAllTracksHaveTime) { -            bool allTracksHaveTime = true; +            bool allTracksHaveTime = (mTracks.size() > 0);              for (size_t i = 0; i < mTracks.size(); ++i) {                  TrackInfo *track = &mTracks.editItemAt(i);                  if (track->mNTPAnchorUs < 0) { @@ -1851,8 +1865,8 @@ private:              return false;          } -        ALOGV("track %d rtpTime=%d mediaTimeUs = %lld us (%.2f secs)", -             trackIndex, rtpTime, mediaTimeUs, mediaTimeUs / 1E6); +        ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)", +             trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);          accessUnit->meta()->setInt64("timeUs", mediaTimeUs); diff --git a/media/libstagefright/rtsp/MyTransmitter.h b/media/libstagefright/rtsp/MyTransmitter.h index 009a3b1..369f276 100644 --- a/media/libstagefright/rtsp/MyTransmitter.h +++ b/media/libstagefright/rtsp/MyTransmitter.h @@ -100,7 +100,7 @@ struct MyTransmitter : public AHandler {          mLooper->registerHandler(this);          mLooper->registerHandler(mConn); -        sp<AMessage> reply = new AMessage('conn', id()); +        sp<AMessage> reply = new AMessage('conn', this);          
mConn->connect(mServerURL.c_str(), reply);  #ifdef ANDROID @@ -229,7 +229,7 @@ struct MyTransmitter : public AHandler {          request.append("\r\n");          request.append(sdp); -        sp<AMessage> reply = new AMessage('anno', id()); +        sp<AMessage> reply = new AMessage('anno', this);          mConn->sendRequest(request.c_str(), reply);      } @@ -350,7 +350,7 @@ struct MyTransmitter : public AHandler {                       << result << " (" << strerror(-result) << ")";                  if (result != OK) { -                    (new AMessage('quit', id()))->post(); +                    (new AMessage('quit', this))->post();                      break;                  } @@ -381,7 +381,7 @@ struct MyTransmitter : public AHandler {                      if (response->mStatusCode == 401) {                          if (mAuthType != NONE) {                              LOG(INFO) << "FAILED to authenticate"; -                            (new AMessage('quit', id()))->post(); +                            (new AMessage('quit', this))->post();                              break;                          } @@ -391,14 +391,14 @@ struct MyTransmitter : public AHandler {                  }                  if (result != OK || response->mStatusCode != 200) { -                    (new AMessage('quit', id()))->post(); +                    (new AMessage('quit', this))->post();                      break;                  }                  unsigned rtpPort;                  ARTPConnection::MakePortPair(&mRTPSocket, &mRTCPSocket, &rtpPort); -                // (new AMessage('poll', id()))->post(); +                // (new AMessage('poll', this))->post();                  AString request;                  request.append("SETUP "); @@ -414,7 +414,7 @@ struct MyTransmitter : public AHandler {                  request.append(";mode=record\r\n");                  request.append("\r\n"); -                sp<AMessage> reply = new AMessage('setu', id()); +                
sp<AMessage> reply = new AMessage('setu', this);                  mConn->sendRequest(request.c_str(), reply);                  break;              } @@ -468,7 +468,7 @@ struct MyTransmitter : public AHandler {                  }                  if (result != OK || response->mStatusCode != 200) { -                    (new AMessage('quit', id()))->post(); +                    (new AMessage('quit', this))->post();                      break;                  } @@ -535,7 +535,7 @@ struct MyTransmitter : public AHandler {                  request.append("\r\n");                  request.append("\r\n"); -                sp<AMessage> reply = new AMessage('reco', id()); +                sp<AMessage> reply = new AMessage('reco', this);                  mConn->sendRequest(request.c_str(), reply);                  break;              } @@ -558,13 +558,13 @@ struct MyTransmitter : public AHandler {                  }                  if (result != OK) { -                    (new AMessage('quit', id()))->post(); +                    (new AMessage('quit', this))->post();                      break;                  } -                (new AMessage('more', id()))->post(); -                (new AMessage('sr  ', id()))->post(); -                (new AMessage('aliv', id()))->post(30000000ll); +                (new AMessage('more', this))->post(); +                (new AMessage('sr  ', this))->post(); +                (new AMessage('aliv', this))->post(30000000ll);                  break;              } @@ -586,7 +586,7 @@ struct MyTransmitter : public AHandler {                  request.append("\r\n");                  request.append("\r\n"); -                sp<AMessage> reply = new AMessage('opts', id()); +                sp<AMessage> reply = new AMessage('opts', this);                  mConn->sendRequest(request.c_str(), reply);                  break;              } @@ -603,7 +603,7 @@ struct MyTransmitter : public AHandler {                      break;                  } -     
           (new AMessage('aliv', id()))->post(30000000ll); +                (new AMessage('aliv', this))->post(30000000ll);                  break;              } @@ -702,7 +702,7 @@ struct MyTransmitter : public AHandler {                      request.append("\r\n");                      request.append("\r\n"); -                    sp<AMessage> reply = new AMessage('paus', id()); +                    sp<AMessage> reply = new AMessage('paus', this);                      mConn->sendRequest(request.c_str(), reply);                  }                  break; @@ -753,7 +753,7 @@ struct MyTransmitter : public AHandler {                  request.append("\r\n");                  request.append("\r\n"); -                sp<AMessage> reply = new AMessage('tear', id()); +                sp<AMessage> reply = new AMessage('tear', this);                  mConn->sendRequest(request.c_str(), reply);                  break;              } @@ -775,7 +775,7 @@ struct MyTransmitter : public AHandler {                      CHECK(response != NULL);                  } -                (new AMessage('quit', id()))->post(); +                (new AMessage('quit', this))->post();                  break;              } @@ -784,14 +784,14 @@ struct MyTransmitter : public AHandler {                  LOG(INFO) << "disconnect completed";                  mConnected = false; -                (new AMessage('quit', id()))->post(); +                (new AMessage('quit', this))->post();                  break;              }              case 'quit':              {                  if (mConnected) { -                    mConn->disconnect(new AMessage('disc', id())); +                    mConn->disconnect(new AMessage('disc', this));                      break;                  } diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp index a24eb69..0f46c83 100644 --- a/media/libstagefright/rtsp/SDPLoader.cpp +++ b/media/libstagefright/rtsp/SDPLoader.cpp @@ -51,7 
+51,7 @@ SDPLoader::SDPLoader(  void SDPLoader::load(const char *url, const KeyedVector<String8, String8> *headers) {      mNetLooper->registerHandler(this); -    sp<AMessage> msg = new AMessage(kWhatLoad, id()); +    sp<AMessage> msg = new AMessage(kWhatLoad, this);      msg->setString("url", url);      if (headers != NULL) { diff --git a/media/libstagefright/rtsp/UDPPusher.cpp b/media/libstagefright/rtsp/UDPPusher.cpp index 47ea6f1..5c685a1 100644 --- a/media/libstagefright/rtsp/UDPPusher.cpp +++ b/media/libstagefright/rtsp/UDPPusher.cpp @@ -65,7 +65,7 @@ void UDPPusher::start() {      mFirstTimeMs = fromlel(timeMs);      mFirstTimeUs = ALooper::GetNowUs(); -    (new AMessage(kWhatPush, id()))->post(); +    (new AMessage(kWhatPush, this))->post();  }  bool UDPPusher::onPush() { @@ -103,7 +103,7 @@ bool UDPPusher::onPush() {      timeMs -= mFirstTimeMs;      int64_t whenUs = mFirstTimeUs + timeMs * 1000ll;      int64_t nowUs = ALooper::GetNowUs(); -    (new AMessage(kWhatPush, id()))->post(whenUs - nowUs); +    (new AMessage(kWhatPush, this))->post(whenUs - nowUs);      return true;  } diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk index 8d6ff5b..111e6c5 100644 --- a/media/libstagefright/tests/Android.mk +++ b/media/libstagefright/tests/Android.mk @@ -31,6 +31,9 @@ LOCAL_C_INCLUDES := \  	frameworks/av/media/libstagefright/include \  	$(TOP)/frameworks/native/include/media/openmax \ +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true +  LOCAL_32_BIT_ONLY := true  include $(BUILD_NATIVE_TEST) @@ -60,6 +63,39 @@ LOCAL_C_INCLUDES := \  	frameworks/av/media/libstagefright/include \  	$(TOP)/frameworks/native/include/media/openmax \ +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true + +include $(BUILD_NATIVE_TEST) + +include $(CLEAR_VARS) +LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk + +LOCAL_MODULE := MediaCodecListOverrides_test + +LOCAL_MODULE_TAGS := tests + +LOCAL_SRC_FILES := \ +	
MediaCodecListOverrides_test.cpp \ + +LOCAL_SHARED_LIBRARIES := \ +	libmedia \ +	libstagefright \ +	libstagefright_foundation \ +	libstagefright_omx \ +	libutils \ +	liblog + +LOCAL_C_INCLUDES := \ +	frameworks/av/media/libstagefright \ +	frameworks/av/media/libstagefright/include \ +	frameworks/native/include/media/openmax \ + +LOCAL_32_BIT_ONLY := true + +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true +  include $(BUILD_NATIVE_TEST)  # Include subdirectory makefiles diff --git a/media/libstagefright/tests/DummyRecorder.h b/media/libstagefright/tests/DummyRecorder.h index 1cbea1b..cd4d0ee 100644 --- a/media/libstagefright/tests/DummyRecorder.h +++ b/media/libstagefright/tests/DummyRecorder.h @@ -24,7 +24,7 @@  namespace android { -class MediaSource; +struct MediaSource;  class MediaBuffer;  class DummyRecorder { diff --git a/media/libstagefright/tests/MediaCodecListOverrides_test.cpp b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp new file mode 100644 index 0000000..ab547be --- /dev/null +++ b/media/libstagefright/tests/MediaCodecListOverrides_test.cpp @@ -0,0 +1,162 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *      http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// #define LOG_NDEBUG 0 +#define LOG_TAG "MediaCodecListOverrides_test" +#include <utils/Log.h> + +#include <gtest/gtest.h> + +#include "MediaCodecListOverrides.h" + +#include <media/MediaCodecInfo.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaCodecList.h> + +namespace android { + +static const char kTestOverridesStr[] = +"<MediaCodecs>\n" +"    <Settings>\n" +"        <Setting name=\"supports-multiple-secure-codecs\" value=\"false\" />\n" +"        <Setting name=\"supports-secure-with-non-secure-codec\" value=\"true\" />\n" +"    </Settings>\n" +"    <Encoders>\n" +"        <MediaCodec name=\"OMX.qcom.video.encoder.avc\" type=\"video/avc\" update=\"true\" >\n" +"            <Limit name=\"max-supported-instances\" value=\"4\" />\n" +"        </MediaCodec>\n" +"        <MediaCodec name=\"OMX.qcom.video.encoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n" +"            <Limit name=\"max-supported-instances\" value=\"4\" />\n" +"        </MediaCodec>\n" +"    </Encoders>\n" +"    <Decoders>\n" +"        <MediaCodec name=\"OMX.qcom.video.decoder.avc.secure\" type=\"video/avc\" update=\"true\" >\n" +"            <Limit name=\"max-supported-instances\" value=\"1\" />\n" +"        </MediaCodec>\n" +"        <MediaCodec name=\"OMX.qcom.video.decoder.h263\" type=\"video/3gpp\" update=\"true\" >\n" +"            <Limit name=\"max-supported-instances\" value=\"4\" />\n" +"        </MediaCodec>\n" +"        <MediaCodec name=\"OMX.qcom.video.decoder.mpeg2\" type=\"video/mpeg2\" update=\"true\" >\n" +"            <Limit name=\"max-supported-instances\" value=\"3\" />\n" +"        </MediaCodec>\n" +"        <MediaCodec name=\"OMX.qcom.video.decoder.mpeg4\" type=\"video/mp4v-es\" update=\"true\" >\n" +"            <Limit name=\"max-supported-instances\" value=\"3\" />\n" +"        </MediaCodec>\n" +"    </Decoders>\n" +"</MediaCodecs>\n"; + +class MediaCodecListOverridesTest : public ::testing::Test { +public: +    
MediaCodecListOverridesTest() {} + +    void addMaxInstancesSetting( +            const AString &key, +            const AString &value, +            KeyedVector<AString, CodecSettings> *results) { +        CodecSettings settings; +        settings.add("max-supported-instances", value); +        results->add(key, settings); +    } + +    void verifyProfileResults(const KeyedVector<AString, CodecSettings> &results) { +        EXPECT_LT(0u, results.size()); +        for (size_t i = 0; i < results.size(); ++i) { +            AString key = results.keyAt(i); +            CodecSettings settings = results.valueAt(i); +            EXPECT_EQ(1u, settings.size()); +            EXPECT_TRUE(settings.keyAt(0) == "max-supported-instances"); +            AString valueS = settings.valueAt(0); +            int32_t value = strtol(valueS.c_str(), NULL, 10); +            EXPECT_LT(0, value); +            ALOGV("profileCodecs results %s %s", key.c_str(), valueS.c_str()); +        } +    } + +    void exportTestResultsToXML(const char *fileName) { +        CodecSettings gR; +        gR.add("supports-multiple-secure-codecs", "false"); +        gR.add("supports-secure-with-non-secure-codec", "true"); +        KeyedVector<AString, CodecSettings> eR; +        addMaxInstancesSetting("OMX.qcom.video.encoder.avc video/avc", "4", &eR); +        addMaxInstancesSetting("OMX.qcom.video.encoder.mpeg4 video/mp4v-es", "4", &eR); +        KeyedVector<AString, CodecSettings> dR; +        addMaxInstancesSetting("OMX.qcom.video.decoder.avc.secure video/avc", "1", &dR); +        addMaxInstancesSetting("OMX.qcom.video.decoder.h263 video/3gpp", "4", &dR); +        addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg2 video/mpeg2", "3", &dR); +        addMaxInstancesSetting("OMX.qcom.video.decoder.mpeg4 video/mp4v-es", "3", &dR); + +        exportResultsToXML(fileName, gR, eR, dR); +    } +}; + +TEST_F(MediaCodecListOverridesTest, splitString) { +    AString s = "abc123"; +    AString delimiter = " "; +    
AString s1; +    AString s2; +    EXPECT_FALSE(splitString(s, delimiter, &s1, &s2)); +    s = "abc 123"; +    EXPECT_TRUE(splitString(s, delimiter, &s1, &s2)); +    EXPECT_TRUE(s1 == "abc"); +    EXPECT_TRUE(s2 == "123"); +} + +// TODO: the codec component never returns OMX_EventCmdComplete in unit test. +TEST_F(MediaCodecListOverridesTest, DISABLED_profileCodecs) { +    sp<IMediaCodecList> list = MediaCodecList::getInstance(); +    Vector<sp<MediaCodecInfo>> infos; +    for (size_t i = 0; i < list->countCodecs(); ++i) { +        infos.push_back(list->getCodecInfo(i)); +    } +    CodecSettings global_results; +    KeyedVector<AString, CodecSettings> encoder_results; +    KeyedVector<AString, CodecSettings> decoder_results; +    profileCodecs( +            infos, &global_results, &encoder_results, &decoder_results, true /* forceToMeasure */); +    verifyProfileResults(encoder_results); +    verifyProfileResults(decoder_results); +} + +TEST_F(MediaCodecListOverridesTest, exportTestResultsToXML) { +    const char *fileName = "/sdcard/mediacodec_list_overrides_test.xml"; +    remove(fileName); + +    exportTestResultsToXML(fileName); + +    // verify +    AString overrides; +    FILE *f = fopen(fileName, "rb"); +    ASSERT_TRUE(f != NULL); +    fseek(f, 0, SEEK_END); +    long size = ftell(f); +    rewind(f); + +    char *buf = (char *)malloc(size); +    EXPECT_EQ((size_t)1, fread(buf, size, 1, f)); +    overrides.setTo(buf, size); +    fclose(f); +    free(buf); + +    AString expected; +    expected.append(getProfilingVersionString()); +    expected.append("\n"); +    expected.append(kTestOverridesStr); +    EXPECT_TRUE(overrides == expected); + +    remove(fileName); +} + +} // namespace android diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp index fd889f9..3860e9b 100644 --- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp +++ 
b/media/libstagefright/tests/SurfaceMediaSource_test.cpp @@ -19,6 +19,7 @@  #include <gtest/gtest.h>  #include <utils/String8.h> +#include <utils/String16.h>  #include <utils/Errors.h>  #include <fcntl.h>  #include <unistd.h> @@ -466,7 +467,7 @@ void SurfaceMediaSourceGLTest::oneBufferPassGL(int num) {  // Set up the MediaRecorder which runs in the same process as mediaserver  sp<MediaRecorder> SurfaceMediaSourceGLTest::setUpMediaRecorder(int fd, int videoSource,          int outputFormat, int videoEncoder, int width, int height, int fps) { -    sp<MediaRecorder> mr = new MediaRecorder(); +    sp<MediaRecorder> mr = new MediaRecorder(String16());      mr->setVideoSource(videoSource);      mr->setOutputFormat(outputFormat);      mr->setVideoEncoder(videoEncoder); diff --git a/media/libstagefright/tests/Utils_test.cpp b/media/libstagefright/tests/Utils_test.cpp index c1e663c..d736501 100644 --- a/media/libstagefright/tests/Utils_test.cpp +++ b/media/libstagefright/tests/Utils_test.cpp @@ -109,21 +109,21 @@ TEST_F(UtilsTest, TestStringUtils) {  TEST_F(UtilsTest, TestDebug) {  #define LVL(x) (ADebug::Level)(x) -    ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "", LVL(5)), LVL(5)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "   \t  \n ", LVL(2)), LVL(2)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3", LVL(5)), LVL(3)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo", LVL(5)), LVL(3)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString( +    ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "", LVL(5)), LVL(5)); +    ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "   \t  \n ", LVL(2)), LVL(2)); +    ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "3", LVL(5)), LVL(3)); +    ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "3:*deo", LVL(5)), LVL(3)); +    ASSERT_EQ(ADebug::GetLevelFromSettingsString(              "video", "\t\n 3 \t\n:\t\n video \t\n", LVL(5)), LVL(3)); -    
ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "3:*deo,2:vid*", LVL(5)), LVL(2)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString( +    ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "3:*deo,2:vid*", LVL(5)), LVL(2)); +    ASSERT_EQ(ADebug::GetLevelFromSettingsString(              "avideo", "\t\n 3 \t\n:\t\n avideo \t\n,\t\n 2 \t\n:\t\n video \t\n", LVL(5)), LVL(3)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString( +    ASSERT_EQ(ADebug::GetLevelFromSettingsString(              "audio.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(2)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString( +    ASSERT_EQ(ADebug::GetLevelFromSettingsString(              "video.omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString("video", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3)); -    ASSERT_EQ(ADebug::GetDebugLevelFromString("omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(4)); +    ASSERT_EQ(ADebug::GetLevelFromSettingsString("video", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(3)); +    ASSERT_EQ(ADebug::GetLevelFromSettingsString("omx", "4:*omx,3:*d*o*,2:audio*", LVL(5)), LVL(4));  #undef LVL  } diff --git a/media/libstagefright/timedtext/Android.mk b/media/libstagefright/timedtext/Android.mk index 6a8b9fc..58fb12f 100644 --- a/media/libstagefright/timedtext/Android.mk +++ b/media/libstagefright/timedtext/Android.mk @@ -9,7 +9,8 @@ LOCAL_SRC_FILES:=                 \          TimedTextSRTSource.cpp    \          TimedTextPlayer.cpp -LOCAL_CFLAGS += -Wno-multichar -Werror +LOCAL_CFLAGS += -Wno-multichar -Werror -Wall +LOCAL_CLANG := true  LOCAL_C_INCLUDES:= \          $(TOP)/frameworks/av/include/media/stagefright/timedtext \ diff --git a/media/libstagefright/timedtext/TimedTextPlayer.cpp b/media/libstagefright/timedtext/TimedTextPlayer.cpp index a070487..aecf666 100644 --- a/media/libstagefright/timedtext/TimedTextPlayer.cpp +++ b/media/libstagefright/timedtext/TimedTextPlayer.cpp @@ -56,25 +56,25 @@ TimedTextPlayer::~TimedTextPlayer() { 
 }  void TimedTextPlayer::start() { -    (new AMessage(kWhatStart, id()))->post(); +    (new AMessage(kWhatStart, this))->post();  }  void TimedTextPlayer::pause() { -    (new AMessage(kWhatPause, id()))->post(); +    (new AMessage(kWhatPause, this))->post();  }  void TimedTextPlayer::resume() { -    (new AMessage(kWhatResume, id()))->post(); +    (new AMessage(kWhatResume, this))->post();  }  void TimedTextPlayer::seekToAsync(int64_t timeUs) { -    sp<AMessage> msg = new AMessage(kWhatSeek, id()); +    sp<AMessage> msg = new AMessage(kWhatSeek, this);      msg->setInt64("seekTimeUs", timeUs);      msg->post();  }  void TimedTextPlayer::setDataSource(sp<TimedTextSource> source) { -    sp<AMessage> msg = new AMessage(kWhatSetSource, id()); +    sp<AMessage> msg = new AMessage(kWhatSetSource, this);      msg->setObject("source", source);      msg->post();  } @@ -231,7 +231,7 @@ void TimedTextPlayer::doRead(MediaSource::ReadOptions* options) {      status_t err = mSource->read(&startTimeUs, &endTimeUs,                                   &(parcelEvent->parcel), options);      if (err == WOULD_BLOCK) { -        sp<AMessage> msg = new AMessage(kWhatRetryRead, id()); +        sp<AMessage> msg = new AMessage(kWhatRetryRead, this);          if (options != NULL) {              int64_t seekTimeUs = kInvalidTimeUs;              MediaSource::ReadOptions::SeekMode seekMode = @@ -259,7 +259,7 @@ void TimedTextPlayer::doRead(MediaSource::ReadOptions* options) {  void TimedTextPlayer::postTextEvent(const sp<ParcelEvent>& parcel, int64_t timeUs) {      int64_t delayUs = delayUsFromCurrentTime(timeUs); -    sp<AMessage> msg = new AMessage(kWhatSendSubtitle, id()); +    sp<AMessage> msg = new AMessage(kWhatSendSubtitle, this);      msg->setInt32("generation", mSendSubtitleGeneration);      if (parcel != NULL) {          msg->setObject("subtitle", parcel); diff --git a/media/libstagefright/timedtext/test/Android.mk b/media/libstagefright/timedtext/test/Android.mk index 9a9fde2..e0e0e0d 
100644 --- a/media/libstagefright/timedtext/test/Android.mk +++ b/media/libstagefright/timedtext/test/Android.mk @@ -26,4 +26,7 @@ LOCAL_SHARED_LIBRARIES := \      libstagefright_foundation \      libutils +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true +  include $(BUILD_NATIVE_TEST) diff --git a/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp b/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp index 3a06d61..211e732 100644 --- a/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp +++ b/media/libstagefright/timedtext/test/TimedTextSRTSource_test.cpp @@ -63,10 +63,10 @@ public:      }      virtual ssize_t readAt(off64_t offset, void *data, size_t size) { -        if (offset >= mSize) return 0; +        if ((size_t)offset >= mSize) return 0;          ssize_t avail = mSize - offset; -        if (avail > size) { +        if ((size_t)avail > size) {              avail = size;          }          memcpy(data, mData + offset, avail); diff --git a/media/libstagefright/webm/Android.mk b/media/libstagefright/webm/Android.mk index 7081463..bc53c56 100644 --- a/media/libstagefright/webm/Android.mk +++ b/media/libstagefright/webm/Android.mk @@ -1,8 +1,10 @@  LOCAL_PATH:= $(call my-dir)  include $(CLEAR_VARS) -LOCAL_CPPFLAGS += -D__STDINT_LIMITS \ -                  -Werror +LOCAL_CPPFLAGS += -D__STDINT_LIMITS + +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  LOCAL_SRC_FILES:= EbmlUtil.cpp        \                    WebmElement.cpp     \ diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp index 069961b..737f144 100644 --- a/media/libstagefright/webm/WebmWriter.cpp +++ b/media/libstagefright/webm/WebmWriter.cpp @@ -80,38 +80,6 @@ WebmWriter::WebmWriter(int fd)              mCuePoints);  } -WebmWriter::WebmWriter(const char *filename) -    : mInitCheck(NO_INIT), -      mTimeCodeScale(1000000), -      mStartTimestampUs(0), -      mStartTimeOffsetMs(0), -      mSegmentOffset(0), -      
mSegmentDataStart(0), -      mInfoOffset(0), -      mInfoSize(0), -      mTracksOffset(0), -      mCuesOffset(0), -      mPaused(false), -      mStarted(false), -      mIsFileSizeLimitExplicitlyRequested(false), -      mIsRealTimeRecording(false), -      mStreamableFile(true), -      mEstimatedCuesSize(0) { -    mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); -    if (mFd >= 0) { -        ALOGV("fd %d; flags: %o", mFd, fcntl(mFd, F_GETFL, 0)); -        mInitCheck = OK; -    } -    mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack); -    mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack); -    mSinkThread = new WebmFrameSinkThread( -            mFd, -            mSegmentDataStart, -            mStreams[kVideoIndex].mSink, -            mStreams[kAudioIndex].mSink, -            mCuePoints); -} -  // static  sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {      int32_t width, height; diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h index 36b6965..4ad770e 100644 --- a/media/libstagefright/webm/WebmWriter.h +++ b/media/libstagefright/webm/WebmWriter.h @@ -37,7 +37,6 @@ namespace android {  class WebmWriter : public MediaWriter {  public:      WebmWriter(int fd); -    WebmWriter(const char *filename);      ~WebmWriter() { reset(); } diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk index f70454a..fb28624 100644 --- a/media/libstagefright/wifi-display/Android.mk +++ b/media/libstagefright/wifi-display/Android.mk @@ -30,6 +30,9 @@ LOCAL_SHARED_LIBRARIES:= \          libui                           \          libutils                        \ +LOCAL_CFLAGS += -Wno-multichar -Werror -Wall +LOCAL_CLANG := true +  LOCAL_MODULE:= libstagefright_wfd  LOCAL_MODULE_TAGS:= optional diff --git a/media/libstagefright/wifi-display/MediaSender.cpp 
b/media/libstagefright/wifi-display/MediaSender.cpp index b1cdec0..6f0087f 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -121,7 +121,7 @@ status_t MediaSender::initAsync(          }          if (err == OK) { -            sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); +            sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);              notify->setInt32("generation", mGeneration);              mTSSender = new RTPSender(mNetSession, notify);              looper()->registerHandler(mTSSender); @@ -170,7 +170,7 @@ status_t MediaSender::initAsync(          return INVALID_OPERATION;      } -    sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatSenderNotify, this);      notify->setInt32("generation", mGeneration);      notify->setSize("trackIndex", trackIndex); diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp index 2f4af5b..dbc511c 100644 --- a/media/libstagefright/wifi-display/VideoFormats.cpp +++ b/media/libstagefright/wifi-display/VideoFormats.cpp @@ -248,8 +248,8 @@ void VideoFormats::getProfileLevel(      }      if (bestProfile == -1 || bestLevel == -1) { -        ALOGE("Profile or level not set for resolution type %d, index %d", -              type, index); +        ALOGE("Profile or level not set for resolution type %d, index %zu", +                type, index);          bestProfile = PROFILE_CBP;          bestLevel = LEVEL_31;      } @@ -382,7 +382,6 @@ bool VideoFormats::parseFormatSpec(const char *spec) {      disableAll();      unsigned native, dummy; -    unsigned res[3];      size_t size = strlen(spec);      size_t offset = 0; @@ -507,7 +506,7 @@ bool VideoFormats::PickBestFormat(                  continue;              } -            ALOGV("type %u, index %u, %u x %u %c%u supported", +            ALOGV("type %zu, index %zu, %zu x 
%zu %c%zu supported",                    i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond);              uint32_t score = width * height * framesPerSecond; diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index e88a3bd..c66a898 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -95,11 +95,11 @@ status_t RTPSender::initAsync(          return INVALID_OPERATION;      } -    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id()); +    sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, this);      sp<AMessage> rtcpNotify;      if (remoteRTCPPort >= 0) { -        rtcpNotify = new AMessage(kWhatRTCPNotify, id()); +        rtcpNotify = new AMessage(kWhatRTCPNotify, this);      }      CHECK_EQ(mRTPSessionID, 0); @@ -252,8 +252,6 @@ status_t RTPSender::queueTSPackets(      int64_t timeUs;      CHECK(tsPackets->meta()->findInt64("timeUs", &timeUs)); -    const size_t numTSPackets = tsPackets->size() / 188; -      size_t srcOffset = 0;      while (srcOffset < tsPackets->size()) {          sp<ABuffer> udpPacket = @@ -672,8 +670,8 @@ status_t RTPSender::onRTCPData(const sp<ABuffer> &buffer) {              default:              { -                ALOGW("Unknown RTCP packet type %u of size %d", -                     (unsigned)data[1], headerLength); +                ALOGW("Unknown RTCP packet type %u of size %zu", +                        (unsigned)data[1], headerLength);                  break;              }          } @@ -764,7 +762,7 @@ status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) {      return OK;  } -status_t RTPSender::parseAPP(const uint8_t *data, size_t size) { +status_t RTPSender::parseAPP(const uint8_t *data, size_t size __unused) {      if (!memcmp("late", &data[8], 4)) {          int64_t avgLatencyUs = (int64_t)U64_AT(&data[12]);          int64_t maxLatencyUs = 
(int64_t)U64_AT(&data[20]); diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 2834a66..471152e 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -93,7 +93,7 @@ Converter::~Converter() {  void Converter::shutdownAsync() {      ALOGV("shutdown"); -    (new AMessage(kWhatShutdown, id()))->post(); +    (new AMessage(kWhatShutdown, this))->post();  }  status_t Converter::init() { @@ -482,11 +482,11 @@ void Converter::scheduleDoMoreWork() {  #if 1      if (mEncoderActivityNotify == NULL) { -        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, id()); +        mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, this);      }      mEncoder->requestActivityNotification(mEncoderActivityNotify->dup());  #else -    sp<AMessage> notify = new AMessage(kWhatEncoderActivity, id()); +    sp<AMessage> notify = new AMessage(kWhatEncoderActivity, this);      notify->setInt64("whenUs", ALooper::GetNowUs());      mEncoder->requestActivityNotification(notify);  #endif @@ -731,8 +731,7 @@ status_t Converter::doMoreWork() {                  // MediaSender will post the following message when HDCP                  // is done, to release the output buffer back to encoder. -                sp<AMessage> notify(new AMessage( -                        kWhatReleaseOutputBuffer, id())); +                sp<AMessage> notify(new AMessage(kWhatReleaseOutputBuffer, this));                  notify->setInt32("bufferIndex", bufferIndex);                  buffer = new ABuffer( @@ -748,7 +747,7 @@ status_t Converter::doMoreWork() {              buffer->meta()->setInt64("timeUs", timeUs);              ALOGV("[%s] time %lld us (%.2f secs)", -                  mIsVideo ? "video" : "audio", timeUs, timeUs / 1E6); +                    mIsVideo ? 
"video" : "audio", (long long)timeUs, timeUs / 1E6);              memcpy(buffer->data(), outbuf->base() + offset, size); @@ -787,18 +786,18 @@ status_t Converter::doMoreWork() {  }  void Converter::requestIDRFrame() { -    (new AMessage(kWhatRequestIDRFrame, id()))->post(); +    (new AMessage(kWhatRequestIDRFrame, this))->post();  }  void Converter::dropAFrame() {      // Unsupported in surface input mode.      CHECK(!(mFlags & FLAG_USE_SURFACE_INPUT)); -    (new AMessage(kWhatDropAFrame, id()))->post(); +    (new AMessage(kWhatDropAFrame, this))->post();  }  void Converter::suspendEncoding(bool suspend) { -    sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, id()); +    sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, this);      msg->setInt32("suspend", suspend);      msg->post();  } diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h index 5876e07..b182990 100644 --- a/media/libstagefright/wifi-display/source/Converter.h +++ b/media/libstagefright/wifi-display/source/Converter.h @@ -23,7 +23,7 @@  namespace android {  struct ABuffer; -struct IGraphicBufferProducer; +class IGraphicBufferProducer;  struct MediaCodec;  #define ENABLE_SILENCE_DETECTION        0 diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp index 86b918f..ce07a4e 100644 --- a/media/libstagefright/wifi-display/source/MediaPuller.cpp +++ b/media/libstagefright/wifi-display/source/MediaPuller.cpp @@ -63,21 +63,21 @@ status_t MediaPuller::postSynchronouslyAndReturnError(  }  status_t MediaPuller::start() { -    return postSynchronouslyAndReturnError(new AMessage(kWhatStart, id())); +    return postSynchronouslyAndReturnError(new AMessage(kWhatStart, this));  }  void MediaPuller::stopAsync(const sp<AMessage> ¬ify) { -    sp<AMessage> msg = new AMessage(kWhatStop, id()); +    sp<AMessage> msg = new AMessage(kWhatStop, this);      
msg->setMessage("notify", notify);      msg->post();  }  void MediaPuller::pause() { -    (new AMessage(kWhatPause, id()))->post(); +    (new AMessage(kWhatPause, this))->post();  }  void MediaPuller::resume() { -    (new AMessage(kWhatResume, id()))->post(); +    (new AMessage(kWhatResume, this))->post();  }  void MediaPuller::onMessageReceived(const sp<AMessage> &msg) { @@ -105,7 +105,7 @@ void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {              sp<AMessage> response = new AMessage;              response->setInt32("err", err); -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              response->postReply(replyID);              break; @@ -215,7 +215,7 @@ void MediaPuller::onMessageReceived(const sp<AMessage> &msg) {  }  void MediaPuller::schedulePull() { -    sp<AMessage> msg = new AMessage(kWhatPull, id()); +    sp<AMessage> msg = new AMessage(kWhatPull, this);      msg->setInt32("generation", mPullGeneration);      msg->post();  } diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 2cb4786..ed5a404 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -214,7 +214,7 @@ void WifiDisplaySource::PlaybackSession::Track::stopAsync() {          mConverter->shutdownAsync();      } -    sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, id()); +    sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, this);      if (mStarted && mMediaPuller != NULL) {          if (mRepeaterSource != NULL) { @@ -345,12 +345,14 @@ bool WifiDisplaySource::PlaybackSession::Track::isSuspended() const {  ////////////////////////////////////////////////////////////////////////////////  WifiDisplaySource::PlaybackSession::PlaybackSession( +        const String16 &opPackageName,          const 
sp<ANetworkSession> &netSession,          const sp<AMessage> ¬ify,          const in_addr &interfaceAddr,          const sp<IHDCP> &hdcp,          const char *path) -    : mNetSession(netSession), +    : mOpPackageName(opPackageName), +      mNetSession(netSession),        mNotify(notify),        mInterfaceAddr(interfaceAddr),        mHDCP(hdcp), @@ -382,7 +384,7 @@ status_t WifiDisplaySource::PlaybackSession::init(          size_t videoResolutionIndex,          VideoFormats::ProfileType videoProfileType,          VideoFormats::LevelType videoLevelType) { -    sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, this);      mMediaSender = new MediaSender(mNetSession, notify);      looper()->registerHandler(mMediaSender); @@ -440,7 +442,7 @@ void WifiDisplaySource::PlaybackSession::updateLiveness() {  status_t WifiDisplaySource::PlaybackSession::play() {      updateLiveness(); -    (new AMessage(kWhatResume, id()))->post(); +    (new AMessage(kWhatResume, this))->post();      return OK;  } @@ -460,7 +462,7 @@ status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() {  status_t WifiDisplaySource::PlaybackSession::pause() {      updateLiveness(); -    (new AMessage(kWhatPause, id()))->post(); +    (new AMessage(kWhatPause, this))->post();      return OK;  } @@ -508,7 +510,7 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(              } else if (what == Converter::kWhatEOS) {                  CHECK_EQ(what, Converter::kWhatEOS); -                ALOGI("output EOS on track %d", trackIndex); +                ALOGI("output EOS on track %zu", trackIndex);                  ssize_t index = mTracks.indexOfKey(trackIndex);                  CHECK_GE(index, 0); @@ -581,7 +583,7 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(              CHECK(msg->findSize("trackIndex", &trackIndex));              if (what == Track::kWhatStopped) { -                
ALOGI("Track %d stopped", trackIndex); +                ALOGI("Track %zu stopped", trackIndex);                  sp<Track> track = mTracks.valueFor(trackIndex);                  looper()->unregisterHandler(track->id()); @@ -786,7 +788,7 @@ status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(          size_t trackIndex = mTracks.size(); -        sp<AMessage> notify = new AMessage(kWhatTrackNotify, id()); +        sp<AMessage> notify = new AMessage(kWhatTrackNotify, this);          notify->setSize("trackIndex", trackIndex);          sp<Track> track = new Track(notify, format); @@ -821,21 +823,27 @@ void WifiDisplaySource::PlaybackSession::schedulePullExtractor() {          return;      } +    int64_t delayUs = 1000000; // default delay is 1 sec      int64_t sampleTimeUs;      status_t err = mExtractor->getSampleTime(&sampleTimeUs); -    int64_t nowUs = ALooper::GetNowUs(); +    if (err == OK) { +        int64_t nowUs = ALooper::GetNowUs(); -    if (mFirstSampleTimeRealUs < 0ll) { -        mFirstSampleTimeRealUs = nowUs; -        mFirstSampleTimeUs = sampleTimeUs; -    } +        if (mFirstSampleTimeRealUs < 0ll) { +            mFirstSampleTimeRealUs = nowUs; +            mFirstSampleTimeUs = sampleTimeUs; +        } -    int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs; +        int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs; +        delayUs = whenUs - nowUs; +    } else { +        ALOGW("could not get sample time (%d)", err); +    } -    sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, id()); +    sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, this);      msg->setInt32("generation", mPullExtractorGeneration); -    msg->post(whenUs - nowUs); +    msg->post(delayUs);      mPullExtractorPending = true;  } @@ -857,7 +865,7 @@ void WifiDisplaySource::PlaybackSession::onPullExtractor() {      size_t trackIndex;      CHECK_EQ((status_t)OK, 
mExtractor->getSampleTrackIndex(&trackIndex)); -    sp<AMessage> msg = new AMessage(kWhatConverterNotify, id()); +    sp<AMessage> msg = new AMessage(kWhatConverterNotify, this);      msg->setSize(              "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex)); @@ -955,7 +963,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource(                      ? MEDIA_MIMETYPE_AUDIO_RAW : MEDIA_MIMETYPE_AUDIO_AAC);      } -    notify = new AMessage(kWhatConverterNotify, id()); +    notify = new AMessage(kWhatConverterNotify, this);      notify->setSize("trackIndex", trackIndex);      sp<Converter> converter = new Converter(notify, codecLooper, format); @@ -970,7 +978,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource(          return err;      } -    notify = new AMessage(Converter::kWhatMediaPullerNotify, converter->id()); +    notify = new AMessage(Converter::kWhatMediaPullerNotify, converter);      notify->setSize("trackIndex", trackIndex);      sp<MediaPuller> puller = new MediaPuller(source, notify); @@ -980,7 +988,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource(          *numInputBuffers = converter->getInputBufferCount();      } -    notify = new AMessage(kWhatTrackNotify, id()); +    notify = new AMessage(kWhatTrackNotify, this);      notify->setSize("trackIndex", trackIndex);      sp<Track> track = new Track( @@ -1063,6 +1071,7 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource(  status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {      sp<AudioSource> audioSource = new AudioSource(              AUDIO_SOURCE_REMOTE_SUBMIX, +            mOpPackageName,              48000 /* sampleRate */,              2 /* channelCount */); diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h index 2824143..f6673df 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.h +++ 
b/media/libstagefright/wifi-display/source/PlaybackSession.h @@ -22,11 +22,13 @@  #include "VideoFormats.h"  #include "WifiDisplaySource.h" +#include <utils/String16.h> +  namespace android {  struct ABuffer;  struct IHDCP; -struct IGraphicBufferProducer; +class IGraphicBufferProducer;  struct MediaPuller;  struct MediaSource;  struct MediaSender; @@ -36,6 +38,7 @@ struct NuMediaExtractor;  // display.  struct WifiDisplaySource::PlaybackSession : public AHandler {      PlaybackSession( +            const String16 &opPackageName,              const sp<ANetworkSession> &netSession,              const sp<AMessage> ¬ify,              const struct in_addr &interfaceAddr, @@ -96,6 +99,8 @@ private:          kWhatPullExtractorSample,      }; +    String16 mOpPackageName; +      sp<ANetworkSession> mNetSession;      sp<AMessage> mNotify;      in_addr mInterfaceAddr; diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp index 59d7e6e..af6b663 100644 --- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp +++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp @@ -173,7 +173,7 @@ status_t RepeaterSource::read(  }  void RepeaterSource::postRead() { -    (new AMessage(kWhatRead, mReflector->id()))->post(); +    (new AMessage(kWhatRead, mReflector))->post();  }  void RepeaterSource::onMessageReceived(const sp<AMessage> &msg) { diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 7eb8b73..e26165e 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -50,14 +50,16 @@ const int64_t WifiDisplaySource::kPlaybackSessionTimeoutUs;  const AString WifiDisplaySource::sUserAgent = MakeUserAgent();  WifiDisplaySource::WifiDisplaySource( +        const String16 &opPackageName,          const 
sp<ANetworkSession> &netSession,          const sp<IRemoteDisplayClient> &client,          const char *path) -    : mState(INITIALIZED), +    : mOpPackageName(opPackageName), +      mState(INITIALIZED),        mNetSession(netSession),        mClient(client),        mSessionID(0), -      mStopReplyID(0), +      mStopReplyID(NULL),        mChosenRTPPort(-1),        mUsingPCMAudio(false),        mClientSessionID(0), @@ -106,7 +108,7 @@ static status_t PostAndAwaitResponse(  status_t WifiDisplaySource::start(const char *iface) {      CHECK_EQ(mState, INITIALIZED); -    sp<AMessage> msg = new AMessage(kWhatStart, id()); +    sp<AMessage> msg = new AMessage(kWhatStart, this);      msg->setString("iface", iface);      sp<AMessage> response; @@ -114,21 +116,21 @@ status_t WifiDisplaySource::start(const char *iface) {  }  status_t WifiDisplaySource::stop() { -    sp<AMessage> msg = new AMessage(kWhatStop, id()); +    sp<AMessage> msg = new AMessage(kWhatStop, this);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response);  }  status_t WifiDisplaySource::pause() { -    sp<AMessage> msg = new AMessage(kWhatPause, id()); +    sp<AMessage> msg = new AMessage(kWhatPause, this);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response);  }  status_t WifiDisplaySource::resume() { -    sp<AMessage> msg = new AMessage(kWhatResume, id()); +    sp<AMessage> msg = new AMessage(kWhatResume, this);      sp<AMessage> response;      return PostAndAwaitResponse(msg, &response); @@ -138,7 +140,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {      switch (msg->what()) {          case kWhatStart:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              AString iface; @@ -167,7 +169,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {              if (err == OK) {                  if (inet_aton(iface.c_str(), 
&mInterfaceAddr) != 0) { -                    sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id()); +                    sp<AMessage> notify = new AMessage(kWhatRTSPNotify, this);                      err = mNetSession->createRTSPServer(                              mInterfaceAddr, port, notify, &mSessionID); @@ -310,7 +312,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {                  if (err == OK) {                      mState = AWAITING_CLIENT_TEARDOWN; -                    (new AMessage(kWhatTeardownTriggerTimedOut, id()))->post( +                    (new AMessage(kWhatTeardownTriggerTimedOut, this))->post(                              kTeardownTriggerTimeouSecs * 1000000ll);                      break; @@ -325,7 +327,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {          case kWhatPause:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              status_t err = OK; @@ -345,7 +347,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {          case kWhatResume:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              CHECK(msg->senderAwaitsResponse(&replyID));              status_t err = OK; @@ -492,7 +494,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {              if (mState == AWAITING_CLIENT_TEARDOWN) {                  ALOGI("TEARDOWN trigger timed out, forcing disconnection."); -                CHECK_NE(mStopReplyID, 0); +                CHECK(mStopReplyID != NULL);                  finishStop();                  break;              } @@ -529,7 +531,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {                      // HDCPObserver::notify is completely handled before                      // we clear the HDCP instance and unload the shared                      // library :( -                    (new 
AMessage(kWhatFinishStop2, id()))->post(300000ll); +                    (new AMessage(kWhatFinishStop2, this))->post(300000ll);                      break;                  } @@ -881,7 +883,7 @@ status_t WifiDisplaySource::onReceiveM3Response(                      &framesPerSecond,                      &interlaced)); -        ALOGI("Picked video resolution %u x %u %c%u", +        ALOGI("Picked video resolution %zu x %zu %c%zu",                width, height, interlaced ? 'i' : 'p', framesPerSecond);          ALOGI("Picked AVC profile %d, level %d", @@ -1027,7 +1029,7 @@ void WifiDisplaySource::scheduleReaper() {      }      mReaperPending = true; -    (new AMessage(kWhatReapDeadClients, id()))->post(kReaperIntervalUs); +    (new AMessage(kWhatReapDeadClients, this))->post(kReaperIntervalUs);  }  void WifiDisplaySource::scheduleKeepAlive(int32_t sessionID) { @@ -1035,7 +1037,7 @@ void WifiDisplaySource::scheduleKeepAlive(int32_t sessionID) {      // expire, make sure the timeout is greater than 5 secs to begin with.      
CHECK_GT(kPlaybackSessionTimeoutUs, 5000000ll); -    sp<AMessage> msg = new AMessage(kWhatKeepAlive, id()); +    sp<AMessage> msg = new AMessage(kWhatKeepAlive, this);      msg->setInt32("sessionID", sessionID);      msg->post(kPlaybackSessionTimeoutUs - 5000000ll);  } @@ -1239,13 +1241,13 @@ status_t WifiDisplaySource::onSetupRequest(      int32_t playbackSessionID = makeUniquePlaybackSessionID(); -    sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, this);      notify->setInt32("playbackSessionID", playbackSessionID);      notify->setInt32("sessionID", sessionID);      sp<PlaybackSession> playbackSession =          new PlaybackSession( -                mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str()); +                mOpPackageName, mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str());      looper()->registerHandler(playbackSession); @@ -1470,7 +1472,7 @@ status_t WifiDisplaySource::onTeardownRequest(      mNetSession->sendRequest(sessionID, response.c_str());      if (mState == AWAITING_CLIENT_TEARDOWN) { -        CHECK_NE(mStopReplyID, 0); +        CHECK(mStopReplyID != NULL);          finishStop();      } else {          mClient->onDisplayError(IRemoteDisplayClient::kDisplayErrorUnknown); @@ -1707,7 +1709,7 @@ status_t WifiDisplaySource::makeHDCP() {          return ERROR_UNSUPPORTED;      } -    sp<AMessage> notify = new AMessage(kWhatHDCPNotify, id()); +    sp<AMessage> notify = new AMessage(kWhatHDCPNotify, this);      mHDCPObserver = new HDCPObserver(notify);      status_t err = mHDCP->setObserver(mHDCPObserver); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 750265f..c25a675 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -25,10 +25,13 @@  #include 
<netinet/in.h> +#include <utils/String16.h> +  namespace android { +struct AReplyToken;  struct IHDCP; -struct IRemoteDisplayClient; +class IRemoteDisplayClient;  struct ParsedMessage;  // Represents the RTSP server acting as a wifi display source. @@ -37,6 +40,7 @@ struct WifiDisplaySource : public AHandler {      static const unsigned kWifiDisplayDefaultPort = 7236;      WifiDisplaySource( +            const String16 &opPackageName,              const sp<ANetworkSession> &netSession,              const sp<IRemoteDisplayClient> &client,              const char *path = NULL); @@ -113,6 +117,8 @@ private:      static const AString sUserAgent; +    String16 mOpPackageName; +      State mState;      VideoFormats mSupportedSourceVideoFormats;      sp<ANetworkSession> mNetSession; @@ -121,7 +127,7 @@ private:      struct in_addr mInterfaceAddr;      int32_t mSessionID; -    uint32_t mStopReplyID; +    sp<AReplyToken> mStopReplyID;      AString mWfdClientRtpPorts;      int32_t mChosenRTPPort;  // extracted from "wfd_client_rtp_ports" diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk index bb86dfc..dc67288 100644 --- a/media/libstagefright/yuv/Android.mk +++ b/media/libstagefright/yuv/Android.mk @@ -12,7 +12,7 @@ LOCAL_SHARED_LIBRARIES :=       \  LOCAL_MODULE:= libstagefright_yuv -LOCAL_CFLAGS += -Werror - +LOCAL_CFLAGS += -Werror -Wall +LOCAL_CLANG := true  include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp index bb3e2fd..c098135 100644 --- a/media/libstagefright/yuv/YUVImage.cpp +++ b/media/libstagefright/yuv/YUVImage.cpp @@ -374,13 +374,13 @@ uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) {  void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,          uint8_t *r, uint8_t *g, uint8_t *b) const { -    *r = yValue + (1.370705 * (vValue-128)); -    *g = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128)); -    *b = yValue 
+ (1.732446 * (uValue-128)); +    int rTmp = yValue + (1.370705 * (vValue-128)); +    int gTmp = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128)); +    int bTmp = yValue + (1.732446 * (uValue-128)); -    *r = clamp(*r, 0, 255); -    *g = clamp(*g, 0, 255); -    *b = clamp(*b, 0, 255); +    *r = clamp(rTmp, 0, 255); +    *g = clamp(gTmp, 0, 255); +    *b = clamp(bTmp, 0, 255);  }  bool YUVImage::writeToPPM(const char *filename) const { diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index 3a280f0..b6de0d9 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -11,14 +11,16 @@ endif  include $(CLEAR_VARS)  LOCAL_SRC_FILES:= \ -	main_mediaserver.cpp  +	main_mediaserver.cpp  LOCAL_SHARED_LIBRARIES := \  	libaudioflinger \  	libaudiopolicyservice \  	libcamera_metadata\  	libcameraservice \ +	libicuuc \  	libmedialogservice \ +	libresourcemanagerservice \  	libcutils \  	libnbaio \  	libmedia \ @@ -26,19 +28,27 @@ LOCAL_SHARED_LIBRARIES := \  	libutils \  	liblog \  	libbinder \ -	libsoundtriggerservice +	libsoundtriggerservice \ +	libradioservice  LOCAL_STATIC_LIBRARIES := \ -	libregistermsext +        libicuandroid_utils \ +        libregistermsext  LOCAL_C_INCLUDES := \      frameworks/av/media/libmediaplayerservice \      frameworks/av/services/medialog \      frameworks/av/services/audioflinger \      frameworks/av/services/audiopolicy \ +    frameworks/av/services/audiopolicy/common/managerdefinitions/include \ +    frameworks/av/services/audiopolicy/common/include \ +    frameworks/av/services/audiopolicy/engine/interface \      frameworks/av/services/camera/libcameraservice \ +    frameworks/av/services/mediaresourcemanager \      $(call include-path-for, audio-utils) \ -    frameworks/av/services/soundtrigger +    frameworks/av/services/soundtrigger \ +    frameworks/av/services/radio \ +    external/sonic  LOCAL_MODULE:= mediaserver  LOCAL_32_BIT_ONLY := true diff --git 
a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp index af1c9e6..4a485ed 100644 --- a/media/mediaserver/main_mediaserver.cpp +++ b/media/mediaserver/main_mediaserver.cpp @@ -31,10 +31,13 @@  // from LOCAL_C_INCLUDES  #include "AudioFlinger.h"  #include "CameraService.h" +#include "IcuUtils.h"  #include "MediaLogService.h"  #include "MediaPlayerService.h" -#include "AudioPolicyService.h" +#include "ResourceManagerService.h" +#include "service/AudioPolicyService.h"  #include "SoundTriggerHwService.h" +#include "RadioService.h"  using namespace android; @@ -122,14 +125,17 @@ int main(int argc __unused, char** argv)              prctl(PR_SET_PDEATHSIG, SIGKILL);   // if parent media.log dies before me, kill me also              setpgid(0, 0);                      // but if I die first, don't kill my parent          } +        InitializeIcuOrDie();          sp<ProcessState> proc(ProcessState::self());          sp<IServiceManager> sm = defaultServiceManager();          ALOGI("ServiceManager: %p", sm.get());          AudioFlinger::instantiate();          MediaPlayerService::instantiate(); +        ResourceManagerService::instantiate();          CameraService::instantiate();          AudioPolicyService::instantiate();          SoundTriggerHwService::instantiate(); +        RadioService::instantiate();          registerExtensions();          ProcessState::self()->startThreadPool();          IPCThreadState::self()->joinThreadPool(); diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp index ed00b72..cd0c462 100644 --- a/media/ndk/NdkMediaCodec.cpp +++ b/media/ndk/NdkMediaCodec.cpp @@ -116,7 +116,7 @@ void CodecHandler::onMessageReceived(const sp<AMessage> &msg) {          case kWhatStopActivityNotifications:          { -            uint32_t replyID; +            sp<AReplyToken> replyID;              msg->senderAwaitsResponse(&replyID);              mCodec->mGeneration++; @@ -136,7 +136,7 @@ void 
CodecHandler::onMessageReceived(const sp<AMessage> &msg) {  static void requestActivityNotification(AMediaCodec *codec) { -    (new AMessage(kWhatRequestActivityNotifications, codec->mHandler->id()))->post(); +    (new AMessage(kWhatRequestActivityNotifications, codec->mHandler))->post();  }  extern "C" { @@ -154,6 +154,10 @@ static AMediaCodec * createAMediaCodec(const char *name, bool name_is_type, bool      } else {          mData->mCodec = android::MediaCodec::CreateByComponentName(mData->mLooper, name);      } +    if (mData->mCodec == NULL) {  // failed to create codec +        AMediaCodec_delete(mData); +        return NULL; +    }      mData->mHandler = new CodecHandler(mData);      mData->mLooper->registerHandler(mData->mHandler);      mData->mGeneration = 1; @@ -180,17 +184,21 @@ AMediaCodec* AMediaCodec_createEncoderByType(const char *name) {  EXPORT  media_status_t AMediaCodec_delete(AMediaCodec *mData) { -    if (mData->mCodec != NULL) { -        mData->mCodec->release(); -        mData->mCodec.clear(); -    } +    if (mData != NULL) { +        if (mData->mCodec != NULL) { +            mData->mCodec->release(); +            mData->mCodec.clear(); +        } -    if (mData->mLooper != NULL) { -        mData->mLooper->unregisterHandler(mData->mHandler->id()); -        mData->mLooper->stop(); -        mData->mLooper.clear(); +        if (mData->mLooper != NULL) { +            if (mData->mHandler != NULL) { +                mData->mLooper->unregisterHandler(mData->mHandler->id()); +            } +            mData->mLooper->stop(); +            mData->mLooper.clear(); +        } +        delete mData;      } -    delete mData;      return AMEDIA_OK;  } @@ -219,7 +227,7 @@ media_status_t AMediaCodec_start(AMediaCodec *mData) {      if (ret != OK) {          return translate_error(ret);      } -    mData->mActivityNotification = new AMessage(kWhatActivityNotify, mData->mHandler->id()); +    mData->mActivityNotification = new AMessage(kWhatActivityNotify, 
mData->mHandler);      mData->mActivityNotification->setInt32("generation", mData->mGeneration);      requestActivityNotification(mData);      return AMEDIA_OK; @@ -229,7 +237,7 @@ EXPORT  media_status_t AMediaCodec_stop(AMediaCodec *mData) {      media_status_t ret = translate_error(mData->mCodec->stop()); -    sp<AMessage> msg = new AMessage(kWhatStopActivityNotifications, mData->mHandler->id()); +    sp<AMessage> msg = new AMessage(kWhatStopActivityNotifications, mData->mHandler);      sp<AMessage> response;      msg->postAndAwaitResponse(&response);      mData->mActivityNotification.clear(); @@ -352,7 +360,8 @@ media_status_t AMediaCodec_releaseOutputBufferAtTime(  }  //EXPORT -media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback, void *userdata) { +media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback, +        void *userdata) {      mData->mCallback = callback;      mData->mCallbackUserData = userdata;      return AMEDIA_OK; diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp index 7a1048c..83a5ba1 100644 --- a/media/ndk/NdkMediaDrm.cpp +++ b/media/ndk/NdkMediaDrm.cpp @@ -312,8 +312,10 @@ media_status_t AMediaDrm_getKeyRequest(AMediaDrm *mObj, const AMediaDrmScope *sc                  String8(optionalParameters[i].mValue));      }      String8 defaultUrl; +    DrmPlugin::KeyRequestType keyRequestType;      status_t status = mObj->mDrm->getKeyRequest(*iter, mdInit, String8(mimeType), -            mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl); +            mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl, +            &keyRequestType);      if (status != OK) {          return translateStatus(status);      } else { @@ -725,4 +727,3 @@ media_status_t AMediaDrm_verify(AMediaDrm *mObj, const AMediaDrmSessionId *sessi  }  } // extern "C" - diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp index db57d0b..0ecd64f 
100644 --- a/media/ndk/NdkMediaExtractor.cpp +++ b/media/ndk/NdkMediaExtractor.cpp @@ -70,7 +70,8 @@ media_status_t AMediaExtractor_delete(AMediaExtractor *mData) {  }  EXPORT -media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset, off64_t length) { +media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset, +        off64_t length) {      ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);      return translate_error(mData->mImpl->setDataSource(fd, offset, length));  } diff --git a/media/utils/Android.mk b/media/utils/Android.mk new file mode 100644 index 0000000..dfadbc8 --- /dev/null +++ b/media/utils/Android.mk @@ -0,0 +1,39 @@ +# Copyright 2015 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +#      http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ +  BatteryNotifier.cpp \ + +LOCAL_SHARED_LIBRARIES := \ +  libbinder \ +  libcutils \ +  liblog \ +  libutils \ + +LOCAL_C_INCLUDES := $(LOCAL_PATH)/include + +LOCAL_CFLAGS += \ +  -Wall \ +  -Wextra \ +  -Werror \ + +LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include + +LOCAL_MODULE := libmediautils + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/utils/BatteryNotifier.cpp b/media/utils/BatteryNotifier.cpp new file mode 100644 index 0000000..7f9cd7a --- /dev/null +++ b/media/utils/BatteryNotifier.cpp @@ -0,0 +1,213 @@ +/* + * Copyright 2015, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + *     http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "include/mediautils/BatteryNotifier.h" + +#include <binder/IServiceManager.h> +#include <utils/Log.h> +#include <private/android_filesystem_config.h> + +namespace android { + +void BatteryNotifier::DeathNotifier::binderDied(const wp<IBinder>& /*who*/) { +    BatteryNotifier::getInstance().onBatteryStatServiceDied(); +} + +BatteryNotifier::BatteryNotifier() : mVideoRefCount(0), mAudioRefCount(0) {} + +BatteryNotifier::~BatteryNotifier() { +    Mutex::Autolock _l(mLock); +    if (mDeathNotifier != nullptr) { +        IInterface::asBinder(mBatteryStatService)->unlinkToDeath(mDeathNotifier); +    } +} + +void BatteryNotifier::noteStartVideo() { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    if (mVideoRefCount == 0 && batteryService != nullptr) { +        batteryService->noteStartVideo(AID_MEDIA); +    } +    mVideoRefCount++; +} + +void BatteryNotifier::noteStopVideo() { +    Mutex::Autolock _l(mLock); +    if (mVideoRefCount == 0) { +        ALOGW("%s: video refcount is broken.", __FUNCTION__); +        return; +    } + +    sp<IBatteryStats> batteryService = getBatteryService_l(); + +    mVideoRefCount--; +    if (mVideoRefCount == 0 && batteryService != nullptr) { +        batteryService->noteStopVideo(AID_MEDIA); +    } +} + +void BatteryNotifier::noteResetVideo() { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    mVideoRefCount = 0; +    if (batteryService != nullptr) { +        batteryService->noteResetAudio(); +    } +} + +void BatteryNotifier::noteStartAudio() { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    if (mAudioRefCount == 0 && batteryService != nullptr) { +        batteryService->noteStartAudio(AID_MEDIA); +    } +    mAudioRefCount++; +} + +void BatteryNotifier::noteStopAudio() { +    Mutex::Autolock _l(mLock); +    if (mAudioRefCount == 0) { +        ALOGW("%s: audio refcount is 
broken.", __FUNCTION__); +        return; +    } + +    sp<IBatteryStats> batteryService = getBatteryService_l(); + +    mAudioRefCount--; +    if (mAudioRefCount == 0 && batteryService != nullptr) { +        batteryService->noteStopAudio(AID_MEDIA); +    } +} + +void BatteryNotifier::noteResetAudio() { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    mAudioRefCount = 0; +    if (batteryService != nullptr) { +        batteryService->noteResetAudio(); +    } +} + +void BatteryNotifier::noteFlashlightOn(const String8& id, int uid) { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); + +    std::pair<String8, int> k = std::make_pair(id, uid); +    if (!mFlashlightState[k]) { +        mFlashlightState[k] = true; +        if (batteryService != nullptr) { +            batteryService->noteFlashlightOn(uid); +        } +    } +} + +void BatteryNotifier::noteFlashlightOff(const String8& id, int uid) { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); + +    std::pair<String8, int> k = std::make_pair(id, uid); +    if (mFlashlightState[k]) { +        mFlashlightState[k] = false; +        if (batteryService != nullptr) { +            batteryService->noteFlashlightOff(uid); +        } +    } +} + +void BatteryNotifier::noteResetFlashlight() { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    mFlashlightState.clear(); +    if (batteryService != nullptr) { +        batteryService->noteResetFlashlight(); +    } +} + +void BatteryNotifier::noteStartCamera(const String8& id, int uid) { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    std::pair<String8, int> k = std::make_pair(id, uid); +    if (!mCameraState[k]) { +        mCameraState[k] = true; +        if (batteryService != nullptr) { +            batteryService->noteStartCamera(uid); +       
 } +    } +} + +void BatteryNotifier::noteStopCamera(const String8& id, int uid) { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    std::pair<String8, int> k = std::make_pair(id, uid); +    if (mCameraState[k]) { +        mCameraState[k] = false; +        if (batteryService != nullptr) { +            batteryService->noteStopCamera(uid); +        } +    } +} + +void BatteryNotifier::noteResetCamera() { +    Mutex::Autolock _l(mLock); +    sp<IBatteryStats> batteryService = getBatteryService_l(); +    mCameraState.clear(); +    if (batteryService != nullptr) { +        batteryService->noteResetCamera(); +    } +} + +void BatteryNotifier::onBatteryStatServiceDied() { +    Mutex::Autolock _l(mLock); +    mBatteryStatService.clear(); +    mDeathNotifier.clear(); +    // Do not reset mVideoRefCount and mAudioRefCount here. The ref +    // counting is independent of the battery service availability. +    // We need this if battery service becomes available after media +    // started. 
+ +} + +sp<IBatteryStats> BatteryNotifier::getBatteryService_l() { +    if (mBatteryStatService != nullptr) { +        return mBatteryStatService; +    } +    // Get battery service from service manager +    const sp<IServiceManager> sm(defaultServiceManager()); +    if (sm != nullptr) { +        const String16 name("batterystats"); +        mBatteryStatService = interface_cast<IBatteryStats>(sm->checkService(name)); +        if (mBatteryStatService == nullptr) { +            ALOGE("batterystats service unavailable!"); +            return nullptr; +        } + +        mDeathNotifier = new DeathNotifier(); +        IInterface::asBinder(mBatteryStatService)->linkToDeath(mDeathNotifier); + +        // Notify start now if media already started +        if (mVideoRefCount > 0) { +            mBatteryStatService->noteStartVideo(AID_MEDIA); +        } +        if (mAudioRefCount > 0) { +            mBatteryStatService->noteStartAudio(AID_MEDIA); +        } +    } +    return mBatteryStatService; +} + +ANDROID_SINGLETON_STATIC_INSTANCE(BatteryNotifier); + +}  // namespace android diff --git a/media/utils/README b/media/utils/README new file mode 100644 index 0000000..65ab0b8 --- /dev/null +++ b/media/utils/README @@ -0,0 +1,4 @@ +This is a common shared library for media utility classes. + +Consider adding your utility class/function here if it will +be used across several of the media libraries. diff --git a/media/utils/include/mediautils/BatteryNotifier.h b/media/utils/include/mediautils/BatteryNotifier.h new file mode 100644 index 0000000..4904804 --- /dev/null +++ b/media/utils/include/mediautils/BatteryNotifier.h @@ -0,0 +1,73 @@ +/* + * Copyright 2015, The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + *     http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_BATTERY_NOTIFIER_H +#define MEDIA_BATTERY_NOTIFIER_H + +#include <binder/IBatteryStats.h> +#include <utils/Singleton.h> +#include <utils/String8.h> + +#include <map> +#include <utility> + +namespace android { + +/** + * Class used for logging battery life events in mediaserver. + */ +class BatteryNotifier : public Singleton<BatteryNotifier> { + +    friend class Singleton<BatteryNotifier>; +    BatteryNotifier(); + +public: +    ~BatteryNotifier(); + +    void noteStartVideo(); +    void noteStopVideo(); +    void noteResetVideo(); +    void noteStartAudio(); +    void noteStopAudio(); +    void noteResetAudio(); +    void noteFlashlightOn(const String8& id, int uid); +    void noteFlashlightOff(const String8& id, int uid); +    void noteResetFlashlight(); +    void noteStartCamera(const String8& id, int uid); +    void noteStopCamera(const String8& id, int uid); +    void noteResetCamera(); + +private: +    void onBatteryStatServiceDied(); + +    class DeathNotifier : public IBinder::DeathRecipient { +        virtual void binderDied(const wp<IBinder>& /*who*/); +    }; + +    Mutex mLock; +    int mVideoRefCount; +    int mAudioRefCount; +    std::map<std::pair<String8, int>, bool> mFlashlightState; +    std::map<std::pair<String8, int>, bool> mCameraState; +    sp<IBatteryStats> mBatteryStatService; +    sp<DeathNotifier> mDeathNotifier; + +    sp<IBatteryStats> getBatteryService_l(); +}; + +}  // namespace android + +#endif // MEDIA_BATTERY_NOTIFIER_H  | 
