-rw-r--r--   CleanSpec.mk                                        |   5
-rw-r--r--   core/Makefile                                       |  40
-rw-r--r--   core/android_manifest.mk                            |  22
-rw-r--r--   core/binary.mk                                      |  19
-rw-r--r--   core/cleanbuild.mk                                  |   1
-rw-r--r--   core/clear_vars.mk                                  |   3
-rw-r--r--   core/combo/HOST_windows-x86.mk                      |   2
-rw-r--r--   core/java.mk                                        |   3
-rw-r--r--   core/main.mk                                        |   3
-rw-r--r--   core/package_internal.mk                            |   4
-rw-r--r--   core/prebuilt_internal.mk                           |  35
-rw-r--r--   core/version_defaults.mk                            |   4
-rw-r--r--   envsetup.sh                                         |  90
-rw-r--r--   target/board/generic_arm64/BoardConfig.mk           |   5
-rw-r--r--   target/product/core.mk                              |   5
-rw-r--r--   target/product/core_tiny.mk                         |   1
-rw-r--r--   target/product/security/verity.pk8                  | bin 0 -> 1219 bytes
-rw-r--r--   target/product/security/verity.x509.pem             |  24
-rw-r--r--   target/product/security/verity_key                  | bin 524 -> 524 bytes
-rw-r--r--   target/product/security/verity_private_dev_key      |  28
-rw-r--r--   target/product/verity.mk                            |   6
-rw-r--r--   tools/droiddoc/templates-sdk/assets/css/default.css |   9
-rw-r--r--   tools/droiddoc/templates-sdk/assets/js/docs.js      |  44
-rw-r--r--   tools/droiddoc/templates-sdk/components/masthead.cs |  50
-rw-r--r--   tools/droiddoc/templates-sdk/customizations.cs      |   4
-rw-r--r--   tools/droiddoc/templates-sdk/docpage.cs             |   8
-rw-r--r--   tools/droiddoc/templates-sdk/head_tag.cs            |  15
-rwxr-xr-x   tools/releasetools/add_img_to_target_files.py       |  98
-rw-r--r--   tools/releasetools/blockimgdiff.py                  | 243
-rwxr-xr-x   tools/releasetools/build_image.py                   |   7
-rw-r--r--   tools/releasetools/common.py                        |  11
-rw-r--r--   tools/releasetools/edify_generator.py               |  23
-rwxr-xr-x   tools/releasetools/ota_from_target_files            |  62
-rw-r--r--   tools/releasetools/rangelib.py                      |  86
34 files changed, 802 insertions(+), 158 deletions(-)
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 3e5d199..56a1cad 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -302,6 +302,11 @@ $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+# API 22!
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
# Move to libc++ as the default STL.
$(call add-clean-step, rm -rf $(OUT_DIR))
diff --git a/core/Makefile b/core/Makefile
index 43be804..0b24dd5 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -491,6 +491,11 @@ ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
tmp_dir_for_image := $(call intermediates-dir-for,EXECUTABLES,boot_img)/bootimg
INTERNAL_BOOTIMAGE_ARGS += --tmpdir $(tmp_dir_for_image)
INTERNAL_BOOTIMAGE_ARGS += --genext2fs $(MKEXT2IMG)
+
+ifeq ($(TARGET_BOOTIMAGE_USE_EXTLINUX),true)
+INTERNAL_BOOTIMAGE_ARGS += --extlinuxconf $(TARGET_BOOTIMAGE_EXTLINUX_CONFIG)
+endif
+
$(INSTALLED_BOOTIMAGE_TARGET): $(MKEXT2IMG) $(INTERNAL_BOOTIMAGE_FILES)
$(call pretty,"Target boot image: $@")
$(hide) $(MKEXT2BOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) --output $@
@@ -505,14 +510,14 @@ else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY)) # TARGE
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
$(call pretty,"Target boot image: $@")
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $@
+ $(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
@echo "make $@: ignoring dependencies"
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
- $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $(INSTALLED_BOOTIMAGE_TARGET)
+ $(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET)
$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
else # PRODUCT_SUPPORTS_VERITY != true
@@ -674,6 +679,13 @@ endif
endif
endif
+# These options tell the recovery updater/installer how to mount the partitions writable.
+# <fstype>=<fstype_opts>[|<fstype_opts>]...
+# fstype_opts := <opt>[,<opt>]...
+# opt := <name>[=<value>]
+# The following worked on Nexus devices with Kernel 3.1, 3.4, 3.10
+DEFAULT_TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS := ext4=max_batch_time=0,commit=1,data=ordered,barrier=1,errors=panic,nodelalloc
+
ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
endif
@@ -702,13 +714,16 @@ INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
define generate-userimage-prop-dictionary
$(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_PARTITION_SIZE),$(hide) echo "system_size=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_SYSTEMIMAGE_JOURNAL_SIZE),$(hide) echo "system_journal_size=$(BOARD_SYSTEMIMAGE_JOURNAL_SIZE)" >> $(1))
$(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),$(hide) echo "cache_size=$(BOARD_CACHEIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "vendor_fs_type=$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_VENDORIMAGE_PARTITION_SIZE),$(hide) echo "vendor_size=$(BOARD_VENDORIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_VENDORIMAGE_JOURNAL_SIZE),$(hide) echo "vendor_journal_size=$(BOARD_VENDORIMAGE_JOURNAL_SIZE)" >> $(1))
$(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
$(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
$(if $(mkyaffs2_extra_flags),$(hide) echo "mkyaffs2_extra_flags=$(mkyaffs2_extra_flags)" >> $(1))
$(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(1)
@@ -857,7 +872,7 @@ $(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
$(hide) $(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
$(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY))
- $(BOOT_SIGNER) /recovery $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $@
+ $(BOOT_SIGNER) /recovery $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
endif
$(hide) $(call assert-max-image-size,$@,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))
@echo ----- Made recovery image: $@ --------
@@ -1230,6 +1245,12 @@ vendorimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
endif # BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
# -----------------------------------------------------------------
+# bring in the installer image generation defines if necessary
+ifeq ($(TARGET_USE_DISKINSTALLER),true)
+include bootable/diskinstaller/config.mk
+endif
+
+# -----------------------------------------------------------------
# host tools needed to build dist and OTA packages
DISTTOOLS := $(HOST_OUT_EXECUTABLES)/minigzip \
@@ -1396,6 +1417,12 @@ endif
ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
$(hide) echo "recovery_size=$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
endif
+ifdef TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS
+ @# TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS can be empty to indicate that nothing but defaults should be used.
+ $(hide) echo "recovery_mount_options=$(TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS)" >> $(zip_root)/META/misc_info.txt
+else
+ $(hide) echo "recovery_mount_options=$(DEFAULT_TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS)" >> $(zip_root)/META/misc_info.txt
+endif
$(hide) echo "tool_extensions=$(tool_extensions)" >> $(zip_root)/META/misc_info.txt
$(hide) echo "default_system_dev_certificate=$(DEFAULT_SYSTEM_DEV_CERTIFICATE)" >> $(zip_root)/META/misc_info.txt
ifdef PRODUCT_EXTRA_RECOVERY_KEYS
@@ -1405,6 +1432,7 @@ endif
$(hide) echo "use_set_metadata=1" >> $(zip_root)/META/misc_info.txt
$(hide) echo "multistage_support=1" >> $(zip_root)/META/misc_info.txt
$(hide) echo "update_rename_support=1" >> $(zip_root)/META/misc_info.txt
+ $(hide) echo "blockimgdiff_versions=1,2" >> $(zip_root)/META/misc_info.txt
ifneq ($(OEM_THUMBPRINT_PROPERTIES),)
# OTA scripts are only interested in fingerprint related properties
$(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt
@@ -1497,10 +1525,14 @@ endif
name := $(name)-symbols-$(FILE_NAME_TAG)
SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
+# For apps_only build we'll establish the dependency later in build/core/main.mk.
+ifndef TARGET_BUILD_APPS
$(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE) $(INSTALLED_BOOTIMAGE_TARGET)
+endif
+$(SYMBOLS_ZIP):
@echo "Package symbols: $@"
$(hide) rm -rf $@
- $(hide) mkdir -p $(dir $@)
+ $(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED)
$(hide) zip -qr $@ $(TARGET_OUT_UNSTRIPPED)
# -----------------------------------------------------------------
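
The recovery_mount_options value recorded in META/misc_info.txt above follows the small grammar documented in the hunk: fstype entries separated by "|", each mapping a filesystem type to a comma-separated option list. A minimal Python sketch of a parser for that format, purely for illustration (parse_recovery_mount_options is not part of the build system):

    # Parse "<fstype>=<opt>[,<opt>]...[|<fstype>=...]" as written to misc_info.txt.
    def parse_recovery_mount_options(value):
        options = {}
        for entry in value.split("|"):
            if not entry:
                continue
            fstype, _, opts = entry.partition("=")
            options[fstype] = opts.split(",") if opts else []
        return options

    # Using the default defined above:
    opts = parse_recovery_mount_options(
        "ext4=max_batch_time=0,commit=1,data=ordered,barrier=1,errors=panic,nodelalloc")
    # opts == {"ext4": ["max_batch_time=0", "commit=1", "data=ordered",
    #                   "barrier=1", "errors=panic", "nodelalloc"]}
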
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 21b95c2..c641b75 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -11,13 +11,27 @@ else
full_android_manifest := $(LOCAL_PATH)/$(LOCAL_MANIFEST_FILE)
endif
+my_full_libs_manifest_files := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
+my_full_libs_manifest_deps := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
+
+# Set up dependency on aar libraries
+ifdef LOCAL_STATIC_JAVA_AAR_LIBRARIES
+my_full_libs_manifest_deps += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/classes.jar)
+my_full_libs_manifest_files += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/AndroidManifest.xml)
+
+LOCAL_RESOURCE_DIR += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
+ $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/res)
+endif
+
# Set up rules to merge library manifest files
-ifdef LOCAL_FULL_LIBS_MANIFEST_FILES
+ifdef my_full_libs_manifest_files
main_android_manifest := $(full_android_manifest)
full_android_manifest := $(intermediates.COMMON)/AndroidManifest.xml
-$(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(LOCAL_FULL_LIBS_MANIFEST_FILES)
-$(full_android_manifest) : $(main_android_manifest) $(LOCAL_FULL_LIBS_MANIFEST_FILES)
- @echo "Merge android manifest files: $@ <-- $^"
+$(full_android_manifest): PRIVATE_LIBS_MANIFESTS := $(my_full_libs_manifest_files)
+$(full_android_manifest) : $(main_android_manifest) $(my_full_libs_manifest_deps)
+ @echo "Merge android manifest files: $@ <-- $< $(PRIVATE_LIBS_MANIFESTS)"
@mkdir -p $(dir $@)
$(hide) $(ANDROID_MANIFEST_MERGER) --main $< --libs $(PRIVATE_LIBS_MANIFESTS) \
--out $@
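
The rule above merges the app's main manifest with the AndroidManifest.xml extracted from each AAR prebuilt's aar/ intermediates directory, invoking $(ANDROID_MANIFEST_MERGER) with --main, --libs and --out exactly as shown. A rough Python sketch of the same command construction; the merger binary name and paths are placeholders, and how the real tool treats multiple --libs arguments is assumed here:

    import subprocess

    def merge_manifests(main_manifest, lib_manifests, out_path,
                        merger="manifest-merger"):
        # Mirrors the make rule: --main <app manifest> --libs <lib manifests> --out <merged>
        cmd = ([merger, "--main", main_manifest, "--libs"]
               + list(lib_manifests) + ["--out", out_path])
        subprocess.check_call(cmd)

    # AAR manifests come from the exploded intermediates, e.g. (path illustrative):
    #   .../obj/JAVA_LIBRARIES/<lib>_intermediates/aar/AndroidManifest.xml
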
diff --git a/core/binary.mk b/core/binary.mk
index 08e290a..75ec3cf 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -218,7 +218,9 @@ endif
####################################################
## Add FDO flags if FDO is turned on and supported
-####################################################
+## Please note that we will do option filtering during FDO build.
+## i.e. -Os -> -O2, and remove -fno-early-inlining and -finline-limit.
+##################################################################
ifeq ($(strip $(LOCAL_FDO_SUPPORT)), true)
ifeq ($(strip $(LOCAL_IS_HOST_MODULE)),)
my_cflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_CFLAGS)
@@ -997,6 +999,21 @@ my_asflags := $(call $(LOCAL_2ND_ARCH_VAR_PREFIX)convert-to-$(my_host)clang-flag
my_ldflags := $(call $(LOCAL_2ND_ARCH_VAR_PREFIX)convert-to-$(my_host)clang-flags,$(my_ldflags))
endif
+ifeq ($(LOCAL_FDO_SUPPORT), true)
+ build_with_fdo := false
+ ifeq ($(BUILD_FDO_INSTRUMENT), true)
+ build_with_fdo := true
+ endif
+ ifeq ($(BUILD_FDO_OPTIMIZE), true)
+ build_with_fdo := true
+ endif
+ ifeq ($(build_with_fdo), true)
+ my_cflags := $(patsubst -Os,-O2,$(my_cflags))
+ fdo_incompatible_flags=-fno-early-inlining -finline-limit=%
+ my_cflags := $(filter-out $(fdo_incompatible_flags),$(my_cflags))
+ endif
+endif
+
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_YACCFLAGS := $(LOCAL_YACCFLAGS)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASFLAGS := $(my_asflags)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CONLYFLAGS := $(LOCAL_CONLYFLAGS)
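
When LOCAL_FDO_SUPPORT is enabled and either BUILD_FDO_INSTRUMENT or BUILD_FDO_OPTIMIZE is set, the hunk above rewrites -Os to -O2 and filters out -fno-early-inlining and any -finline-limit= flag. A small Python sketch of the equivalent filtering (names here are illustrative, not build-system code):

    import fnmatch

    def filter_fdo_cflags(cflags):
        """-Os -> -O2, and drop flags that are incompatible with FDO builds."""
        incompatible = ("-fno-early-inlining", "-finline-limit=*")
        filtered = []
        for flag in cflags:
            if flag == "-Os":
                filtered.append("-O2")
            elif any(fnmatch.fnmatch(flag, pattern) for pattern in incompatible):
                continue  # mirrors $(filter-out ...) in binary.mk
            else:
                filtered.append(flag)
        return filtered

    # e.g. filter_fdo_cflags(["-Os", "-finline-limit=64", "-Wall"]) -> ["-O2", "-Wall"]
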
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index 1bada38..c820ad5 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -220,6 +220,7 @@ installclean_files := \
$(PRODUCT_OUT)/obj/JAVA_LIBRARIES \
$(PRODUCT_OUT)/obj/FAKE \
$(PRODUCT_OUT)/obj/EXECUTABLES/adbd_intermediates \
+ $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libfs_mgr_intermediates \
$(PRODUCT_OUT)/obj/EXECUTABLES/init_intermediates \
$(PRODUCT_OUT)/obj/ETC/mac_permissions.xml_intermediates \
$(PRODUCT_OUT)/obj/ETC/sepolicy_intermediates \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 8ee7734..f3f4ce9 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -30,6 +30,7 @@ LOCAL_MODULE_TAGS:=
LOCAL_SRC_FILES:=
LOCAL_PREBUILT_OBJ_FILES:=
LOCAL_STATIC_JAVA_LIBRARIES:=
+LOCAL_STATIC_JAVA_AAR_LIBRARIES:=
LOCAL_STATIC_LIBRARIES:=
# Group static libraries with "-Wl,--start-group" and "-Wl,--end-group" when linking.
LOCAL_GROUP_STATIC_LIBRARIES:=
@@ -166,6 +167,8 @@ LOCAL_MODULE_HOST_ARCH:=
LOCAL_NO_FPIE :=
LOCAL_CXX_STL := default
LOCAL_NATIVE_COVERAGE :=
+LOCAL_DPI_VARIANTS:=
+LOCAL_DPI_FILE_STEM:=
# arch specific variables
LOCAL_SRC_FILES_$(TARGET_ARCH):=
diff --git a/core/combo/HOST_windows-x86.mk b/core/combo/HOST_windows-x86.mk
index d4acfe9..b71ac16 100644
--- a/core/combo/HOST_windows-x86.mk
+++ b/core/combo/HOST_windows-x86.mk
@@ -27,7 +27,7 @@ ifneq ($(findstring Linux,$(UNAME)),)
ifdef USE_MINGW
HOST_ACP_UNAVAILABLE := true
TOOLS_EXE_SUFFIX :=
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -DUSE_MINGW
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -DUSE_MINGW -DWIN32_LEAN_AND_MEAN
$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -Wno-unused-parameter
$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += --sysroot=prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -m32
diff --git a/core/java.mk b/core/java.mk
index 98d5138..bb634ac 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -64,6 +64,9 @@ else
endif
endif
+# LOCAL_STATIC_JAVA_AAR_LIBRARIES are special LOCAL_STATIC_JAVA_LIBRARIES
+LOCAL_STATIC_JAVA_LIBRARIES := $(strip $(LOCAL_STATIC_JAVA_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES))
+
LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
LOCAL_BUILT_MODULE_STEM := $(strip $(LOCAL_BUILT_MODULE_STEM))
diff --git a/core/main.mk b/core/main.mk
index 7f4b217..53df32c 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -927,6 +927,9 @@ ifneq ($(TARGET_BUILD_APPS),)
$(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
$(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP))
+ $(SYMBOLS_ZIP) : $(apps_only_installed_files)
+ $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP))
+
.PHONY: apps_only
apps_only: $(unbundled_build_modules)
diff --git a/core/package_internal.mk b/core/package_internal.mk
index bb458d4..e2c3cf4 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -192,6 +192,8 @@ endif # LOCAL_EMMA_INSTRUMENT
rs_compatibility_jni_libs :=
+include $(BUILD_SYSTEM)/android_manifest.mk
+
#################################
include $(BUILD_SYSTEM)/java.mk
#################################
@@ -201,8 +203,6 @@ ifeq ($(LOCAL_SDK_RES_VERSION),)
LOCAL_SDK_RES_VERSION:=$(LOCAL_SDK_VERSION)
endif
-include $(BUILD_SYSTEM)/android_manifest.mk
-
$(LOCAL_INTERMEDIATE_TARGETS): \
PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 2e0d2ef..c19b278 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -117,6 +117,19 @@ endif # LOCAL_STRIP_MODULE not true
ifeq ($(LOCAL_MODULE_CLASS),APPS)
PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
+# Select dpi-specific source
+ifdef LOCAL_DPI_VARIANTS
+my_dpi := $(filter $(LOCAL_DPI_VARIANTS),$(PRODUCT_AAPT_PREF_CONFIG))
+ifdef my_dpi
+ifdef LOCAL_DPI_FILE_STEM
+my_prebuilt_dpi_file_stem := $(LOCAL_DPI_FILE_STEM)
+else
+my_prebuilt_dpi_file_stem := $(LOCAL_MODULE)_%.apk
+endif
+my_prebuilt_src_file := $(dir $(my_prebuilt_src_file))$(subst %,$(my_dpi),$(my_prebuilt_dpi_file_stem))
+endif # my_dpi
+endif # LOCAL_DPI_VARIANTS
+
rs_compatibility_jni_libs :=
include $(BUILD_SYSTEM)/install_jni_libs.mk
@@ -250,10 +263,26 @@ ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
# while the deps should be in the common dir, so we make a copy in the common dir.
# For nonstatic library, $(common_javalib_jar) is the dependency file,
# while $(common_classes_jar) is used to link.
-common_classes_jar := $(call intermediates-dir-for,JAVA_LIBRARIES,$(LOCAL_MODULE),,COMMON)/classes.jar
-common_javalib_jar := $(dir $(common_classes_jar))javalib.jar
+common_classes_jar := $(intermediates.COMMON)/classes.jar
+common_javalib_jar := $(intermediates.COMMON)/javalib.jar
+
+$(common_classes_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
+
+ifneq ($(filter %.aar, $(my_prebuilt_src_file)),)
+# This is .aar file, archive of classes.jar and Android resources.
+my_src_jar := $(intermediates.COMMON)/aar/classes.jar
-$(common_classes_jar) : $(my_prebuilt_src_file) | $(ACP)
+$(my_src_jar) : $(my_prebuilt_src_file)
+ $(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
+ $(hide) unzip -qo -d $(dir $@) $<
+ # Make sure the extracted classes.jar has a new timestamp.
+ $(hide) touch $@
+
+else
+# This is jar file.
+my_src_jar := $(my_prebuilt_src_file)
+endif
+$(common_classes_jar) : $(my_src_jar) | $(ACP)
$(transform-prebuilt-to-target)
$(common_javalib_jar) : $(common_classes_jar) | $(ACP)
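
The APPS hunk above selects a density-specific prebuilt by filtering LOCAL_DPI_VARIANTS against PRODUCT_AAPT_PREF_CONFIG and substituting the matching density for the "%" in the file stem (default "<module>_%.apk"). A small Python sketch of that selection; the module and density values are made up for illustration:

    import os

    def select_dpi_prebuilt(src_file, module, dpi_variants, aapt_pref_config,
                            dpi_file_stem=None):
        """Pick a density-specific APK next to src_file, mirroring prebuilt_internal.mk."""
        matches = [dpi for dpi in dpi_variants if dpi in aapt_pref_config]
        if not matches:
            return src_file                    # no match: keep the generic prebuilt
        stem = dpi_file_stem or (module + "_%.apk")
        return os.path.join(os.path.dirname(src_file), stem.replace("%", matches[0]))

    # e.g. select_dpi_prebuilt("prebuilts/Foo/Foo.apk", "Foo", ["hdpi", "xhdpi"], ["xhdpi"])
    #      -> "prebuilts/Foo/Foo_xhdpi.apk"
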
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index d2bef14..1acc320 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -41,7 +41,7 @@ ifeq "" "$(PLATFORM_VERSION)"
# which is the version that we reveal to the end user.
# Update this value when the platform version changes (rather
# than overriding it somewhere else). Can be an arbitrary string.
- PLATFORM_VERSION := 5.0.50.50.50.50
+ PLATFORM_VERSION := LollipopMR1
endif
ifeq "" "$(PLATFORM_SDK_VERSION)"
@@ -53,7 +53,7 @@ ifeq "" "$(PLATFORM_SDK_VERSION)"
# intermediate builds). During development, this number remains at the
# SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
# the code-name of the new development work.
- PLATFORM_SDK_VERSION := 21
+ PLATFORM_SDK_VERSION := 22
endif
ifeq "" "$(PLATFORM_VERSION_CODENAME)"
diff --git a/envsetup.sh b/envsetup.sh
index 0e853fe..972578f 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -570,7 +570,8 @@ function tapas()
{
local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
- local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+ local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+ local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
if [ $(echo $arch | wc -w) -gt 1 ]; then
echo "tapas: Error: Multiple build archs supplied: $arch"
@@ -580,6 +581,10 @@ function tapas()
echo "tapas: Error: Multiple build variants supplied: $variant"
return
fi
+ if [ $(echo $density | wc -w) -gt 1 ]; then
+ echo "tapas: Error: Multiple densities supplied: $density"
+ return
+ fi
local product=full
case $arch in
@@ -596,9 +601,13 @@ function tapas()
if [ -z "$apps" ]; then
apps=all
fi
+ if [ -z "$density" ]; then
+ density=alldpi
+ fi
export TARGET_PRODUCT=$product
export TARGET_BUILD_VARIANT=$variant
+ export TARGET_BUILD_DENSITY=$density
export TARGET_BUILD_TYPE=release
export TARGET_BUILD_APPS=$apps
@@ -884,6 +893,85 @@ function pid()
fi
}
+# coredump_setup - enable core dumps globally for any process
+# that has the core-file-size limit set correctly
+#
+# NOTE: You must also call coredump_enable for a specific process
+# if its core-file-size limit is not set already.
+# NOTE: Core dumps are written to ramdisk; they will not survive a reboot!
+
+function coredump_setup()
+{
+ echo "Getting root...";
+ adb root;
+ adb wait-for-device;
+
+ echo "Remounting root partition read-write...";
+ adb shell mount -w -o remount -t rootfs rootfs;
+ sleep 1;
+ adb wait-for-device;
+ adb shell mkdir -p /cores;
+ adb shell mount -t tmpfs tmpfs /cores;
+ adb shell chmod 0777 /cores;
+
+ echo "Granting SELinux permission to dump in /cores...";
+ adb shell restorecon -R /cores;
+
+ echo "Set core pattern.";
+ adb shell 'echo /cores/core.%p > /proc/sys/kernel/core_pattern';
+
+ echo "Done."
+}
+
+# coredump_enable - enable core dumps for the specified process
+# $1 = PID of process (e.g., $(pid mediaserver))
+#
+# NOTE: coredump_setup must have been called as well for a core
+# dump to actually be generated.
+
+function coredump_enable()
+{
+ local PID=$1;
+ if [ -z "$PID" ]; then
+ printf "Expecting a PID!\n";
+ return;
+ fi;
+ echo "Setting core limit for $PID to infinite...";
+ adb shell prlimit $PID 4 -1 -1
+}
+
+# core - send SIGSEGV and pull the core for the process
+# $1 = PID of process (e.g., $(pid mediaserver))
+#
+# NOTE: coredump_setup must be called once per boot for core dumps to be
+# enabled globally.
+
+function core()
+{
+ local PID=$1;
+
+ if [ -z "$PID" ]; then
+ printf "Expecting a PID!\n";
+ return;
+ fi;
+
+ local CORENAME=core.$PID;
+ local COREPATH=/cores/$CORENAME;
+ local SIG=SEGV;
+
+ coredump_enable $1;
+
+ local done=0;
+ while [ $(adb shell "[ -d /proc/$PID ] && echo -n yes") ]; do
+ printf "\tSending SIG%s to %d...\n" $SIG $PID;
+ adb shell kill -$SIG $PID;
+ sleep 1;
+ done;
+
+ adb shell "while [ ! -f $COREPATH ] ; do echo waiting for $COREPATH to be generated; sleep 1; done"
+ echo "Done: core is under $COREPATH on device.";
+}
+
# systemstack - dump the current stack trace of all threads in the system process
# to the usual ANR traces file
function systemstack()
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index c57447d..818f857 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -25,13 +25,13 @@ TARGET_2ND_ARCH := arm
TARGET_2ND_CPU_ABI := armeabi-v7a
TARGET_2ND_CPU_ABI2 := armeabi
-ifdef TARGET_BUILD_APPS
+ifneq ($(TARGET_BUILD_APPS)$(filter cts,$(MAKECMDGOALS)),)
# DO NOT USE
# DO NOT USE
#
# This architecture / CPU variant must NOT be used for any 64 bit
# platform builds. It is the lowest common denominator required
-# to build an unbundled application for all supported 32 and 64 bit
+# to build an unbundled application or cts for all supported 32 and 64 bit
# platforms.
#
# If you're building a 64 bit platform (and not an application) the
@@ -82,4 +82,3 @@ BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
BOARD_FLASH_BLOCK_SIZE := 512
TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-
diff --git a/target/product/core.mk b/target/product/core.mk
index 876a536..519dbb8 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -15,9 +15,10 @@
#
# Base configuration for communication-oriented android devices
-# (phones, tablets, etc.). If you want a change to apply to ALL
+# (phones, tablets, etc.). If you want a change to apply to ALMOST ALL
# devices (including non-phones and non-tablets), modify
-# core_minimal.mk instead.
+# core_minimal.mk instead. If you care about wearables, you need to modify
+# core_tiny.mk in addition to core_minimal.mk.
PRODUCT_PACKAGES += \
BasicDreams \
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
index d6dbe98..9637e34 100644
--- a/target/product/core_tiny.mk
+++ b/target/product/core_tiny.mk
@@ -43,6 +43,7 @@ PRODUCT_PACKAGES += \
DefaultContainerService \
SettingsProvider \
Shell \
+ bcc \
bu \
com.android.location.provider \
com.android.location.provider.xml \
diff --git a/target/product/security/verity.pk8 b/target/product/security/verity.pk8
new file mode 100644
index 0000000..bebf216
--- /dev/null
+++ b/target/product/security/verity.pk8
Binary files differ
diff --git a/target/product/security/verity.x509.pem b/target/product/security/verity.x509.pem
new file mode 100644
index 0000000..86399c3
--- /dev/null
+++ b/target/product/security/verity.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
+6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
+fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
+T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
+AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
+jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
+HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
+oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
+NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
+JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
+dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
+UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
+yttuAJAEAymk1mipd9+zp38=
+-----END CERTIFICATE-----
diff --git a/target/product/security/verity_key b/target/product/security/verity_key
index 8db965f..31982d9 100644
--- a/target/product/security/verity_key
+++ b/target/product/security/verity_key
Binary files differ
diff --git a/target/product/security/verity_private_dev_key b/target/product/security/verity_private_dev_key
deleted file mode 100644
index 92528e9..0000000
--- a/target/product/security/verity_private_dev_key
+++ /dev/null
@@ -1,28 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDQxdVrH2RB1eg5
-17/gBmLzW1Ds10RG6ctNZMhxppMOLnEZViKGv1VNRhxqK/JKTv2UujgZ94SJcDub
-G+DwAwaGZKQqDYUa0VU2cng8TYPcnYGPdJ7Usckp6tdg64vns7e+VVf0dOyEovR+
-JyeYUz05OhUMYP9xJIhpA2XnXe5Ekb9iTFSYo9uBpoXDD4IY7aOqUxSbv9wMtyIp
-dl+oTm0+kqRRi4KoxGHV0CzDseEUuWG/Kp/7aVF9Sg45NcC6KYvrGysUKA+Bt09O
-feDn/HRpT9SfRElJa5DRms33UBUtnom15F4yd4vvFgubB0nkPOUuwfZhTFfgeuY4
-H2bHkjKbAgMBAAECggEAMpFYqkPGQvQO9cO+ZALoAM4Dgfp6PTrv1WUt7+lLAUpa
-dqqYXk8F2Fu9EjJm03ziix237QI5Bhk7Nsy/5SK2d+L0qILx1JcTrsZ3PRQBdnRo
-J1k2B4qwkQii9oTXNF4hiWaekUWo7E+ULOJLAuhWkf/xjTgJZ1xT9iuuiSYFSnIa
-9ABNH0vCaKEkW/4ri6fdtXmO26C/ltJlnozl86x07PIFh4uBas7/40E8ykFP00CS
-zdhMh+2DGyCb1Q0eJ1IfGILNatkLNEd2BHgQ7qNBkN9yShZfhvIPblr5gSUlZplX
-diV20ZGLAfByKWgZZWKkwl9KzaisL/J/4dr2UlSVEQKBgQDxAYTsgoTkkP0TKzr3
-i3ljT8OuVOj6TwZVBJYe2MIJ3veivS3gWB53FpsKthbib7y8ifIakn15mQkNCK5R
-7H7F5lvZCNnB6shY5Dz7nLJxKLALcAg+d12l3gTbFQeFDs0iQQJF7P8hs/GPF7kY
-Layb7EF0uzYjyHJCKtFdaZaeZwKBgQDdwvCb7NJVeGTcE97etL+8acu9y4GlqKEF
-o0Vkw8TjNKj/KuDkbkAk9hXxU1ZCmDU3y6r8CVHYl0Sqh08plEhkYB/j3sFy81zY
-3xu/rLFysBwjeJHHlPjRTYkdKr9pABmm8NIEShvu9u8i+mpOhjbX72HxZL+i4Fou
-gz58wEdBrQKBgG8CfyKdn+7UJe3tbLTXRquK8xxauhGJ0uXYPfmpZ/8596C7OOVs
-UWQTQoj1hKb6RtolRCIfNbKL3hJl3D2aDG7Fg6r9m6fpqCzhvIE9FShwUF6EVRfI
-zZb4JA5xqkwMnEpZ3V0uI/p3Mx3xFG3ho+8SLLhC/1YOHysBI/y+BQWjAoGAYiqQ
-PkXYWhOAeleleeqDUdF3al3y1zVNimRbLJ7owjcmdEYz5YrUhEgXMIvWjIY6UKes
-2gL6IynbMK3TIjHM1fojQ8jw04TdXfdtnizBJGbHHgCab8IHXwe2oZ2xu7ZapKbI
-ITP5J5BSDabSdk49attB/Qy/NEeiRCK+/5RSNsUCgYAg6vX9VqMEkhPHeoFfdLGD
-EQPPN6QLrQ4Zif0GKxH96znNSv0rXdNp9t0kyapdgzMuCwIEuOkCSiKgmfjTWnYO
-qh5HMUuD2VbfWwI9jVujQMRmqiaFF7VxxA1bP5j1hJlI6cn1Fjlpi+NsNZN4nm3Q
-92SEwX2vDgjrU0NAtFFL1Q==
------END PRIVATE KEY-----
diff --git a/target/product/verity.mk b/target/product/verity.mk
index 4a1ca5e..0361b64 100644
--- a/target/product/verity.mk
+++ b/target/product/verity.mk
@@ -17,7 +17,11 @@
# Provides dependencies necessary for verified boot
PRODUCT_SUPPORTS_VERITY := true
-PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity_private_dev_key
+
+# The dev key is used to sign boot and recovery images, and the verity
+# metadata table. Actual product deliverables will be re-signed by hand.
+# We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
+PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity
PRODUCT_PACKAGES += \
verity_key
diff --git a/tools/droiddoc/templates-sdk/assets/css/default.css b/tools/droiddoc/templates-sdk/assets/css/default.css
index 96259a7..8cfb45d 100644
--- a/tools/droiddoc/templates-sdk/assets/css/default.css
+++ b/tools/droiddoc/templates-sdk/assets/css/default.css
@@ -3152,6 +3152,7 @@ div#deprecatedSticker {
-webkit-box-shadow:-5px 5px 10px #ccc;
}
+div#langMessage,
div#naMessage {
display:none;
width:555px;
@@ -3159,6 +3160,8 @@ div#naMessage {
margin:0 auto;
}
+
+div#langMessage>div,
div#naMessage div {
z-index:99;
width:450px;
@@ -3172,12 +3175,16 @@ div#naMessage div {
-webkit-box-shadow:-10px 10px 40px #888;
}
/* IE6 can't position fixed */
+* html div#langMessage>div,
* html div#naMessage div { position:absolute; }
div#naMessage strong {
font-size:1.1em;
}
+div#langMessage .lang {
+ display:none;
+}
/* --------------------------------------------------------------------------
Slideshow Controls & Next/Prev
@@ -7389,4 +7396,4 @@ a.home-new-cta-btn:hover,
.resource-card-6x2 > .card-bg.helpouts-card-bg:after {
display:none;
-} \ No newline at end of file
+}
diff --git a/tools/droiddoc/templates-sdk/assets/js/docs.js b/tools/droiddoc/templates-sdk/assets/js/docs.js
index a556f4f..a3c24f4 100644
--- a/tools/droiddoc/templates-sdk/assets/js/docs.js
+++ b/tools/droiddoc/templates-sdk/assets/js/docs.js
@@ -20,6 +20,17 @@ $.ajaxSetup({
/****** ON LOAD SET UP STUFF *********/
$(document).ready(function() {
+
+ // show lang dialog if the URL includes /intl/
+ //if (location.pathname.substring(0,6) == "/intl/") {
+ // var lang = location.pathname.split('/')[2];
+ // if (lang != getLangPref()) {
+ // $("#langMessage a.yes").attr("onclick","changeLangPref('" + lang
+ // + "', true); $('#langMessage').hide(); return false;");
+ // $("#langMessage .lang." + lang).show();
+ // $("#langMessage").show();
+ // }
+ //}
// load json file for JD doc search suggestions
$.getScript(toRoot + 'jd_lists_unified.js');
@@ -658,7 +669,7 @@ function toggleFullscreen(enable) {
setTimeout(updateSidenavFixedWidth,delay); // need to wait a moment for css to switch
enabled = false;
}
- writeCookie("fullscreen", enabled, null, null);
+ writeCookie("fullscreen", enabled, null);
setNavBarLeftPos();
resizeNav(delay);
updateSideNavPosition();
@@ -819,7 +830,7 @@ function reInitScrollbars() {
function saveNavPanels() {
var basePath = getBaseUri(location.pathname);
var section = basePath.substring(1,basePath.indexOf("/",1));
- writeCookie("height", resizePackagesNav.css("height"), section, null);
+ writeCookie("height", resizePackagesNav.css("height"), section);
}
@@ -900,16 +911,12 @@ function readCookie(cookie) {
return 0;
}
-function writeCookie(cookie, val, section, expiration) {
+function writeCookie(cookie, val, section) {
if (val==undefined) return;
section = section == null ? "_" : "_"+section+"_";
- if (expiration == null) {
- var date = new Date();
- date.setTime(date.getTime()+(10*365*24*60*60*1000)); // default expiration is one week
- expiration = date.toGMTString();
- }
+ var age = 2*365*24*60*60; // set max-age to 2 years
var cookieValue = cookie_namespace + section + cookie + "=" + val
- + "; expires=" + expiration+"; path=/";
+ + "; max-age=" + age +"; path=/";
document.cookie = cookieValue;
}
@@ -1149,9 +1156,7 @@ function swapNav() {
nav_pref = NAV_PREF_TREE;
init_default_navtree(toRoot);
}
- var date = new Date();
- date.setTime(date.getTime()+(10*365*24*60*60*1000)); // keep this for 10 years
- writeCookie("nav", nav_pref, "reference", date.toGMTString());
+ writeCookie("nav", nav_pref, "reference");
$("#nav-panels").toggle();
$("#panel-link").toggle();
@@ -1219,11 +1224,7 @@ function changeNavLang(lang) {
}
function changeLangPref(lang, submit) {
- var date = new Date();
- expires = date.toGMTString(date.setTime(date.getTime()+(10*365*24*60*60*1000)));
- // keep this for 50 years
- //alert("expires: " + expires)
- writeCookie("pref_lang", lang, null, expires);
+ writeCookie("pref_lang", lang, null);
// ####### TODO: Remove this condition once we're stable on devsite #######
// This condition is only needed if we still need to support legacy GAE server
@@ -1988,7 +1989,7 @@ function search_changed(e, kd, toroot)
// Search for matching JD docs
- if (text.length >= 3) {
+ if (text.length >= 2) {
// Regex to match only the beginning of a word
var textRegex = new RegExp("\\b" + text.toLowerCase(), "g");
@@ -2732,10 +2733,7 @@ function changeApiLevel() {
selectedLevel = parseInt($("#apiLevelSelector option:selected").val());
toggleVisisbleApis(selectedLevel, "body");
- var date = new Date();
- date.setTime(date.getTime()+(10*365*24*60*60*1000)); // keep this for 10 years
- var expiration = date.toGMTString();
- writeCookie(API_LEVEL_COOKIE, selectedLevel, null, expiration);
+ writeCookie(API_LEVEL_COOKIE, selectedLevel, null);
if (selectedLevel < minLevel) {
var thing = ($("#jd-header").html().indexOf("package") != -1) ? "package" : "class";
@@ -4216,4 +4214,4 @@ function showSamples() {
}
}
}
-})(); \ No newline at end of file
+})();
diff --git a/tools/droiddoc/templates-sdk/components/masthead.cs b/tools/droiddoc/templates-sdk/components/masthead.cs
index 2dde104..d07b378 100644
--- a/tools/droiddoc/templates-sdk/components/masthead.cs
+++ b/tools/droiddoc/templates-sdk/components/masthead.cs
@@ -3,6 +3,52 @@
<?cs call:preview_masthead() ?>
<?cs else ?>
<a name="top"></a>
+
+<!-- dialog to prompt lang pref change when loaded from hardcoded URL
+<div id="langMessage" style="display:none">
+ <div>
+ <div class="lang en">
+ <p>You requested a page in English, would you like to proceed with this language setting?</p>
+ </div>
+ <div class="lang es">
+ <p>You requested a page in Spanish (Español), would you like to proceed with this language setting?</p>
+ </div>
+ <div class="lang ja">
+ <p>You requested a page in Japanese (日本語), would you like to proceed with this language setting?</p>
+ </div>
+ <div class="lang ko">
+ <p>You requested a page in Korean (한국어), would you like to proceed with this language setting?</p>
+ </div>
+ <div class="lang ru">
+ <p>You requested a page in Russian (Русский), would you like to proceed with this language setting?</p>
+ </div>
+ <div class="lang zh-cn">
+ <p>You requested a page in Simplified Chinese (简体中文), would you like to proceed with this language setting?</p>
+ </div>
+ <div class="lang zh-tw">
+ <p>You requested a page in Traditional Chinese (繁體中文), would you like to proceed with this language setting?</p>
+ </div>
+ <a href="#" class="button yes" onclick="return false;">
+ <span class="lang en">Yes</span>
+ <span class="lang es">Sí</span>
+ <span class="lang ja">Yes</span>
+ <span class="lang ko">Yes</span>
+ <span class="lang ru">Yes</span>
+ <span class="lang zh-cn">是的</span>
+ <span class="lang zh-tw">没有</span>
+ </a>
+ <a href="#" class="button" onclick="$('#langMessage').hide();return false;">
+ <span class="lang en">No</span>
+ <span class="lang es">No</span>
+ <span class="lang ja">No</span>
+ <span class="lang ko">No</span>
+ <span class="lang ru">No</span>
+ <span class="lang zh-cn">没有</span>
+ <span class="lang zh-tw">没有</span>
+ </a>
+ </div>
+</div> -->
+
<?cs if:!devsite ?><?cs # leave out the global header for devsite; it is in devsite template ?>
<!-- Header -->
<div id="header-wrapper">
@@ -160,6 +206,7 @@
<div class="wrap" style="position:relative;z-index:1">
<?cs if:reference ?>
+ <?cs # HIDE HELPOUTS RECRUIT BANNER
<a id="helpoutsLink" class="resource resource-card resource-card-6x2x3 resource-card-6x2 helpouts-card"
href="http://helpouts.google.com/partner/landing/provider/googledevelopers" target="_blank">
<div class="card-bg helpouts-card-bg"></div>
@@ -185,6 +232,7 @@
$("span#helpoutsLinkText").text(textB);
}
</script>
+ END HIDE HELPOUTS ?>
<?cs /if ?>
<ul class="nav-x col-9 develop" style="width:100%">
@@ -346,4 +394,4 @@ color:#666;font-weight:100;font-size:27px;">L Developer Preview</h1></div>
?>
-<?cs /def ?> \ No newline at end of file
+<?cs /def ?>
diff --git a/tools/droiddoc/templates-sdk/customizations.cs b/tools/droiddoc/templates-sdk/customizations.cs
index e0e3ca1..03e5079 100644
--- a/tools/droiddoc/templates-sdk/customizations.cs
+++ b/tools/droiddoc/templates-sdk/customizations.cs
@@ -449,8 +449,8 @@ def:header_search_widget() ?>
<option value="ja">日本語</option>
<option value="ko">한국어</option>
<option value="ru">Русский</option>
- <option value="zh-cn">中文 (中国)</option>
- <option value="zh-tw">中文 (台灣)</option>
+ <option value="zh-cn">中文(简体)</option>
+ <option value="zh-tw">中文(繁體)</option>
</select>
</div>
<script type="text/javascript">
diff --git a/tools/droiddoc/templates-sdk/docpage.cs b/tools/droiddoc/templates-sdk/docpage.cs
index 7cd9d43..a5440d4 100644
--- a/tools/droiddoc/templates-sdk/docpage.cs
+++ b/tools/droiddoc/templates-sdk/docpage.cs
@@ -194,10 +194,10 @@ include:"header.cs" ?>
<?cs include:"trailer.cs" ?>
<script src="https://developer.android.com/ytblogger_lists_unified.js" type="text/javascript"></script>
- <script src="<?cs var:toroot ?>jd_lists_unified.js?v=3" type="text/javascript"></script>
- <script src="<?cs var:toroot ?>jd_extras.js?v=4" type="text/javascript"></script>
- <script src="<?cs var:toroot ?>jd_collections.js?v=4" type="text/javascript"></script>
- <script src="<?cs var:toroot ?>jd_tag_helpers.js?v=3" type="text/javascript"></script>
+ <script src="<?cs var:toroot ?>jd_lists_unified.js?v=7" type="text/javascript"></script>
+ <script src="<?cs var:toroot ?>jd_extras.js?v=8" type="text/javascript"></script>
+ <script src="<?cs var:toroot ?>jd_collections.js?v=8" type="text/javascript"></script>
+ <script src="<?cs var:toroot ?>jd_tag_helpers.js?v=5" type="text/javascript"></script>
</body>
</html>
diff --git a/tools/droiddoc/templates-sdk/head_tag.cs b/tools/droiddoc/templates-sdk/head_tag.cs
index 7ecb7f9..005bb8e 100644
--- a/tools/droiddoc/templates-sdk/head_tag.cs
+++ b/tools/droiddoc/templates-sdk/head_tag.cs
@@ -24,8 +24,8 @@
<meta name="Description" content="<?cs var:page.metaDescription ?>"><?cs
/if ?>
<link rel="shortcut icon" type="image/x-icon" href="<?cs var:toroot ?>favicon.ico" />
-<title><?cs
- if:page.title ?><?cs
+<title><?cs
+ if:page.title ?><?cs
var:page.title ?> | <?cs
/if ?>Android Developers</title>
@@ -38,7 +38,7 @@ if:android.whichdoc != 'online' ?>http:<?cs
if:android.whichdoc != 'online' ?>http:<?cs
/if ?>//fonts.googleapis.com/css?family=Roboto:light,regular,medium,thin,italic,mediumitalic,bold"
title="roboto">
-<link href="<?cs var:toroot ?>assets/css/default.css?v=2" rel="stylesheet" type="text/css">
+<link href="<?cs var:toroot ?>assets/css/default.css?v=3" rel="stylesheet" type="text/css">
<?cs if:reference && !(reference.gms || reference.gcm || preview) ?>
<!-- FULLSCREEN STYLESHEET -->
@@ -62,7 +62,14 @@ else
var metaTags = [<?cs var:meta.tags ?>];
var devsite = <?cs if:devsite ?>true<?cs else ?>false<?cs /if ?>;
</script>
-<script src="<?cs var:toroot ?>assets/js/docs.js?v=2" type="text/javascript"></script>
+<script src="<?cs var:toroot ?>assets/js/docs.js?v=3" type="text/javascript"></script>
+
+<?cs if:helpoutsWidget ?>
+<script type="text/javascript" src="https://helpouts.google.com/ps/res/embed.js" defer async
+ data-helpouts-embed data-helpouts-vertical="programming"
+ data-helpouts-tags="<?cs var:page.tags ?>" data-helpouts-prefix="android"
+ data-helpouts-standalone="true"></script>
+<?cs /if ?>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index bf217e0..e98e4b6 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -45,10 +45,28 @@ import common
OPTIONS = common.OPTIONS
+OPTIONS.add_missing = False
+OPTIONS.rebuild_recovery = False
-def AddSystem(output_zip, prefix="IMAGES/"):
+def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
"""Turn the contents of SYSTEM into a system image and store it in
output_zip."""
+
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "system.img")
+ if os.path.exists(prebuilt_path):
+ print "system.img already exists in %s, no need to rebuild..." % (prefix,)
+ return
+
+ def output_sink(fn, data):
+ ofile = open(os.path.join(OPTIONS.input_tmp,"SYSTEM",fn), "w")
+ ofile.write(data)
+ ofile.close()
+
+ if OPTIONS.rebuild_recovery:
+ print("Building new recovery patch")
+ common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img, boot_img,
+ info_dict=OPTIONS.info_dict)
+
block_list = common.MakeTempFile(prefix="system-blocklist-", suffix=".map")
imgname = BuildSystem(OPTIONS.input_tmp, OPTIONS.info_dict,
block_list=block_list)
@@ -67,6 +85,12 @@ def BuildSystem(input_dir, info_dict, block_list=None):
def AddVendor(output_zip, prefix="IMAGES/"):
"""Turn the contents of VENDOR into a vendor image and store in it
output_zip."""
+
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "vendor.img")
+ if os.path.exists(prebuilt_path):
+ print "vendor.img already exists in %s, no need to rebuild..." % (prefix,)
+ return
+
block_list = common.MakeTempFile(prefix="vendor-blocklist-", suffix=".map")
imgname = BuildVendor(OPTIONS.input_tmp, OPTIONS.info_dict,
block_list=block_list)
@@ -131,6 +155,11 @@ def CreateImage(input_dir, info_dict, what, block_list=None):
def AddUserdata(output_zip, prefix="IMAGES/"):
"""Create an empty userdata image and store it in output_zip."""
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "userdata.img")
+ if os.path.exists(prebuilt_path):
+ print "userdata.img already exists in %s, no need to rebuild..." % (prefix,)
+ return
+
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
"data")
# We only allow yaffs to have a 0/missing partition_size.
@@ -165,6 +194,11 @@ def AddUserdata(output_zip, prefix="IMAGES/"):
def AddCache(output_zip, prefix="IMAGES/"):
"""Create an empty cache image and store it in output_zip."""
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "cache.img")
+ if os.path.exists(prebuilt_path):
+ print "cache.img already exists in %s, no need to rebuild..." % (prefix,)
+ return
+
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
"cache")
# The build system has to explicitly request for cache.img.
@@ -197,10 +231,11 @@ def AddCache(output_zip, prefix="IMAGES/"):
def AddImagesToTargetFiles(filename):
OPTIONS.input_tmp, input_zip = common.UnzipTemp(filename)
- for n in input_zip.namelist():
- if n.startswith("IMAGES/"):
- print "target_files appears to already contain images."
- sys.exit(1)
+ if not OPTIONS.add_missing:
+ for n in input_zip.namelist():
+ if n.startswith("IMAGES/"):
+ print "target_files appears to already contain images."
+ sys.exit(1)
try:
input_zip.getinfo("VENDOR/")
@@ -221,19 +256,35 @@ def AddImagesToTargetFiles(filename):
print "\n\n++++ " + s + " ++++\n\n"
banner("boot")
- boot_image = common.GetBootableImage(
- "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
- if boot_image:
- boot_image.AddToZip(output_zip)
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "boot.img")
+ boot_image = None
+ if os.path.exists(prebuilt_path):
+ print "boot.img already exists in IMAGES/, no need to rebuild..."
+ if OPTIONS.rebuild_recovery:
+ boot_image = common.GetBootableImage(
+ "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+ else:
+ boot_image = common.GetBootableImage(
+ "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+ if boot_image:
+ boot_image.AddToZip(output_zip)
banner("recovery")
- recovery_image = common.GetBootableImage(
- "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
- if recovery_image:
- recovery_image.AddToZip(output_zip)
+ recovery_image = None
+ prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "recovery.img")
+ if os.path.exists(prebuilt_path):
+ print "recovery.img already exists in IMAGES/, no need to rebuild..."
+ if OPTIONS.rebuild_recovery:
+ recovery_image = common.GetBootableImage(
+ "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
+ else:
+ recovery_image = common.GetBootableImage(
+ "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
+ if recovery_image:
+ recovery_image.AddToZip(output_zip)
banner("system")
- AddSystem(output_zip)
+ AddSystem(output_zip, recovery_img=recovery_image, boot_img=boot_image)
if has_vendor:
banner("vendor")
AddVendor(output_zip)
@@ -244,9 +295,24 @@ def AddImagesToTargetFiles(filename):
output_zip.close()
-
def main(argv):
- args = common.ParseOptions(argv, __doc__)
+
+ def option_handler(o, a):
+ if o in ("-a", "--add_missing"):
+ OPTIONS.add_missing = True
+ elif o in ("-r", "--rebuild_recovery",):
+ OPTIONS.rebuild_recovery = True
+ else:
+ return False
+ return True
+
+ args = common.ParseOptions(argv, __doc__,
+ extra_opts="ar",
+ extra_long_opts=["add_missing",
+ "rebuild_recovery",
+ ],
+ extra_option_handler=option_handler)
+
if len(args) != 1:
common.Usage(__doc__)
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 216486c..8b179d5 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -16,6 +16,7 @@ from __future__ import print_function
from collections import deque, OrderedDict
from hashlib import sha1
+import heapq
import itertools
import multiprocessing
import os
@@ -142,9 +143,16 @@ class Transfer(object):
self.goes_before = {}
self.goes_after = {}
+ self.stash_before = []
+ self.use_stash = []
+
self.id = len(by_id)
by_id.append(self)
+ def NetStashChange(self):
+ return (sum(sr.size() for (_, sr) in self.stash_before) -
+ sum(sr.size() for (_, sr) in self.use_stash))
+
def __str__(self):
return (str(self.id) + ": <" + str(self.src_ranges) + " " + self.style +
" to " + str(self.tgt_ranges) + ">")
@@ -182,11 +190,14 @@ class Transfer(object):
# original image.
class BlockImageDiff(object):
- def __init__(self, tgt, src=None, threads=None):
+ def __init__(self, tgt, src=None, threads=None, version=2):
if threads is None:
threads = multiprocessing.cpu_count() // 2
if threads == 0: threads = 1
self.threads = threads
+ self.version = version
+
+ assert version in (1, 2)
self.tgt = tgt
if src is None:
@@ -221,7 +232,12 @@ class BlockImageDiff(object):
self.FindVertexSequence()
# Fix up the ordering dependencies that the sequence didn't
# satisfy.
- self.RemoveBackwardEdges()
+ if self.version == 1:
+ self.RemoveBackwardEdges()
+ else:
+ self.ReverseBackwardEdges()
+ self.ImproveVertexSequence()
+
# Double-check our work.
self.AssertSequenceGood()
@@ -231,18 +247,87 @@ class BlockImageDiff(object):
def WriteTransfers(self, prefix):
out = []
- out.append("1\n") # format version number
total = 0
performs_read = False
+ stashes = {}
+ stashed_blocks = 0
+ max_stashed_blocks = 0
+
+ free_stash_ids = []
+ next_stash_id = 0
+
for xf in self.transfers:
- # zero [rangeset]
- # new [rangeset]
- # bsdiff patchstart patchlen [src rangeset] [tgt rangeset]
- # imgdiff patchstart patchlen [src rangeset] [tgt rangeset]
- # move [src rangeset] [tgt rangeset]
- # erase [rangeset]
+ if self.version < 2:
+ assert not xf.stash_before
+ assert not xf.use_stash
+
+ for s, sr in xf.stash_before:
+ assert s not in stashes
+ if free_stash_ids:
+ sid = heapq.heappop(free_stash_ids)
+ else:
+ sid = next_stash_id
+ next_stash_id += 1
+ stashes[s] = sid
+ stashed_blocks += sr.size()
+ out.append("stash %d %s\n" % (sid, sr.to_string_raw()))
+
+ if stashed_blocks > max_stashed_blocks:
+ max_stashed_blocks = stashed_blocks
+
+ if self.version == 1:
+ src_string = xf.src_ranges.to_string_raw()
+ elif self.version == 2:
+
+ # <# blocks> <src ranges>
+ # OR
+ # <# blocks> <src ranges> <src locs> <stash refs...>
+ # OR
+ # <# blocks> - <stash refs...>
+
+ size = xf.src_ranges.size()
+ src_string = [str(size)]
+
+ unstashed_src_ranges = xf.src_ranges
+ mapped_stashes = []
+ for s, sr in xf.use_stash:
+ sid = stashes.pop(s)
+ stashed_blocks -= sr.size()
+ unstashed_src_ranges = unstashed_src_ranges.subtract(sr)
+ sr = xf.src_ranges.map_within(sr)
+ mapped_stashes.append(sr)
+ src_string.append("%d:%s" % (sid, sr.to_string_raw()))
+ heapq.heappush(free_stash_ids, sid)
+
+ if unstashed_src_ranges:
+ src_string.insert(1, unstashed_src_ranges.to_string_raw())
+ if xf.use_stash:
+ mapped_unstashed = xf.src_ranges.map_within(unstashed_src_ranges)
+ src_string.insert(2, mapped_unstashed.to_string_raw())
+ mapped_stashes.append(mapped_unstashed)
+ self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
+ else:
+ src_string.insert(1, "-")
+ self.AssertPartition(RangeSet(data=(0, size)), mapped_stashes)
+
+ src_string = " ".join(src_string)
+
+ # both versions:
+ # zero <rangeset>
+ # new <rangeset>
+ # erase <rangeset>
+ #
+ # version 1:
+ # bsdiff patchstart patchlen <src rangeset> <tgt rangeset>
+ # imgdiff patchstart patchlen <src rangeset> <tgt rangeset>
+ # move <src rangeset> <tgt rangeset>
+ #
+ # version 2:
+ # bsdiff patchstart patchlen <tgt rangeset> <src_string>
+ # imgdiff patchstart patchlen <tgt rangeset> <src_string>
+ # move <tgt rangeset> <src_string>
tgt_size = xf.tgt_ranges.size()
@@ -255,17 +340,27 @@ class BlockImageDiff(object):
assert xf.tgt_ranges
assert xf.src_ranges.size() == tgt_size
if xf.src_ranges != xf.tgt_ranges:
- out.append("%s %s %s\n" % (
- xf.style,
- xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ if self.version == 1:
+ out.append("%s %s %s\n" % (
+ xf.style,
+ xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ elif self.version == 2:
+ out.append("%s %s %s\n" % (
+ xf.style,
+ xf.tgt_ranges.to_string_raw(), src_string))
total += tgt_size
elif xf.style in ("bsdiff", "imgdiff"):
performs_read = True
assert xf.tgt_ranges
assert xf.src_ranges
- out.append("%s %d %d %s %s\n" % (
- xf.style, xf.patch_start, xf.patch_len,
- xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ if self.version == 1:
+ out.append("%s %d %d %s %s\n" % (
+ xf.style, xf.patch_start, xf.patch_len,
+ xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+ elif self.version == 2:
+ out.append("%s %d %d %s %s\n" % (
+ xf.style, xf.patch_start, xf.patch_len,
+ xf.tgt_ranges.to_string_raw(), src_string))
total += tgt_size
elif xf.style == "zero":
assert xf.tgt_ranges
@@ -276,7 +371,10 @@ class BlockImageDiff(object):
else:
raise ValueError, "unknown transfer style '%s'\n" % (xf.style,)
- out.insert(1, str(total) + "\n")
+
+ # sanity check: abort if we're going to need more than 512 MB of
+ # stash space
+ assert max_stashed_blocks * self.tgt.blocksize < (512 << 20)
all_tgt = RangeSet(data=(0, self.tgt.total_blocks))
if performs_read:
@@ -289,12 +387,24 @@ class BlockImageDiff(object):
else:
# if nothing is read (ie, this is a full OTA), then we can start
# by erasing the entire partition.
- out.insert(2, "erase %s\n" % (all_tgt.to_string_raw(),))
+ out.insert(0, "erase %s\n" % (all_tgt.to_string_raw(),))
+
+ out.insert(0, "%d\n" % (self.version,)) # format version number
+ out.insert(1, str(total) + "\n")
+ if self.version >= 2:
+ # version 2 only: after the total block count, we give the number
+ # of stash slots needed, and the maximum size needed (in blocks)
+ out.insert(2, str(next_stash_id) + "\n")
+ out.insert(3, str(max_stashed_blocks) + "\n")
with open(prefix + ".transfer.list", "wb") as f:
for i in out:
f.write(i)
+ if self.version >= 2:
+ print("max stashed blocks: %d (%d bytes)\n" % (
+ max_stashed_blocks, max_stashed_blocks * self.tgt.blocksize))
+
def ComputePatches(self, prefix):
print("Reticulating splines...")
diff_q = []
@@ -409,7 +519,13 @@ class BlockImageDiff(object):
# Imagine processing the transfers in order.
for xf in self.transfers:
# Check that the input blocks for this transfer haven't yet been touched.
- assert not touched.overlaps(xf.src_ranges)
+
+ x = xf.src_ranges
+ if self.version >= 2:
+ for _, sr in xf.use_stash:
+ x = x.subtract(sr)
+
+ assert not touched.overlaps(x)
# Check that the output blocks for this transfer haven't yet been touched.
assert not touched.overlaps(xf.tgt_ranges)
# Touch all the blocks written by this transfer.
@@ -418,6 +534,47 @@ class BlockImageDiff(object):
# Check that we've written every target block.
assert touched == self.tgt.care_map
+ def ImproveVertexSequence(self):
+ print("Improving vertex order...")
+
+ # At this point our digraph is acyclic; we reversed any edges that
+ # were backwards in the heuristically-generated sequence. The
+ # previously-generated order is still acceptable, but we hope to
+ # find a better order that needs less memory for stashed data.
+ # Now we do a topological sort to generate a new vertex order,
+ # using a greedy algorithm to choose which vertex goes next
+ # whenever we have a choice.
+
+ # Make a copy of the edge set; this copy will get destroyed by the
+ # algorithm.
+ for xf in self.transfers:
+ xf.incoming = xf.goes_after.copy()
+ xf.outgoing = xf.goes_before.copy()
+
+ L = [] # the new vertex order
+
+ # S is the set of sources in the remaining graph; we always choose
+ # the one that leaves the least amount of stashed data after it's
+ # executed.
+ S = [(u.NetStashChange(), u.order, u) for u in self.transfers
+ if not u.incoming]
+ heapq.heapify(S)
+
+ while S:
+ _, _, xf = heapq.heappop(S)
+ L.append(xf)
+ for u in xf.outgoing:
+ del u.incoming[xf]
+ if not u.incoming:
+ heapq.heappush(S, (u.NetStashChange(), u.order, u))
+
+ # if this fails then our graph had a cycle.
+ assert len(L) == len(self.transfers)
+
+ self.transfers = L
+ for i, xf in enumerate(L):
+ xf.order = i
+
def RemoveBackwardEdges(self):
print("Removing backward edges...")
in_order = 0
@@ -425,19 +582,17 @@ class BlockImageDiff(object):
lost_source = 0
for xf in self.transfers:
- io = 0
- ooo = 0
lost = 0
size = xf.src_ranges.size()
for u in xf.goes_before:
# xf should go before u
if xf.order < u.order:
# it does, hurray!
- io += 1
+ in_order += 1
else:
# it doesn't, boo. trim the blocks that u writes from xf's
# source, so that xf can go after u.
- ooo += 1
+ out_of_order += 1
assert xf.src_ranges.overlaps(u.tgt_ranges)
xf.src_ranges = xf.src_ranges.subtract(u.tgt_ranges)
xf.intact = False
@@ -448,8 +603,6 @@ class BlockImageDiff(object):
lost = size - xf.src_ranges.size()
lost_source += lost
- in_order += io
- out_of_order += ooo
print((" %d/%d dependencies (%.2f%%) were violated; "
"%d source blocks removed.") %
@@ -458,6 +611,48 @@ class BlockImageDiff(object):
if (in_order + out_of_order) else 0.0,
lost_source))
+ def ReverseBackwardEdges(self):
+ print("Reversing backward edges...")
+ in_order = 0
+ out_of_order = 0
+ stashes = 0
+ stash_size = 0
+
+ for xf in self.transfers:
+ lost = 0
+ size = xf.src_ranges.size()
+ for u in xf.goes_before.copy():
+ # xf should go before u
+ if xf.order < u.order:
+ # it does, hurray!
+ in_order += 1
+ else:
+ # it doesn't, boo. modify u to stash the blocks that it
+ # writes that xf wants to read, and then require u to go
+ # before xf.
+ out_of_order += 1
+
+ overlap = xf.src_ranges.intersect(u.tgt_ranges)
+ assert overlap
+
+ u.stash_before.append((stashes, overlap))
+ xf.use_stash.append((stashes, overlap))
+ stashes += 1
+ stash_size += overlap.size()
+
+ # reverse the edge direction; now xf must go after u
+ del xf.goes_before[u]
+ del u.goes_after[xf]
+ xf.goes_after[u] = None # value doesn't matter
+ u.goes_before[xf] = None
+
+ print((" %d/%d dependencies (%.2f%%) were violated; "
+ "%d source blocks stashed.") %
+ (out_of_order, in_order + out_of_order,
+ (out_of_order * 100.0 / (in_order + out_of_order))
+ if (in_order + out_of_order) else 0.0,
+ stash_size))
+
def FindVertexSequence(self):
print("Finding vertex sequence...")
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index a010e84..302aa0c 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -160,7 +160,7 @@ def MakeVerityEnabledImage(out_file, prop_dict):
# get properties
image_size = prop_dict["partition_size"]
block_dev = prop_dict["verity_block_device"]
- signer_key = prop_dict["verity_key"]
+ signer_key = prop_dict["verity_key"] + ".pk8"
signer_path = prop_dict["verity_signer_cmd"]
# make a tempdir
@@ -240,6 +240,8 @@ def BuildImage(in_dir, prop_dict, out_file,
build_command.extend([in_dir, out_file, fs_type,
prop_dict["mount_point"]])
build_command.append(prop_dict["partition_size"])
+ if "journal_size" in prop_dict:
+ build_command.extend(["-j", prop_dict["journal_size"]])
if "timestamp" in prop_dict:
build_command.extend(["-T", str(prop_dict["timestamp"])])
if fs_config is not None:
@@ -319,6 +321,7 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
if mount_point == "system":
copy_prop("fs_type", "fs_type")
copy_prop("system_size", "partition_size")
+ copy_prop("system_journal_size", "journal_size")
copy_prop("system_verity_block_device", "verity_block_device")
elif mount_point == "data":
# Copy the generic fs type first, override with specific one if available.
@@ -331,10 +334,12 @@ def ImagePropFromGlobalDict(glob_dict, mount_point):
elif mount_point == "vendor":
copy_prop("vendor_fs_type", "fs_type")
copy_prop("vendor_size", "partition_size")
+ copy_prop("vendor_journal_size", "journal_size")
copy_prop("vendor_verity_block_device", "verity_block_device")
elif mount_point == "oem":
copy_prop("fs_type", "fs_type")
copy_prop("oem_size", "partition_size")
+ copy_prop("oem_journal_size", "journal_size")
return d
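
The build_image.py hunks above add an optional per-partition journal size: ImagePropFromGlobalDict() now copies {system,vendor,oem}_journal_size into the per-image property dict, and BuildImage() forwards it to the image builder as "-j <size>". A small illustration of that plumbing is sketched below with a made-up glob_dict and a simplified copy_prop helper (the real one is a closure inside ImagePropFromGlobalDict, and the mkuserimg.sh command line here is abbreviated).

def copy_prop(glob_dict, d, src_p, dest_p):
    # Simplified version of the helper in build_image.py: copy a property
    # only if the global dictionary actually defines it.
    if src_p in glob_dict:
        d[dest_p] = glob_dict[src_p]

glob_dict = {"system_size": "1073741824", "system_journal_size": "0"}  # made-up values
d = {"mount_point": "system"}
copy_prop(glob_dict, d, "system_size", "partition_size")
copy_prop(glob_dict, d, "system_journal_size", "journal_size")

build_command = ["mkuserimg.sh", "in_dir", "system.img", "ext4",
                 d["mount_point"], d["partition_size"]]
if "journal_size" in d:                      # mirrors the new BuildImage() branch
    build_command.extend(["-j", d["journal_size"]])
print(build_command)
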
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 815c76c..6865a5d 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -347,7 +347,7 @@ def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
if info_dict.get("verity_key", None):
path = "/" + os.path.basename(sourcedir).lower()
- cmd = ["boot_signer", path, img.name, info_dict["verity_key"], img.name]
+ cmd = ["boot_signer", path, img.name, info_dict["verity_key"] + ".pk8", info_dict["verity_key"] + ".x509.pem", img.name]
p = Run(cmd, stdout=subprocess.PIPE)
p.communicate()
assert p.returncode == 0, "boot_signer of %s image failed" % path
@@ -1030,7 +1030,14 @@ class BlockDifference:
self.partition = partition
self.check_first_block = check_first_block
- b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads)
+ version = 1
+ if OPTIONS.info_dict:
+ version = max(
+ int(i) for i in
+ OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
+
+ b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
+ version=version)
tmpdir = tempfile.mkdtemp()
OPTIONS.tempfiles.append(tmpdir)
self.path = os.path.join(tmpdir, partition)
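
The BlockDifference change above lets the target-files metadata advertise which block-image-diff versions the device's updater supports; common.py picks the highest value from the comma-separated blockimgdiff_versions entry and falls back to version 1 when the key is absent. The parsing is a one-liner; a hypothetical info_dict is used below just to show it.

info_dict = {"blockimgdiff_versions": "1,2"}   # hypothetical example value
version = max(int(i) for i in
              info_dict.get("blockimgdiff_versions", "1").split(","))
print(version)   # 2 (a missing entry falls back to "1", i.e. version 1)
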
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 2bd071d..7d318a3 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -164,14 +164,25 @@ class EdifyGenerator(object):
self.script.append(('apply_patch_space(%d) || abort("Not enough free space '
'on /system to apply patches.");') % (amount,))
- def Mount(self, mount_point):
- """Mount the partition with the given mount_point."""
+ def Mount(self, mount_point, mount_options_by_format=""):
+ """Mount the partition with the given mount_point.
+ mount_options_by_format:
+ [fs_type=option[,option]...[|fs_type=option[,option]...]...]
+ where option is optname[=optvalue]
+ E.g. ext4=barrier=1,nodelalloc,errors=panic|f2fs=errors=recover
+ """
fstab = self.info.get("fstab", None)
if fstab:
p = fstab[mount_point]
- self.script.append('mount("%s", "%s", "%s", "%s");' %
+ mount_dict = {}
+ if mount_options_by_format is not None:
+ for option in mount_options_by_format.split("|"):
+ if "=" in option:
+ key, value = option.split("=", 1)
+ mount_dict[key] = value
+ self.script.append('mount("%s", "%s", "%s", "%s", "%s");' %
(p.fs_type, common.PARTITION_TYPES[p.fs_type],
- p.device, p.mount_point))
+ p.device, p.mount_point, mount_dict.get(p.fs_type, "")))
self.mounts.add(p.mount_point)
def UnpackPackageDir(self, src, dst):
@@ -312,6 +323,10 @@ class EdifyGenerator(object):
"""Append text verbatim to the output script."""
self.script.append(extra)
+ def Unmount(self, mount_point):
+ self.script.append('unmount("%s");' % (mount_point,))
+ self.mounts.remove(mount_point)
+
def UnmountAll(self):
for p in sorted(self.mounts):
self.script.append('unmount("%s");' % (p,))
diff --git a/tools/releasetools/ota_from_target_files b/tools/releasetools/ota_from_target_files
index e8dff5a..755e5c2 100755
--- a/tools/releasetools/ota_from_target_files
+++ b/tools/releasetools/ota_from_target_files
@@ -37,6 +37,10 @@ Usage: ota_from_target_files [flags] input_target_files output_ota_package
Generate an incremental OTA using the given target-files zip as
the starting build.
+ -v (--verify)
+ Remount and verify the checksums of the files written to the
+ system and vendor (if used) partitions. Incremental builds only.
+
-o (--oem_settings) <file>
Use the file to specify the expected OEM-specific properties
on the OEM partition of the intended device.
@@ -104,6 +108,7 @@ import sparse_img
OPTIONS = common.OPTIONS
OPTIONS.package_key = None
OPTIONS.incremental_source = None
+OPTIONS.verify = False
OPTIONS.require_verbatim = set()
OPTIONS.prohibit_verbatim = set(("system/build.prop",))
OPTIONS.patch_threshold = 0.95
@@ -462,11 +467,12 @@ def WriteFullOTAPackage(input_zip, output_zip):
script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+ recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
oem_dict = None
if oem_props is not None and len(oem_props) > 0:
if OPTIONS.oem_source is None:
raise common.ExternalError("OEM source required for this build")
- script.Mount("/oem")
+ script.Mount("/oem", recovery_mount_options)
oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
metadata = {"post-build": CalculateFingerprint(
@@ -527,13 +533,13 @@ def WriteFullOTAPackage(input_zip, output_zip):
bcb_dev = {"bcb_dev": fs.device}
common.ZipWriteStr(output_zip, "recovery.img", recovery_img.data)
script.AppendExtra("""
-if get_stage("%(bcb_dev)s", "stage") == "2/3" then
+if get_stage("%(bcb_dev)s") == "2/3" then
""" % bcb_dev)
script.WriteRawImage("/recovery", "recovery.img")
script.AppendExtra("""
set_stage("%(bcb_dev)s", "3/3");
reboot_now("%(bcb_dev)s", "recovery");
-else if get_stage("%(bcb_dev)s", "stage") == "3/3" then
+else if get_stage("%(bcb_dev)s") == "3/3" then
""" % bcb_dev)
device_specific.FullOTA_InstallBegin()
@@ -548,6 +554,8 @@ else if get_stage("%(bcb_dev)s", "stage") == "3/3" then
if "selinux_fc" in OPTIONS.info_dict:
WritePolicyConfig(OPTIONS.info_dict["selinux_fc"], output_zip)
+ recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
+
system_items = ItemSet("system", "META/filesystem_config.txt")
script.ShowProgress(system_progress, 0)
if block_based:
@@ -561,7 +569,7 @@ else if get_stage("%(bcb_dev)s", "stage") == "3/3" then
system_diff.WriteScript(script, output_zip)
else:
script.FormatPartition("/system")
- script.Mount("/system")
+ script.Mount("/system", recovery_mount_options)
if not has_recovery_patch:
script.UnpackPackageDir("recovery", "/system")
script.UnpackPackageDir("system", "/system")
@@ -594,7 +602,7 @@ else if get_stage("%(bcb_dev)s", "stage") == "3/3" then
vendor_diff.WriteScript(script, output_zip)
else:
script.FormatPartition("/vendor")
- script.Mount("/vendor")
+ script.Mount("/vendor", recovery_mount_options)
script.UnpackPackageDir("vendor", "/vendor")
symlinks = CopyPartitionFiles(vendor_items, input_zip, output_zip)
@@ -745,11 +753,12 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
vendor_diff = None
oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
+ recovery_mount_options = OPTIONS.target_info_dict.get("recovery_mount_options")
oem_dict = None
if oem_props is not None and len(oem_props) > 0:
if OPTIONS.oem_source is None:
raise common.ExternalError("OEM source required for this build")
- script.Mount("/oem")
+ script.Mount("/oem", recovery_mount_options)
oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
@@ -785,14 +794,14 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
bcb_dev = {"bcb_dev": fs.device}
common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
script.AppendExtra("""
-if get_stage("%(bcb_dev)s", "stage") == "2/3" then
+if get_stage("%(bcb_dev)s") == "2/3" then
""" % bcb_dev)
script.AppendExtra("sleep(20);\n");
script.WriteRawImage("/recovery", "recovery.img")
script.AppendExtra("""
set_stage("%(bcb_dev)s", "3/3");
reboot_now("%(bcb_dev)s", "recovery");
-else if get_stage("%(bcb_dev)s", "stage") != "3/3" then
+else if get_stage("%(bcb_dev)s") != "3/3" then
""" % bcb_dev)
script.Print("Verifying current system...")
@@ -935,7 +944,7 @@ class FileDifference:
raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
print "send", fn, "verbatim"
tf.AddToZip(output_zip)
- verbatim_targets.append((fn, tf.size))
+ verbatim_targets.append((fn, tf.size, tf.sha1))
if fn in target_data.keys():
AddToKnownPaths(fn, known_paths)
elif tf.sha1 != sf.sha1:
@@ -956,7 +965,7 @@ class FileDifference:
# or a patch + rename cannot take place due to the target
# directory not existing
tf.AddToZip(output_zip)
- verbatim_targets.append((tf.name, tf.size))
+ verbatim_targets.append((tf.name, tf.size, tf.sha1))
if sf.name in renames:
del renames[sf.name]
AddToKnownPaths(tf.name, known_paths)
@@ -976,6 +985,13 @@ class FileDifference:
so_far += sf.size
return so_far
+ def EmitExplicitTargetVerification(self, script):
+ for fn, size, sha1 in self.verbatim_targets:
+ if (fn[-1] != "/"):
+ script.FileCheck("/"+fn, sha1)
+ for tf, _, _, _ in self.patch_list:
+ script.FileCheck(tf.name, tf.sha1)
+
def RemoveUnneededFiles(self, script, extras=()):
script.DeleteFiles(["/"+i[0] for i in self.verbatim_targets] +
["/"+i for i in sorted(self.source_data)
@@ -1035,11 +1051,12 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
OPTIONS.target_info_dict)
oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+ recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
oem_dict = None
if oem_props is not None and len(oem_props) > 0:
if OPTIONS.oem_source is None:
raise common.ExternalError("OEM source required for this build")
- script.Mount("/oem")
+ script.Mount("/oem", recovery_mount_options)
oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
metadata = {"pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
@@ -1059,10 +1076,10 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
info_dict=OPTIONS.info_dict)
system_diff = FileDifference("system", source_zip, target_zip, output_zip)
- script.Mount("/system")
+ script.Mount("/system", recovery_mount_options)
if HasVendorPartition(target_zip):
vendor_diff = FileDifference("vendor", source_zip, target_zip, output_zip)
- script.Mount("/vendor")
+ script.Mount("/vendor", recovery_mount_options)
else:
vendor_diff = None
@@ -1133,14 +1150,14 @@ def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
bcb_dev = {"bcb_dev": fs.device}
common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
script.AppendExtra("""
-if get_stage("%(bcb_dev)s", "stage") == "2/3" then
+if get_stage("%(bcb_dev)s") == "2/3" then
""" % bcb_dev)
script.AppendExtra("sleep(20);\n");
script.WriteRawImage("/recovery", "recovery.img")
script.AppendExtra("""
set_stage("%(bcb_dev)s", "3/3");
reboot_now("%(bcb_dev)s", "recovery");
-else if get_stage("%(bcb_dev)s", "stage") != "3/3" then
+else if get_stage("%(bcb_dev)s") != "3/3" then
""" % bcb_dev)
script.Print("Verifying current system...")
@@ -1346,7 +1363,19 @@ endif;
endif;
""" % bcb_dev)
+ if OPTIONS.verify and system_diff:
+ script.Print("Remounting and verifying system partition files...")
+ script.Unmount("/system")
+ script.Mount("/system")
+ system_diff.EmitExplicitTargetVerification(script)
+
+ if OPTIONS.verify and vendor_diff:
+ script.Print("Remounting and verifying vendor partition files...")
+ script.Unmount("/vendor")
+ script.Mount("/vendor")
+ vendor_diff.EmitExplicitTargetVerification(script)
script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+
WriteMetadata(metadata, output_zip)
@@ -1382,6 +1411,8 @@ def main(argv):
OPTIONS.two_step = True
elif o == "--no_signing":
OPTIONS.no_signing = True
+ elif o in ("--verify"):
+ OPTIONS.verify = True
elif o == "--block":
OPTIONS.block_based = True
elif o in ("-b", "--binary"):
@@ -1407,6 +1438,7 @@ def main(argv):
"block",
"binary=",
"oem_settings=",
+ "verify",
"no_fallback_to_full",
],
extra_option_handler=option_handler)
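
With --verify, the generated updater script remounts /system (and /vendor, when present) after installation and re-checks every file it wrote: verbatim files are now recorded as (name, size, sha1) triples, and EmitExplicitTargetVerification() turns them, plus the patched files, into per-file checks. A sketch of that loop over hypothetical data follows; RecorderScript is a stand-in that just records what the real EdifyGenerator.FileCheck() hook would be asked to verify.

class RecorderScript(object):
    """Hypothetical stand-in for EdifyGenerator: records FileCheck requests."""
    def __init__(self):
        self.checks = []
    def FileCheck(self, filename, sha1):
        self.checks.append((filename, sha1))

# Made-up verbatim_targets in the new (name, size, sha1) shape; entries ending
# in "/" are directories and are skipped, as in EmitExplicitTargetVerification.
verbatim_targets = [
    ("system/app/Example.apk", 12345, "da39a3ee5e6b4b0d3255bfef95601890afd80709"),
    ("system/media/", 0, None),
]

script = RecorderScript()
for fn, size, sha1 in verbatim_targets:
    if fn[-1] != "/":
        script.FileCheck("/" + fn, sha1)
print(script.checks)
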
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index 8a85d2d..7279c60 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -24,7 +24,9 @@ class RangeSet(object):
lots of runs."""
def __init__(self, data=None):
- if data:
+ if isinstance(data, str):
+ self._parse_internal(data)
+ elif data:
self.data = tuple(self._remove_pairs(data))
else:
self.data = ()
@@ -46,6 +48,9 @@ class RangeSet(object):
else:
return self.to_string()
+ def __repr__(self):
+ return '<RangeSet("' + self.to_string() + '")>'
+
@classmethod
def parse(cls, text):
"""Parse a text string consisting of a space-separated list of
@@ -59,7 +64,9 @@ class RangeSet(object):
"15-20 30 10-14" is not, even though they represent the same set
of blocks (and the two RangeSets will compare equal with ==).
"""
+ return cls(text)
+ def _parse_internal(self, text):
data = []
last = -1
monotonic = True
@@ -81,9 +88,8 @@ class RangeSet(object):
else:
monotonic = True
data.sort()
- r = RangeSet(cls._remove_pairs(data))
- r.monotonic = monotonic
- return r
+ self.data = tuple(self._remove_pairs(data))
+ self.monotonic = monotonic
@staticmethod
def _remove_pairs(source):
@@ -113,7 +119,13 @@ class RangeSet(object):
def union(self, other):
"""Return a new RangeSet representing the union of this RangeSet
- with the argument."""
+ with the argument.
+
+ >>> RangeSet("10-19 30-34").union(RangeSet("18-29"))
+ <RangeSet("10-34")>
+ >>> RangeSet("10-19 30-34").union(RangeSet("22 32"))
+ <RangeSet("10-19 22 30-34")>
+ """
out = []
z = 0
for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
@@ -125,7 +137,13 @@ class RangeSet(object):
def intersect(self, other):
"""Return a new RangeSet representing the intersection of this
- RangeSet with the argument."""
+ RangeSet with the argument.
+
+ >>> RangeSet("10-19 30-34").intersect(RangeSet("18-32"))
+ <RangeSet("18-19 30-32")>
+ >>> RangeSet("10-19 30-34").intersect(RangeSet("22-28"))
+ <RangeSet("")>
+ """
out = []
z = 0
for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
@@ -137,7 +155,13 @@ class RangeSet(object):
def subtract(self, other):
"""Return a new RangeSet representing subtracting the argument
- from this RangeSet."""
+ from this RangeSet.
+
+ >>> RangeSet("10-19 30-34").subtract(RangeSet("18-32"))
+ <RangeSet("10-17 33-34")>
+ >>> RangeSet("10-19 30-34").subtract(RangeSet("22-28"))
+ <RangeSet("10-19 30-34")>
+ """
out = []
z = 0
@@ -150,7 +174,13 @@ class RangeSet(object):
def overlaps(self, other):
"""Returns true if the argument has a nonempty overlap with this
- RangeSet."""
+ RangeSet.
+
+ >>> RangeSet("10-19 30-34").overlaps(RangeSet("18-32"))
+ True
+ >>> RangeSet("10-19 30-34").overlaps(RangeSet("22-28"))
+ False
+ """
# This is like intersect, but we can stop as soon as we discover the
# output is going to be nonempty.
@@ -164,7 +194,11 @@ class RangeSet(object):
def size(self):
"""Returns the total size of the RangeSet (ie, how many integers
- are in the set)."""
+ are in the set).
+
+ >>> RangeSet("10-19 30-34").size()
+ 15
+ """
total = 0
for i, p in enumerate(self.data):
@@ -173,3 +207,37 @@ class RangeSet(object):
else:
total -= p
return total
+
+ def map_within(self, other):
+ """'other' should be a subset of 'self'. Returns a RangeSet
+ representing what 'other' would get translated to if the integers
+ of 'self' were translated down to be contiguous starting at zero.
+
+ >>> RangeSet("0-9").map_within(RangeSet("3-4"))
+ <RangeSet("3-4")>
+ >>> RangeSet("10-19").map_within(RangeSet("13-14"))
+ <RangeSet("3-4")>
+ >>> RangeSet("10-19 30-39").map_within(RangeSet("17-19 30-32"))
+ <RangeSet("7-12")>
+ >>> RangeSet("10-19 30-39").map_within(RangeSet("12-13 17-19 30-32"))
+ <RangeSet("2-3 7-12")>
+ """
+
+ out = []
+ offset = 0
+ start = None
+ for p, d in heapq.merge(zip(self.data, itertools.cycle((-5, +5))),
+ zip(other.data, itertools.cycle((-1, +1)))):
+ if d == -5:
+ start = p
+ elif d == +5:
+ offset += p-start
+ start = None
+ else:
+ out.append(offset + p - start)
+ return RangeSet(data=out)
+
+
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod()
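
Since the RangeSet constructor now accepts a range string directly and the module gained a __main__ doctest hook, the docstring examples above double as the module's tests (doctest.testmod() is silent when everything passes). A quick interactive check of the new entry points, using only operations and expected values shown in this diff; it assumes rangelib.py is importable from the current directory.

from rangelib import RangeSet   # assumes rangelib.py is on sys.path

s = RangeSet("10-19 30-34")                    # string form goes through _parse_internal
print(s.size())                                # 15
print(s.union(RangeSet("18-29")))              # 10-34
print(s.subtract(RangeSet("18-32")))           # 10-17 33-34
print(repr(s.intersect(RangeSet("22-28"))))    # <RangeSet("")>
print(s.map_within(RangeSet("17-19 30-32")))   # 7-12
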