27 files changed, 625 insertions, 159 deletions
diff --git a/core/Makefile b/core/Makefile index 02535c23c1..9471148229 100644 --- a/core/Makefile +++ b/core/Makefile @@ -162,11 +162,19 @@ endif $(call dist-for-goals,sdk,$(API_FINGERPRINT)) INSTALLED_RECOVERYIMAGE_TARGET := +# Build recovery image if +# BUILDING_RECOVERY_IMAGE && !BOARD_USES_RECOVERY_AS_BOOT && !BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT. +# If BOARD_USES_RECOVERY_AS_BOOT is true, leave empty because INSTALLED_BOOTIMAGE_TARGET is built +# with recovery resources. +# If BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT is true, leave empty to build recovery resources +# but not the final recovery image. ifdef BUILDING_RECOVERY_IMAGE ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true) +ifneq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true) INSTALLED_RECOVERYIMAGE_TARGET := $(PRODUCT_OUT)/recovery.img endif endif +endif include $(BUILD_SYSTEM)/sysprop.mk @@ -746,7 +754,19 @@ endif INTERNAL_KERNEL_CMDLINE := $(strip $(INTERNAL_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT) $(VERITY_KEYID)) -ifndef BUILDING_VENDOR_BOOT_IMAGE +boot_uses_generic_kernel_image := +ifdef BUILDING_VENDOR_BOOT_IMAGE + # building vendor boot image, dtb/base/pagesize go there + boot_uses_generic_kernel_image := true +else ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE)) + boot_uses_generic_kernel_image := true +endif + +ifeq (true,$(boot_uses_generic_kernel_image)) + ifdef GENERIC_KERNEL_CMDLINE + INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(GENERIC_KERNEL_CMDLINE)" + endif +else # boot_uses_generic_kernel_image != true ifdef BOARD_KERNEL_BASE INTERNAL_BOOTIMAGE_ARGS += --base $(BOARD_KERNEL_BASE) endif @@ -756,12 +776,8 @@ endif ifdef INTERNAL_KERNEL_CMDLINE INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(INTERNAL_KERNEL_CMDLINE)" endif -else -# building vendor boot image, dtb/base/pagesize go there -ifdef GENERIC_KERNEL_CMDLINE - INTERNAL_BOOTIMAGE_ARGS += --cmdline "$(GENERIC_KERNEL_CMDLINE)" -endif -endif +endif # boot_uses_generic_kernel_image == true +boot_uses_generic_kernel_image := INTERNAL_MKBOOTIMG_VERSION_ARGS := \ --os_version $(PLATFORM_VERSION_LAST_STABLE) \ @@ -869,6 +885,12 @@ endif # BOARD_PREBUILT_BOOTIMAGE endif # TARGET_NO_KERNEL # ----------------------------------------------------------------- +# declare recovery ramdisk files +ifeq ($(BUILDING_RECOVERY_IMAGE),true) +INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP := $(call intermediates-dir-for,PACKAGING,recovery)/ramdisk_files-timestamp +endif + +# ----------------------------------------------------------------- # vendor boot image ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true) @@ -881,8 +903,14 @@ INTERNAL_VENDOR_RAMDISK_FILES := $(filter $(TARGET_VENDOR_RAMDISK_OUT)/%, \ $(ALL_DEFAULT_INSTALLED_MODULES)) INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor-boot)/vendor-ramdisk.cpio$(RAMDISK_EXT) + +ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)) +$(INTERNAL_VENDOR_RAMDISK_TARGET): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP) +$(INTERNAL_VENDOR_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT) +endif + $(INTERNAL_VENDOR_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS) - $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_VENDOR_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@ + $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_VENDOR_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@ ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG INTERNAL_VENDOR_BOOTIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET) @@ -1335,6 +1363,7 @@ $(if $(filter $(2),userdata),\ $(if 
$(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1)) $(if $(PRODUCT_FS_CASEFOLD),$(hide) echo "needs_casefold=$(PRODUCT_FS_CASEFOLD)" >> $(1)) $(if $(PRODUCT_QUOTA_PROJID),$(hide) echo "needs_projid=$(PRODUCT_QUOTA_PROJID)" >> $(1)) + $(if $(PRODUCT_FS_COMPRESSION),$(hide) echo "needs_compress=$(PRODUCT_FS_COMPRESSION)" >> $(1)) $(hide) echo "userdata_selinux_fc=$(SELINUX_FC)" >> $(1) $(hide) echo "building_userdata_image=$(BUILDING_USERDATA_IMAGE)" >> $(1) ) @@ -1866,17 +1895,23 @@ endif ifeq (truetrue,$(strip $(BUILDING_VENDOR_BOOT_IMAGE))$(strip $(AB_OTA_UPDATER))) INTERNAL_RECOVERYIMAGE_ARGS := --ramdisk $(recovery_ramdisk) + +ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)) ifdef GENERIC_KERNEL_CMDLINE INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(GENERIC_KERNEL_CMDLINE)" -endif +endif # GENERIC_KERNEL_CMDLINE != "" +endif # BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE != true + else # not (BUILDING_VENDOR_BOOT_IMAGE and AB_OTA_UPDATER) INTERNAL_RECOVERYIMAGE_ARGS := \ $(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \ --ramdisk $(recovery_ramdisk) # Assumes this has already been stripped +ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)) ifdef INTERNAL_KERNEL_CMDLINE INTERNAL_RECOVERYIMAGE_ARGS += --cmdline "$(INTERNAL_KERNEL_CMDLINE)" -endif +endif # INTERNAL_KERNEL_CMDLINE != "" +endif # BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE != true ifdef BOARD_KERNEL_BASE INTERNAL_RECOVERYIMAGE_ARGS += --base $(BOARD_KERNEL_BASE) endif @@ -1901,7 +1936,7 @@ ifndef BOARD_RECOVERY_MKBOOTIMG_ARGS BOARD_RECOVERY_MKBOOTIMG_ARGS := $(BOARD_MKBOOTIMG_ARGS) endif -$(recovery_ramdisk): $(MKBOOTFS) $(COMPRESSION_COMMAND_DEPS) \ +$(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP): $(MKBOOTFS) $(COMPRESSION_COMMAND_DEPS) \ $(INTERNAL_ROOT_FILES) \ $(INSTALLED_RAMDISK_TARGET) \ $(INTERNAL_RECOVERYIMAGE_FILES) \ @@ -1937,16 +1972,19 @@ $(recovery_ramdisk): $(MKBOOTFS) $(COMPRESSION_COMMAND_DEPS) \ cp -f $(recovery_wipe) $(TARGET_RECOVERY_ROOT_OUT)/system/etc/recovery.wipe) ln -sf prop.default $(TARGET_RECOVERY_ROOT_OUT)/default.prop $(BOARD_RECOVERY_IMAGE_PREPARE) + $(hide) touch $@ + +$(recovery_ramdisk): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(COMPRESSION_COMMAND) > $(recovery_ramdisk) # $(1): output file -# $(2): kernel file +# $(2): optional kernel file define build-recoveryimage-target $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \ - $(MKBOOTIMG) --kernel $(2) $(INTERNAL_RECOVERYIMAGE_ARGS) \ + $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \ $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \ --output $(1).unsigned, \ - $(MKBOOTIMG) --kernel $(2) $(INTERNAL_RECOVERYIMAGE_ARGS) \ + $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \ $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_RECOVERY_MKBOOTIMG_ARGS) \ --output $(1)) $(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\ @@ -1998,7 +2036,8 @@ $(INSTALLED_BOOTIMAGE_TARGET): $(recoveryimage-deps) endif # BOARD_USES_RECOVERY_AS_BOOT $(INSTALLED_RECOVERYIMAGE_TARGET): $(recoveryimage-deps) - $(call build-recoveryimage-target, $@, $(recovery_kernel)) + $(call build-recoveryimage-target, $@, \ + $(if $(filter true, $(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)),, $(recovery_kernel))) ifdef RECOVERY_RESOURCE_ZIP $(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ZIPTIME) @@ -2010,7 +2049,8 @@ endif .PHONY: 
recoveryimage-nodeps recoveryimage-nodeps: @echo "make $@: ignoring dependencies" - $(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET), $(recovery_kernel)) + $(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET), \ + $(if $(filter true, $(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)),, $(recovery_kernel))) else # BUILDING_RECOVERY_IMAGE RECOVERY_RESOURCE_ZIP := @@ -2108,7 +2148,7 @@ endif # BUILDING_RAMDISK_IMAGE # # Note: it's intentional to skip signing for boot-debug.img, because it # can only be used if the device is unlocked with verification error. -ifdef BUILDING_BOOT_IMAGE +ifneq ($(INSTALLED_BOOTIMAGE_TARGET),) ifneq ($(strip $(TARGET_NO_KERNEL)),true) ifneq ($(strip $(BOARD_KERNEL_BINARIES)),) INSTALLED_DEBUG_BOOTIMAGE_TARGET := $(foreach k,$(subst kernel,boot-debug,$(BOARD_KERNEL_BINARIES)), \ @@ -2163,7 +2203,7 @@ bootimage_debug-nodeps: $(MKBOOTIMG) $(foreach b,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(call build-debug-bootimage-target,$b)) endif # TARGET_NO_KERNEL -endif # BUILDING_BOOT_IMAGE +endif # INSTALLED_BOOTIMAGE_TARGET ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true) ifeq ($(BUILDING_RAMDISK_IMAGE),true) @@ -3576,11 +3616,19 @@ endif check_vintf_system_deps := $(filter $(TARGET_OUT)/etc/vintf/%, $(check_vintf_common_srcs)) ifneq ($(check_vintf_system_deps),) check_vintf_has_system := true + check_vintf_system_log := $(intermediates)/check_vintf_system_log check_vintf_all_deps += $(check_vintf_system_log) $(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps) @( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 ) check_vintf_system_log := + +vintffm_log := $(intermediates)/vintffm_log +check_vintf_all_deps += $(vintffm_log) +$(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps) + @( $< --check --dirmap /system:$(TARGET_OUT) \ + $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 ) + endif # check_vintf_system_deps check_vintf_system_deps := @@ -3891,6 +3939,7 @@ INTERNAL_OTATOOLS_MODULES := \ signapk \ simg2img \ sload_f2fs \ + toybox \ tune2fs \ unpack_bootimg \ update_host_simulator \ @@ -4155,6 +4204,9 @@ endif ifeq ($(BOARD_BOOTLOADER_IN_UPDATE_PACKAGE),true) $(hide) echo "bootloader_in_update_package=true" >> $@ endif +ifeq ($(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE),true) + $(hide) echo "exclude_kernel_from_recovery_image=true" >> $@ +endif .PHONY: misc_info misc_info: $(INSTALLED_MISC_INFO_TARGET) @@ -4213,8 +4265,11 @@ endif $(BUILT_TARGET_FILES_PACKAGE): $(updater_dep) # If we are using recovery as boot, output recovery files to BOOT/. +# If we are moving recovery resources to vendor_boot, output recovery files to VENDOR_BOOT/. 
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true) $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := BOOT +else ifeq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true) +$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := VENDOR_BOOT else $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY endif @@ -4397,6 +4452,7 @@ $(BUILT_TARGET_FILES_PACKAGE): \ $(INSTALLED_CUSTOMIMAGES_TARGET) \ $(INSTALLED_ANDROID_INFO_TXT_TARGET) \ $(INSTALLED_KERNEL_TARGET) \ + $(INSTALLED_RAMDISK_TARGET) \ $(INSTALLED_DTBIMAGE_TARGET) \ $(INSTALLED_2NDBOOTLOADER_TARGET) \ $(BOARD_PREBUILT_DTBOIMAGE) \ @@ -4424,7 +4480,7 @@ $(BUILT_TARGET_FILES_PACKAGE): \ @echo "Package target files: $@" $(hide) rm -rf $@ $@.list $(zip_root) $(hide) mkdir -p $(dir $@) $(zip_root) -ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))) +ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))$(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))) @# Components of the recovery image $(hide) mkdir -p $(zip_root)/$(PRIVATE_RECOVERY_OUT) $(hide) $(call package_files-copy-root, \ @@ -4432,12 +4488,14 @@ ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_ ifdef INSTALLED_KERNEL_TARGET ifneq (,$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/ -else # BOARD_USES_RECOVERY_AS_BOOT not true +else ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)) cp $(firstword $(INSTALLED_KERNEL_TARGET)) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel endif endif ifeq (truetrue,$(strip $(BUILDING_VENDOR_BOOT_IMAGE))$(strip $(AB_OTA_UPDATER))) +ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)) echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline +endif # BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE != true else # not (BUILDING_VENDOR_BOOT_IMAGE and AB_OTA_UPDATER) ifdef INSTALLED_2NDBOOTLOADER_TARGET cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second @@ -4455,9 +4513,11 @@ endif ifdef INSTALLED_DTBIMAGE_TARGET cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/dtb endif +ifneq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)) ifdef INTERNAL_KERNEL_CMDLINE echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline -endif +endif # INTERNAL_KERNEL_CMDLINE != "" +endif # BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE != true ifdef BOARD_KERNEL_BASE echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/base endif @@ -4480,23 +4540,25 @@ endif ifdef INSTALLED_KERNEL_TARGET $(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/ endif -ifndef INSTALLED_VENDOR_BOOTIMAGE_TARGET +ifdef INSTALLED_VENDOR_BOOTIMAGE_TARGET + echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline +else ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE)) + echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline +else # INSTALLED_VENDOR_BOOTIMAGE_TARGET == "" && BOARD_USES_GENERIC_KERNEL_IMAGE != true + echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline ifdef INSTALLED_2NDBOOTLOADER_TARGET cp $(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/BOOT/second endif ifdef INSTALLED_DTBIMAGE_TARGET cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/BOOT/dtb endif - echo "$(INTERNAL_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline ifdef BOARD_KERNEL_BASE echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/BOOT/base endif ifdef BOARD_KERNEL_PAGESIZE echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/BOOT/pagesize endif -else # 
INSTALLED_VENDOR_BOOTIMAGE_TARGET defined - echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/BOOT/cmdline -endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET defined +endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET == "" && BOARD_USES_GENERIC_KERNEL_IMAGE != true endif # BOARD_USES_RECOVERY_AS_BOOT not true $(hide) $(foreach t,$(INSTALLED_RADIOIMAGE_TARGET),\ mkdir -p $(zip_root)/RADIO; \ @@ -4564,6 +4626,13 @@ endif @# Extra contents of the OTA package $(hide) mkdir -p $(zip_root)/OTA $(hide) cp $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(zip_root)/OTA/ +ifdef BUILDING_RAMDISK_IMAGE +ifeq (true,$(BOARD_IMG_USE_RAMDISK)) + @# Contents of the ramdisk image + $(hide) mkdir -p $(zip_root)/IMAGES + $(hide) cp $(INSTALLED_RAMDISK_TARGET) $(zip_root)/IMAGES/ +endif +endif ifeq ($(TARGET_OTA_ALLOW_NON_AB),true) ifneq ($(built_ota_tools),) $(hide) mkdir -p $(zip_root)/OTA/bin @@ -4942,7 +5011,7 @@ ifeq (true,$(EMMA_INSTRUMENT)) JACOCO_REPORT_CLASSES_ALL := $(PRODUCT_OUT)/jacoco-report-classes-all.jar $(JACOCO_REPORT_CLASSES_ALL) : @echo "Collecting uninstrumented classes" - find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" 2>/dev/null | sort > $@.list + find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" -o -name "proguard_usage.zip" 2>/dev/null | sort > $@.list $(SOONG_ZIP) -o $@ -L 0 -C $(OUT_DIR) -P out -l $@.list endif # EMMA_INSTRUMENT=true diff --git a/core/board_config.mk b/core/board_config.mk index 05b6b29c16..1ab96ea622 100644 --- a/core/board_config.mk +++ b/core/board_config.mk @@ -97,6 +97,22 @@ _board_strip_readonly_list += \ BOARD_KERNEL_BINARIES \ BOARD_KERNEL_MODULE_INTERFACE_VERSIONS \ +# Variables related to generic kernel image (GKI) and generic boot image +# - BOARD_USES_GENERIC_KERNEL_IMAGE is the global variable that defines if the +# board uses GKI and generic boot image. +# Update mechanism of the boot image is not enforced by this variable. +# - BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE controls whether the recovery image +# contains a kernel or not. +# - BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT controls whether ramdisk +# recovery resources are built to vendor_boot. +# - BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT controls whether GSI AVB keys are +# built to vendor_boot. +_board_strip_readonly_list += \ + BOARD_USES_GENERIC_KERNEL_IMAGE \ + BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE \ + BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT \ + BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT \ + _build_broken_var_list := \ BUILD_BROKEN_DUP_RULES \ BUILD_BROKEN_DUP_SYSPROP \ @@ -364,6 +380,9 @@ BUILDING_RECOVERY_IMAGE := ifeq ($(PRODUCT_BUILD_RECOVERY_IMAGE),) ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true) BUILDING_RECOVERY_IMAGE := true + else ifeq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true) + # Set to true to build recovery resources for vendor_boot + BUILDING_RECOVERY_IMAGE := true else ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE ifeq (,$(filter true, $(TARGET_NO_KERNEL) $(TARGET_NO_RECOVERY))) BUILDING_RECOVERY_IMAGE := true @@ -378,12 +397,7 @@ endif BUILDING_VENDOR_BOOT_IMAGE := ifdef BOARD_BOOT_HEADER_VERSION ifneq ($(call math_gt_or_eq,$(BOARD_BOOT_HEADER_VERSION),3),) - ifneq ($(TARGET_NO_VENDOR_BOOT),) - $(warning TARGET_NO_VENDOR_BOOT has been deprecated. Please use PRODUCT_BUILD_VENDOR_BOOT_IMAGE.) 
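The new BUILDING_RECOVERY_IMAGE branch above means recovery resources are also built when BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT is true, even though no standalone recovery.img is produced in that case. As a reading aid only, here is a minimal Python model of the decision in that board_config.mk hunk; the helper name and the dict-of-flags interface are invented, and only the case where PRODUCT_BUILD_RECOVERY_IMAGE is left unset is modelled:

def building_recovery_image(board):
    # Mirrors the Makefile branches above for the case where
    # PRODUCT_BUILD_RECOVERY_IMAGE is empty; explicit product overrides are
    # handled elsewhere in board_config.mk.
    if board.get("BOARD_USES_RECOVERY_AS_BOOT") == "true":
        return True
    if board.get("BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT") == "true":
        return True  # new: build recovery resources destined for vendor_boot
    if board.get("BOARD_RECOVERYIMAGE_PARTITION_SIZE"):
        return (board.get("TARGET_NO_KERNEL") != "true" and
                board.get("TARGET_NO_RECOVERY") != "true")
    return False

assert building_recovery_image({"BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT": "true"})
assert not building_recovery_image({})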
- ifneq ($(TARGET_NO_VENDOR_BOOT),true) - BUILDING_VENDOR_BOOT_IMAGE := true - endif - else ifeq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE),) + ifeq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE),) BUILDING_VENDOR_BOOT_IMAGE := true else ifeq ($(PRODUCT_BUILD_VENDOR_BOOT_IMAGE),true) BUILDING_VENDOR_BOOT_IMAGE := true @@ -745,3 +759,28 @@ $(foreach m,$(filter-out BUILD_COPY_HEADERS,$(DEFAULT_ERROR_BUILD_MODULE_TYPES)) $(if $(filter true,$(BUILD_BROKEN_USES_$(m))),\ $(KATI_deprecated_var $(m),Please convert to Soong),\ $(KATI_obsolete_var $(m),Please convert to Soong))) + +ifndef BUILDING_RECOVERY_IMAGE + ifeq (true,$(BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE)) + $(error Should not set BOARD_EXCLUDE_KERNEL_FROM_RECOVERY_IMAGE if not building recovery image) + endif +endif + +ifndef BUILDING_VENDOR_BOOT_IMAGE + ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)) + $(error Should not set BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT if not building vendor_boot image) + endif + ifeq (true,$(BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT)) + $(error Should not set BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT if not building vendor_boot image) + endif +endif + +# If BOARD_USES_GENERIC_KERNEL_IMAGE is set, BOARD_USES_RECOVERY_AS_BOOT must not be set. +# Devices without a dedicated recovery partition uses BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT to +# build recovery into vendor_boot. +ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE)) + ifeq (true,$(BOARD_USES_RECOVERY_AS_BOOT)) + $(error BOARD_USES_RECOVERY_AS_BOOT cannot be true if BOARD_USES_GENERIC_KERNEL_IMAGE is true. \ + Use BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT instead) + endif +endif diff --git a/core/clear_vars.mk b/core/clear_vars.mk index 7d79bafb5a..6e1cb685a0 100644 --- a/core/clear_vars.mk +++ b/core/clear_vars.mk @@ -74,7 +74,6 @@ LOCAL_DROIDDOC_ASSET_DIR:= LOCAL_DROIDDOC_CUSTOM_ASSET_DIR:= LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR:= LOCAL_DROIDDOC_DOC_ZIP := -LOCAL_DROIDDOC_JDIFF_DOC_ZIP := LOCAL_DROIDDOC_HTML_DIR:= LOCAL_DROIDDOC_METADATA_ZIP:= LOCAL_DROIDDOC_OPTIONS:= diff --git a/core/config.mk b/core/config.mk index 16fa988778..e975214991 100644 --- a/core/config.mk +++ b/core/config.mk @@ -157,6 +157,7 @@ $(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported. $(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.) $(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead.) $(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead.) +$(KATI_obsolete_var TARGET_NO_VENDOR_BOOT,Use PRODUCT_BUILD_VENDOR_BOOT_IMAGE instead) # Used to force goals to build. Only use for conditionally defined goals. 
.PHONY: FORCE diff --git a/core/soong_config.mk b/core/soong_config.mk index d5e16f717e..de60b5b4b3 100644 --- a/core/soong_config.mk +++ b/core/soong_config.mk @@ -131,7 +131,7 @@ $(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VE $(call add_json_list, DeviceSystemSdkVersions, $(BOARD_SYSTEMSDK_VERSIONS)) $(call add_json_list, Platform_systemsdk_versions, $(PLATFORM_SYSTEMSDK_VERSIONS)) $(call add_json_bool, Malloc_not_svelte, $(call invert_bool,$(filter true,$(MALLOC_SVELTE)))) -$(call add_json_bool, Malloc_zero_contents, $(MALLOC_ZERO_CONTENTS)) +$(call add_json_bool, Malloc_zero_contents, $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS)))) $(call add_json_bool, Malloc_pattern_fill_contents, $(MALLOC_PATTERN_FILL_CONTENTS)) $(call add_json_str, Override_rs_driver, $(OVERRIDE_RS_DRIVER)) diff --git a/core/soong_droiddoc_prebuilt.mk b/core/soong_droiddoc_prebuilt.mk index c0467df2be..4dc5d08b25 100644 --- a/core/soong_droiddoc_prebuilt.mk +++ b/core/soong_droiddoc_prebuilt.mk @@ -29,16 +29,6 @@ ifdef LOCAL_DROIDDOC_API_VERSIONS_XML $(eval $(call copy-one-file,$(LOCAL_DROIDDOC_API_VERSIONS_XML),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_generated-api-versions.xml)) endif -ifdef LOCAL_DROIDDOC_JDIFF_DOC_ZIP -$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_JDIFF_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip)) -$(call dist-for-goals,docs,$(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip) - -ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip - -.PHONY: $(LOCAL_MODULE) $(LOCAL_MODULE)-jdiff -$(LOCAL_MODULE) $(LOCAL_MODULE)-jdiff : $(OUT_DOCS)/$(LOCAL_MODULE)-jdiff-docs.zip -endif - ifdef LOCAL_DROIDDOC_METADATA_ZIP $(eval $(call copy-one-file,$(LOCAL_DROIDDOC_METADATA_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)-metadata.zip)) endif diff --git a/core/sysprop.mk b/core/sysprop.mk index a74ff9fedc..fdefcedaa9 100644 --- a/core/sysprop.mk +++ b/core/sysprop.mk @@ -54,7 +54,8 @@ define generate-common-build-props echo "ro.$(1).build.tags=$(BUILD_VERSION_TAGS)" >> $(2);\ echo "ro.$(1).build.type=$(TARGET_BUILD_VARIANT)" >> $(2);\ echo "ro.$(1).build.version.incremental=$(BUILD_NUMBER_FROM_FILE)" >> $(2);\ - echo "ro.$(1).build.version.release=$(PLATFORM_VERSION)" >> $(2);\ + echo "ro.$(1).build.version.release=$(PLATFORM_VERSION_LAST_STABLE)" >> $(2);\ + echo "ro.$(1).build.version.release_or_codename=$(PLATFORM_VERSION)" >> $(2);\ echo "ro.$(1).build.version.sdk=$(PLATFORM_SDK_VERSION)" >> $(2);\ endef diff --git a/core/tasks/apidiff.mk b/core/tasks/apidiff.mk deleted file mode 100644 index 76e4749366..0000000000 --- a/core/tasks/apidiff.mk +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (C) 2017 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# -# Rules for building API diffs. 
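The core/sysprop.mk hunk above splits the version property in two: ro.<partition>.build.version.release now carries the last stable release number, while the new ro.<partition>.build.version.release_or_codename carries PLATFORM_VERSION, which may be a codename. A small illustration of the resulting property lines; the version strings are example values for a development build, not taken from this change:

# Example values only; after API finalization both properties are equal.
PLATFORM_VERSION_LAST_STABLE = "11"
PLATFORM_VERSION = "S"

def version_props(partition):
    return [
        "ro.{}.build.version.release={}".format(
            partition, PLATFORM_VERSION_LAST_STABLE),
        "ro.{}.build.version.release_or_codename={}".format(
            partition, PLATFORM_VERSION),
    ]

print("\n".join(version_props("system")))
# ro.system.build.version.release=11
# ro.system.build.version.release_or_codename=S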
-# - -.PHONY: api-diff - -api-diff: api-stubs-docs-jdiff diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk index cd5fa8e3b3..c09daeb4af 100644 --- a/core/tasks/cts.mk +++ b/core/tasks/cts.mk @@ -14,8 +14,8 @@ test_suite_name := cts test_suite_tradefed := cts-tradefed -test_suite_dynamic_config := test/suite_harness/tools/cts-tradefed/DynamicConfig.xml -test_suite_readme := test/suite_harness/tools/cts-tradefed/README +test_suite_dynamic_config := cts/tools/cts-tradefed/DynamicConfig.xml +test_suite_readme := cts/tools/cts-tradefed/README include_test_suite_notice := true include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk diff --git a/envsetup.sh b/envsetup.sh index a3b07a76be..82c4565620 100644 --- a/envsetup.sh +++ b/envsetup.sh @@ -318,6 +318,59 @@ function setpaths() #export HOST_EXTRACFLAGS="-I "$T/system/kernel_headers/host_include } +function abazel() +{ + local T="$(gettop)" + if [ ! "$T" ]; then + echo "Couldn't locate the top of the tree. Try setting TOP." + return + fi + + case $(uname -s) in + Darwin) + ANDROID_BAZEL_PATH="${T}/prebuilts/bazel/darwin-x86_64/bazel" + ANDROID_BAZELRC_PATH="${T}/build/bazel/darwin.bazelrc" + ANDROID_BAZEL_JDK_PATH="${T}/prebuilts/jdk/jdk11/darwin-x86" + ;; + Linux) + ANDROID_BAZEL_PATH="${T}/prebuilts/bazel/linux-x86_64/bazel" + ANDROID_BAZELRC_PATH="${T}/build/bazel/linux.bazelrc" + ANDROID_BAZEL_JDK_PATH="${T}/prebuilts/jdk/jdk11/linux-x86" + ;; + *) + ANDROID_BAZEL_PATH= + ANDROID_BAZELRC_PATH= + ANDROID_BAZEL_JDK_PATH= + ;; + esac + + if [ -n "$ANDROID_BAZEL_PATH" -a -f "$ANDROID_BAZEL_PATH" ]; then + export ANDROID_BAZEL_PATH + else + echo "Couldn't locate Bazel binary" + return + fi + + if [ -n "$ANDROID_BAZELRC_PATH" -a -f "$ANDROID_BAZELRC_PATH" ]; then + export ANDROID_BAZELRC_PATH + else + echo "Couldn't locate bazelrc file for Bazel" + return + fi + + if [ -n "$ANDROID_BAZEL_JDK_PATH" -a -d "$ANDROID_BAZEL_JDK_PATH" ]; then + export ANDROID_BAZEL_JDK_PATH + else + echo "Couldn't locate JDK to use for Bazel" + return + fi + + echo "WARNING: Bazel support for the Android Platform is experimental and is undergoing development." + echo "WARNING: Currently, build stability is not guaranteed. Thank you." + echo + "${ANDROID_BAZEL_PATH}" --server_javabase="${ANDROID_BAZEL_JDK_PATH}" --bazelrc="${ANDROID_BAZELRC_PATH}" "$@" +} + function printconfig() { local T=$(gettop) @@ -769,7 +822,7 @@ function gettop local TOPFILE=build/make/core/envsetup.mk if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then # The following circumlocution ensures we remove symlinks from TOP. - (cd $TOP; PWD= /bin/pwd) + (cd "$TOP"; PWD= /bin/pwd) else if [ -f $TOPFILE ] ; then # The following circumlocution (repeated below as well) ensures @@ -779,13 +832,13 @@ function gettop else local HERE=$PWD local T= - while [ \( ! \( -f $TOPFILE \) \) -a \( $PWD != "/" \) ]; do + while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do \cd .. 
T=`PWD= /bin/pwd -P` done - \cd $HERE + \cd "$HERE" if [ -f "$T/$TOPFILE" ]; then - echo $T + echo "$T" fi fi fi @@ -1600,25 +1653,26 @@ function validate_current_shell() { # This allows loading only approved vendorsetup.sh files function source_vendorsetup() { unset VENDOR_PYTHONPATH + local T="$(gettop)" allowed= - for f in $(find -L device vendor product -maxdepth 4 -name 'allowed-vendorsetup_sh-files' 2>/dev/null | sort); do + for f in $(cd "$T" && find -L device vendor product -maxdepth 4 -name 'allowed-vendorsetup_sh-files' 2>/dev/null | sort); do if [ -n "$allowed" ]; then echo "More than one 'allowed_vendorsetup_sh-files' file found, not including any vendorsetup.sh files:" echo " $allowed" echo " $f" return fi - allowed="$f" + allowed="$T/$f" done allowed_files= [ -n "$allowed" ] && allowed_files=$(cat "$allowed") for dir in device vendor product; do - for f in $(test -d $dir && \ + for f in $(cd "$T" && test -d $dir && \ find -L $dir -maxdepth 4 -name 'vendorsetup.sh' 2>/dev/null | sort); do if [[ -z "$allowed" || "$allowed_files" =~ $f ]]; then - echo "including $f"; . "$f" + echo "including $f"; . "$T/$f" else echo "ignoring $f, not in $allowed" fi diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk index e9fb096647..07b07ce23a 100644 --- a/target/board/BoardConfigEmuCommon.mk +++ b/target/board/BoardConfigEmuCommon.mk @@ -73,7 +73,6 @@ else endif #vendor boot -TARGET_NO_VENDOR_BOOT := false BOARD_INCLUDE_DTB_IN_BOOTIMG := false BOARD_BOOT_HEADER_VERSION := 3 BOARD_MKBOOTIMG_ARGS += --header_version $(BOARD_BOOT_HEADER_VERSION) diff --git a/target/board/emulator_arm64/BoardConfig.mk b/target/board/emulator_arm64/BoardConfig.mk index a17cb7534d..95eff4bcbd 100644 --- a/target/board/emulator_arm64/BoardConfig.mk +++ b/target/board/emulator_arm64/BoardConfig.mk @@ -56,7 +56,6 @@ include build/make/target/board/BoardConfigGsiCommon.mk include build/make/target/board/BoardConfigEmuCommon.mk TARGET_NO_KERNEL := false -TARGET_NO_VENDOR_BOOT := false BOARD_USES_RECOVERY_AS_BOOT := true BOARD_BOOTIMAGE_PARTITION_SIZE := 0x02000000 diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk index d5331adfd6..42660e59a7 100644 --- a/target/board/generic_arm64/BoardConfig.mk +++ b/target/board/generic_arm64/BoardConfig.mk @@ -55,7 +55,6 @@ endif include build/make/target/board/BoardConfigGsiCommon.mk TARGET_NO_KERNEL := false -TARGET_NO_VENDOR_BOOT := true BOARD_USES_RECOVERY_AS_BOOT := true BOARD_KERNEL-4.19-GZ_BOOTIMAGE_PARTITION_SIZE := 47185920 diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk index 6f1bba0e05..866d7c7092 100644 --- a/target/board/generic_arm64/device.mk +++ b/target/board/generic_arm64/device.mk @@ -29,3 +29,5 @@ PRODUCT_COPY_FILES += \ kernel/prebuilts/5.4/arm64/kernel-5.4-gz:kernel-5.4-gz-allsyms \ kernel/prebuilts/5.4/arm64/kernel-5.4-lz4:kernel-5.4-lz4-allsyms endif + +PRODUCT_BUILD_VENDOR_BOOT_IMAGE := false diff --git a/target/product/base_system.mk b/target/product/base_system.mk index 586c058da2..1a28cf0162 100644 --- a/target/product/base_system.mk +++ b/target/product/base_system.mk @@ -38,7 +38,6 @@ PRODUCT_PACKAGES += \ bcc \ blank_screen \ blkid \ - service-blobstore \ bmgr \ bootanimation \ bootstat \ @@ -117,7 +116,6 @@ PRODUCT_PACKAGES += \ iptables \ ip-up-vpn \ javax.obex \ - service-jobscheduler \ keystore \ credstore \ ld.mc \ diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt index de6644cef9..f2ef002a02 
100644 --- a/target/product/gsi/current.txt +++ b/target/product/gsi/current.txt @@ -19,6 +19,7 @@ LLNDK: libsync.so LLNDK: libvndksupport.so LLNDK: libvulkan.so VNDK-SP: android.hardware.common-V1-ndk_platform.so +VNDK-SP: android.hardware.common.fmq-V1-ndk_platform.so VNDK-SP: android.hardware.graphics.common-V1-ndk_platform.so VNDK-SP: android.hardware.graphics.common@1.0.so VNDK-SP: android.hardware.graphics.common@1.1.so diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk index 7633abed63..ec30527aa2 100644 --- a/target/product/runtime_libart.mk +++ b/target/product/runtime_libart.mk @@ -30,8 +30,8 @@ PRODUCT_PACKAGES += com.android.runtime # ART APEX module. # Note that this package includes the minimal boot classpath JARs (listed in # ART_APEX_JARS), which should no longer be added directly to PRODUCT_PACKAGES. -PRODUCT_PACKAGES += com.android.art -PRODUCT_HOST_PACKAGES += com.android.art +PRODUCT_PACKAGES += com.android.art-autoselect +PRODUCT_HOST_PACKAGES += com.android.art-autoselect # Certificates. PRODUCT_PACKAGES += \ diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp index 45e05142be..e1543e7d14 100644 --- a/tools/releasetools/Android.bp +++ b/tools/releasetools/Android.bp @@ -125,6 +125,9 @@ python_defaults { required: [ "brillo_update_payload", "checkvintf", + "lz4", + "toybox", + "unpack_bootimg" ], target: { darwin: { diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py index 3eb5196e6d..dc6e3ca82f 100644 --- a/tools/releasetools/apex_utils.py +++ b/tools/releasetools/apex_utils.py @@ -49,7 +49,10 @@ class ApexApkSigner(object): def __init__(self, apex_path, key_passwords, codename_to_api_level_map): self.apex_path = apex_path - self.key_passwords = key_passwords + if not key_passwords: + self.key_passwords = dict() + else: + self.key_passwords = key_passwords self.codename_to_api_level_map = codename_to_api_level_map self.debugfs_path = os.path.join( OPTIONS.search_path, "bin", "debugfs_static") @@ -124,7 +127,7 @@ class ApexApkSigner(object): # signed apk file. 
unsigned_apk = common.MakeTempFile() os.rename(apk_path, unsigned_apk) - common.SignFile(unsigned_apk, apk_path, key_name, self.key_passwords, + common.SignFile(unsigned_apk, apk_path, key_name, self.key_passwords.get(key_name), codename_to_api_level_map=self.codename_to_api_level_map) has_signed_apk = True return payload_dir, has_signed_apk @@ -371,7 +374,7 @@ def SignApex(avbtool, apex_data, payload_key, container_key, container_pw, aligned_apex, signed_apex, container_key, - container_pw, + container_pw.get(container_key), codename_to_api_level_map=codename_to_api_level_map, extra_signapk_args=extra_signapk_args) diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py index 9cc072fe0f..169a11224a 100755 --- a/tools/releasetools/build_image.py +++ b/tools/releasetools/build_image.py @@ -250,6 +250,7 @@ def BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config): run_e2fsck = False needs_projid = prop_dict.get("needs_projid", 0) needs_casefold = prop_dict.get("needs_casefold", 0) + needs_compress = prop_dict.get("needs_compress", 0) if fs_type.startswith("ext"): build_command = [prop_dict["ext_mkuserimg"]] @@ -337,6 +338,8 @@ def BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config): build_command.append("--prjquota") if (needs_casefold): build_command.append("--casefold") + if (needs_compress): + build_command.append("--compression") else: raise BuildImageError( "Error: unknown filesystem type: {}".format(fs_type)) @@ -610,6 +613,7 @@ def ImagePropFromGlobalDict(glob_dict, mount_point): copy_prop("userdata_selinux_fc", "selinux_fc") copy_prop("needs_casefold", "needs_casefold") copy_prop("needs_projid", "needs_projid") + copy_prop("needs_compress", "needs_compress") elif mount_point == "cache": copy_prop("cache_fs_type", "fs_type") copy_prop("cache_size", "partition_size") diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py index f5dfbecffe..5e70af1e7d 100644 --- a/tools/releasetools/common.py +++ b/tools/releasetools/common.py @@ -527,6 +527,27 @@ class BuildInfo(object): return BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER_LEGACY return BuildInfo._RO_PRODUCT_PROPS_DEFAULT_SOURCE_ORDER_CURRENT + def _GetPlatformVersion(self): + version_sdk = self.GetBuildProp("ro.build.version.sdk") + # init code switches to version_release_or_codename (see b/158483506). After + # API finalization, release_or_codename will be the same as release. This + # is the best effort to support pre-S dev stage builds. 
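With the apex_utils.py changes above, key_passwords is now a per-key dictionary (defaulting to an empty dict), and each signing call looks up the password for the specific key with .get(), so keys without a password resolve to None instead of raising. A tiny sketch of that lookup pattern; the key names and password below are invented:

key_passwords = {"releasekey": "secret", "media": None}  # hypothetical keys
passwords = key_passwords or dict()  # constructor now tolerates a missing map

# Each SignFile call now receives the password for the key actually in use;
# unknown or passwordless keys resolve to None rather than KeyError.
for key_name in ("releasekey", "media", "platform"):
    print(key_name, passwords.get(key_name))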
+ if int(version_sdk) >= 30: + try: + return self.GetBuildProp("ro.build.version.release_or_codename") + except ExternalError: + logger.warning('Failed to find ro.build.version.release_or_codename') + + return self.GetBuildProp("ro.build.version.release") + + def _GetPartitionPlatformVersion(self, partition): + try: + return self.GetPartitionBuildProp("ro.build.version.release_or_codename", + partition) + except ExternalError: + return self.GetPartitionBuildProp("ro.build.version.release", + partition) + def GetOemProperty(self, key): if self.oem_props is not None and key in self.oem_props: return self.oem_dicts[0][key] @@ -543,7 +564,7 @@ class BuildInfo(object): self.GetPartitionBuildProp("ro.product.brand", partition), self.GetPartitionBuildProp("ro.product.name", partition), self.GetPartitionBuildProp("ro.product.device", partition), - self.GetPartitionBuildProp("ro.build.version.release", partition), + self._GetPartitionPlatformVersion(partition), self.GetPartitionBuildProp("ro.build.id", partition), self.GetPartitionBuildProp( "ro.build.version.incremental", partition), @@ -559,7 +580,7 @@ class BuildInfo(object): self.GetBuildProp("ro.product.brand"), self.GetBuildProp("ro.product.name"), self.GetBuildProp("ro.product.device"), - self.GetBuildProp("ro.build.version.release"), + self._GetPlatformVersion(), self.GetBuildProp("ro.build.id"), self.GetBuildProp("ro.build.version.incremental"), self.GetBuildProp("ro.build.type"), @@ -814,6 +835,15 @@ class PartitionBuildProps(object): props._LoadBuildProp(data) return props + @staticmethod + def FromBuildPropFile(name, build_prop_file): + """Constructs an instance from a build prop file.""" + + props = PartitionBuildProps("unknown", name) + with open(build_prop_file) as f: + props._LoadBuildProp(f.read()) + return props + def _LoadBuildProp(self, data): for line in data.split('\n'): line = line.strip() @@ -1003,15 +1033,35 @@ def MergeDynamicPartitionInfoDicts(framework_dict, vendor_dict): Returns: The merged dynamic partition info dictionary. """ - merged_dict = {} + + def uniq_concat(a, b): + combined = set(a.split(" ")) + combined.update(set(b.split(" "))) + combined = [item.strip() for item in combined if item.strip()] + return " ".join(sorted(combined)) + + if (framework_dict.get("use_dynamic_partitions") != + "true") or (vendor_dict.get("use_dynamic_partitions") != "true"): + raise ValueError("Both dictionaries must have use_dynamic_partitions=true") + + merged_dict = {"use_dynamic_partitions": "true"} + + merged_dict["dynamic_partition_list"] = uniq_concat( + framework_dict.get("dynamic_partition_list", ""), + vendor_dict.get("dynamic_partition_list", "")) + + # Super block devices are defined by the vendor dict. + if "super_block_devices" in vendor_dict: + merged_dict["super_block_devices"] = vendor_dict["super_block_devices"] + for block_device in merged_dict["super_block_devices"].split(" "): + key = "super_%s_device_size" % block_device + if key not in vendor_dict: + raise ValueError("Vendor dict does not contain required key %s." % key) + merged_dict[key] = vendor_dict[key] + # Partition groups and group sizes are defined by the vendor dict because # these values may vary for each board that uses a shared system image. 
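The uniq_concat helper above deduplicates and sorts the space-separated partition lists instead of blindly concatenating them, which is why the updated expectations later in this diff read 'product system vendor'. A verbatim standalone copy of the helper with example calls, for quick experimentation:

def uniq_concat(a, b):
    combined = set(a.split(" "))
    combined.update(set(b.split(" ")))
    combined = [item.strip() for item in combined if item.strip()]
    return " ".join(sorted(combined))

# Overlapping entries are deduplicated and the result is sorted, matching the
# 'product system vendor' expectation in test_common.py further down.
assert uniq_concat("system vendor", "vendor product") == "product system vendor"
assert uniq_concat("", "vendor  product") == "product vendor"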
merged_dict["super_partition_groups"] = vendor_dict["super_partition_groups"] - framework_dynamic_partition_list = framework_dict.get( - "dynamic_partition_list", "") - vendor_dynamic_partition_list = vendor_dict.get("dynamic_partition_list", "") - merged_dict["dynamic_partition_list"] = ("%s %s" % ( - framework_dynamic_partition_list, vendor_dynamic_partition_list)).strip() for partition_group in merged_dict["super_partition_groups"].split(" "): # Set the partition group's size using the value from the vendor dict. key = "super_%s_group_size" % partition_group @@ -1022,15 +1072,16 @@ def MergeDynamicPartitionInfoDicts(framework_dict, vendor_dict): # Set the partition group's partition list using a concatenation of the # framework and vendor partition lists. key = "super_%s_partition_list" % partition_group - merged_dict[key] = ( - "%s %s" % - (framework_dict.get(key, ""), vendor_dict.get(key, ""))).strip() - - # Pick virtual ab related flags from vendor dict, if defined. - if "virtual_ab" in vendor_dict.keys(): - merged_dict["virtual_ab"] = vendor_dict["virtual_ab"] - if "virtual_ab_retrofit" in vendor_dict.keys(): - merged_dict["virtual_ab_retrofit"] = vendor_dict["virtual_ab_retrofit"] + merged_dict[key] = uniq_concat( + framework_dict.get(key, ""), vendor_dict.get(key, "")) + + # Various other flags should be copied from the vendor dict, if defined. + for key in ("virtual_ab", "virtual_ab_retrofit", "lpmake", + "super_metadata_device", "super_partition_error_limit", + "super_partition_size"): + if key in vendor_dict.keys(): + merged_dict[key] = vendor_dict[key] + return merged_dict @@ -1257,23 +1308,27 @@ def _BuildBootableImage(image_name, sourcedir, fs_config_file, info_dict=None, for building the requested image. """ + if info_dict is None: + info_dict = OPTIONS.info_dict + # "boot" or "recovery", without extension. partition_name = os.path.basename(sourcedir).lower() + kernel = None if partition_name == "recovery": - kernel = "kernel" + if info_dict.get("exclude_kernel_from_recovery_image") == "true": + logger.info("Excluded kernel binary from recovery image.") + else: + kernel = "kernel" else: kernel = image_name.replace("boot", "kernel") kernel = kernel.replace(".img", "") - if not os.access(os.path.join(sourcedir, kernel), os.F_OK): + if kernel and not os.access(os.path.join(sourcedir, kernel), os.F_OK): return None if has_ramdisk and not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK): return None - if info_dict is None: - info_dict = OPTIONS.info_dict - img = tempfile.NamedTemporaryFile() if has_ramdisk: @@ -1283,7 +1338,9 @@ def _BuildBootableImage(image_name, sourcedir, fs_config_file, info_dict=None, # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg" - cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, kernel)] + cmd = [mkbootimg] + if kernel: + cmd += ["--kernel", os.path.join(sourcedir, kernel)] fn = os.path.join(sourcedir, "second") if os.access(fn, os.F_OK): diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py index bfd2f90c44..6f414a5057 100755 --- a/tools/releasetools/merge_target_files.py +++ b/tools/releasetools/merge_target_files.py @@ -70,6 +70,10 @@ Usage: merge_target_files.py [args] --rebuild_recovery Deprecated; does nothing. + --allow-duplicate-apkapex-keys + If provided, duplicate APK/APEX keys are ignored and the value from the + framework is used. + --keep-tmp Keep tempoary files for debugging purposes. 
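In _BuildBootableImage above, the kernel is now optional: for a recovery image with exclude_kernel_from_recovery_image=true in the info dict, no --kernel flag is passed to mkbootimg at all. A condensed, illustrative sketch of just that argument-assembly step; the helper name and the sourcedir layout are assumptions for the example, and ramdisk/cmdline handling is omitted:

import os

def mkbootimg_kernel_args(sourcedir, partition_name, info_dict,
                          image_name="recovery.img"):
    kernel = None
    if partition_name == "recovery":
        if info_dict.get("exclude_kernel_from_recovery_image") != "true":
            kernel = "kernel"
    else:
        kernel = image_name.replace("boot", "kernel").replace(".img", "")
    cmd = [os.getenv("MKBOOTIMG") or "mkbootimg"]
    if kernel:
        cmd += ["--kernel", os.path.join(sourcedir, kernel)]
    return cmd

# With the kernel excluded, the command starts as just ["mkbootimg"].
print(mkbootimg_kernel_args("/tmp/RECOVERY", "recovery",
                            {"exclude_kernel_from_recovery_image": "true"}))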
""" @@ -110,6 +114,8 @@ OPTIONS.output_img = None OPTIONS.output_super_empty = None # TODO(b/132730255): Remove this option. OPTIONS.rebuild_recovery = False +# TODO(b/150582573): Remove this option. +OPTIONS.allow_duplicate_apkapex_keys = False OPTIONS.keep_tmp = False # In an item list (framework or vendor), we may see entries that select whole @@ -526,6 +532,7 @@ def item_list_to_partition_set(item_list): Args: item_list: A list of items in a target files package. + Returns: A set of partitions extracted from the list of items. """ @@ -547,7 +554,6 @@ def process_apex_keys_apk_certs_common(framework_target_files_dir, output_target_files_dir, framework_partition_set, vendor_partition_set, file_name): - """Performs special processing for META/apexkeys.txt or META/apkcerts.txt. This function merges the contents of the META/apexkeys.txt or @@ -597,7 +603,12 @@ def process_apex_keys_apk_certs_common(framework_target_files_dir, if partition_tag in partition_set: if key in merged_dict: - raise ValueError('Duplicate key %s' % key) + if OPTIONS.allow_duplicate_apkapex_keys: + # TODO(b/150582573) Always raise on duplicates. + logger.warning('Duplicate key %s' % key) + continue + else: + raise ValueError('Duplicate key %s' % key) merged_dict[key] = value @@ -647,8 +658,7 @@ def copy_file_contexts(framework_target_files_dir, vendor_target_files_dir, def process_special_cases(framework_target_files_temp_dir, vendor_target_files_temp_dir, output_target_files_temp_dir, - framework_misc_info_keys, - framework_partition_set, + framework_misc_info_keys, framework_partition_set, vendor_partition_set): """Performs special-case processing for certain target files items. @@ -967,7 +977,7 @@ def merge_target_files(temp_dir, framework_target_files, framework_item_list, rebuild_recovery) if not check_target_files_vintf.CheckVintf(output_target_files_temp_dir): - raise RuntimeError("Incompatible VINTF metadata") + raise RuntimeError('Incompatible VINTF metadata') generate_images(output_target_files_temp_dir, rebuild_recovery) @@ -1075,8 +1085,10 @@ def main(): OPTIONS.output_img = a elif o == '--output-super-empty': OPTIONS.output_super_empty = a - elif o == '--rebuild_recovery': # TODO(b/132730255): Warn + elif o == '--rebuild_recovery': # TODO(b/132730255): Warn OPTIONS.rebuild_recovery = True + elif o == '--allow-duplicate-apkapex-keys': + OPTIONS.allow_duplicate_apkapex_keys = True elif o == '--keep-tmp': OPTIONS.keep_tmp = True else: @@ -1104,6 +1116,7 @@ def main(): 'output-img=', 'output-super-empty=', 'rebuild_recovery', + 'allow-duplicate-apkapex-keys', 'keep-tmp', ], extra_option_handler=option_handler) diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py index 18b2b76227..7dc648f322 100755 --- a/tools/releasetools/ota_from_target_files.py +++ b/tools/releasetools/ota_from_target_files.py @@ -202,6 +202,10 @@ A/B OTA specific options ones. Should only be used if caller knows it's safe to do so (e.g. all the postinstall work is to dexopt apps and a data wipe will happen immediately after). Only meaningful when generating A/B OTAs. + + --partial "<PARTITION> [<PARTITION>[...]]" + Generate partial updates, overriding ab_partitions list with the given + list. 
""" from __future__ import print_function @@ -257,6 +261,7 @@ OPTIONS.extracted_input = None OPTIONS.skip_postinstall = False OPTIONS.skip_compatibility_check = False OPTIONS.disable_fec_computation = False +OPTIONS.partial = None POSTINSTALL_CONFIG = 'META/postinstall_config.txt' @@ -593,6 +598,48 @@ class AbOtaPropertyFiles(StreamingPropertyFiles): return (payload_offset, metadata_total) +def UpdatesInfoForSpecialUpdates(content, partitions_filter, + delete_keys=None): + """ Updates info file for secondary payload generation, partial update, etc. + + Scan each line in the info file, and remove the unwanted partitions from + the dynamic partition list in the related properties. e.g. + "super_google_dynamic_partitions_partition_list=system vendor product" + will become "super_google_dynamic_partitions_partition_list=system". + + Args: + content: The content of the input info file. e.g. misc_info.txt. + partitions_filter: A function to filter the desired partitions from a given + list + delete_keys: A list of keys to delete in the info file + + Returns: + A string of the updated info content. + """ + + output_list = [] + # The suffix in partition_list variables that follows the name of the + # partition group. + list_suffix = 'partition_list' + for line in content.splitlines(): + if line.startswith('#') or '=' not in line: + output_list.append(line) + continue + key, value = line.strip().split('=', 1) + + if delete_keys and key in delete_keys: + pass + elif key.endswith(list_suffix): + partitions = value.split() + # TODO for partial update, partitions in the same group must be all + # updated or all omitted + partitions = filter(partitions_filter, partitions) + output_list.append('{}={}'.format(key, ' '.join(partitions))) + else: + output_list.append(line) + return '\n'.join(output_list) + + def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False): """Returns a target-files.zip file for generating secondary payload. @@ -614,44 +661,15 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False): """ def GetInfoForSecondaryImages(info_file): - """Updates info file for secondary payload generation. - - Scan each line in the info file, and remove the unwanted partitions from - the dynamic partition list in the related properties. e.g. - "super_google_dynamic_partitions_partition_list=system vendor product" - will become "super_google_dynamic_partitions_partition_list=system". - - Args: - info_file: The input info file. e.g. misc_info.txt. - - Returns: - A string of the updated info content. - """ - - output_list = [] + """Updates info file for secondary payload generation.""" with open(info_file) as f: - lines = f.read().splitlines() - - # The suffix in partition_list variables that follows the name of the - # partition group. 
- LIST_SUFFIX = 'partition_list' - for line in lines: - if line.startswith('#') or '=' not in line: - output_list.append(line) - continue - key, value = line.strip().split('=', 1) - if key == 'dynamic_partition_list' or key.endswith(LIST_SUFFIX): - partitions = value.split() - partitions = [partition for partition in partitions if partition - not in SECONDARY_PAYLOAD_SKIPPED_IMAGES] - output_list.append('{}={}'.format(key, ' '.join(partitions))) - elif key in ['virtual_ab', "virtual_ab_retrofit"]: - # Remove virtual_ab flag from secondary payload so that OTA client - # don't use snapshots for secondary update - pass - else: - output_list.append(line) - return '\n'.join(output_list) + content = f.read() + # Remove virtual_ab flag from secondary payload so that OTA client + # don't use snapshots for secondary update + delete_keys = ['virtual_ab', "virtual_ab_retrofit"] + return UpdatesInfoForSpecialUpdates( + content, lambda p: p not in SECONDARY_PAYLOAD_SKIPPED_IMAGES, + delete_keys) target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip") target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True) @@ -729,6 +747,76 @@ def GetTargetFilesZipWithoutPostinstallConfig(input_file): return target_file +def GetTargetFilesZipForPartialUpdates(input_file, ab_partitions): + """Returns a target-files.zip for partial ota update package generation. + + This function modifies ab_partitions list with the desired partitions before + calling the brillo_update_payload script. It also cleans up the reference to + the excluded partitions in the info file, e.g misc_info.txt. + + Args: + input_file: The input target-files.zip filename. + ab_partitions: A list of partitions to include in the partial update + + Returns: + The filename of target-files.zip used for partial ota update. + """ + + def AddImageForPartition(partition_name): + """Add the archive name for a given partition to the copy list.""" + for prefix in ['IMAGES', 'RADIO']: + image_path = '{}/{}.img'.format(prefix, partition_name) + if image_path in namelist: + copy_entries.append(image_path) + map_path = '{}/{}.map'.format(prefix, partition_name) + if map_path in namelist: + copy_entries.append(map_path) + return + + raise ValueError("Cannot find {} in input zipfile".format(partition_name)) + + with zipfile.ZipFile(input_file, allowZip64=True) as input_zip: + original_ab_partitions = input_zip.read(AB_PARTITIONS).decode().splitlines() + namelist = input_zip.namelist() + + unrecognized_partitions = [partition for partition in ab_partitions if + partition not in original_ab_partitions] + if unrecognized_partitions: + raise ValueError("Unrecognized partitions when generating partial updates", + unrecognized_partitions) + + logger.info("Generating partial updates for %s", ab_partitions) + + copy_entries = ['META/update_engine_config.txt'] + for partition_name in ab_partitions: + AddImageForPartition(partition_name) + + # Use zip2zip to avoid extracting the zipfile. 
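UpdatesInfoForSpecialUpdates above is now shared by the secondary-payload path and the new partial-update path: it rewrites every *_partition_list property with a caller-supplied filter and can drop keys outright (e.g. virtual_ab). A brief usage example; the misc_info contents are invented, and the import assumes the patched releasetools directory is on PYTHONPATH:

from ota_from_target_files import UpdatesInfoForSpecialUpdates

content = "\n".join([
    "use_dynamic_partitions=true",
    "virtual_ab=true",
    "super_partition_groups=google_dynamic_partitions",
    "super_google_dynamic_partitions_partition_list=system vendor product",
])

# Keep only the selected partitions and drop the virtual_ab flag, as the
# secondary-payload path does.
updated = UpdatesInfoForSpecialUpdates(
    content, lambda p: p in ("system", "product"), delete_keys=["virtual_ab"])
print(updated)
# use_dynamic_partitions=true
# super_partition_groups=google_dynamic_partitions
# super_google_dynamic_partitions_partition_list=system product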
+ partial_target_file = common.MakeTempFile(suffix='.zip') + cmd = ['zip2zip', '-i', input_file, '-o', partial_target_file] + cmd.extend(['{}:{}'.format(name, name) for name in copy_entries]) + common.RunAndCheckOutput(cmd) + + partial_target_zip = zipfile.ZipFile(partial_target_file, 'a', + allowZip64=True) + with zipfile.ZipFile(input_file, allowZip64=True) as input_zip: + common.ZipWriteStr(partial_target_zip, 'META/ab_partitions.txt', + '\n'.join(ab_partitions)) + for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]: + if info_file not in input_zip.namelist(): + logger.warning('Cannot find %s in input zipfile', info_file) + continue + content = input_zip.read(info_file).decode() + modified_info = UpdatesInfoForSpecialUpdates( + content, lambda p: p in ab_partitions) + common.ZipWriteStr(partial_target_zip, info_file, modified_info) + + # TODO(xunchang) handle 'META/care_map.pb', 'META/postinstall_config.txt' + common.ZipClose(partial_target_zip) + + return partial_target_file + + def GetTargetFilesZipForRetrofitDynamicPartitions(input_file, super_block_devices, dynamic_partition_list): @@ -837,10 +925,16 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts) source_info = None + additional_args = [] + if OPTIONS.retrofit_dynamic_partitions: target_file = GetTargetFilesZipForRetrofitDynamicPartitions( target_file, target_info.get("super_block_devices").strip().split(), target_info.get("dynamic_partition_list").strip().split()) + elif OPTIONS.partial: + target_file = GetTargetFilesZipForPartialUpdates(target_file, + OPTIONS.partial) + additional_args += ["--is_partial_update", "true"] elif OPTIONS.skip_postinstall: target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file) # Target_file may have been modified, reparse ab_partitions @@ -862,7 +956,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): partition_timestamps = [ part.partition_name + ":" + part.version for part in metadata.postcondition.partition_state] - additional_args = ["--max_timestamp", max_timestamp] + additional_args += ["--max_timestamp", max_timestamp] if partition_timestamps: additional_args.extend( ["--partition_timestamps", ",".join( @@ -1006,6 +1100,11 @@ def main(argv): OPTIONS.force_non_ab = True elif o == "--boot_variable_file": OPTIONS.boot_variable_file = a + elif o == "--partial": + partitions = a.split() + if not partitions: + raise ValueError("Cannot parse partitions in {}".format(a)) + OPTIONS.partial = partitions else: return False return True @@ -1044,6 +1143,7 @@ def main(argv): "disable_fec_computation", "force_non_ab", "boot_variable_file=", + "partial=", ], extra_option_handler=option_handler) if len(args) != 2: @@ -1058,6 +1158,8 @@ def main(argv): # OTA package. if OPTIONS.incremental_source is None: raise ValueError("Cannot generate downgradable full OTAs") + if OPTIONS.partial: + raise ValueError("Cannot generate downgradable partial OTAs") # Load the build info dicts from the zip directly or the extracted input # directory. We don't need to unzip the entire target-files zips, because they @@ -1072,6 +1174,10 @@ def main(argv): with zipfile.ZipFile(args[0], 'r', allowZip64=True) as input_zip: OPTIONS.info_dict = common.LoadInfoDict(input_zip) + # TODO(xunchang) for retrofit and partial updates, maybe we should rebuild the + # target-file and reload the info_dict. So the info will be consistent with + # the modified target-file. 
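The new --partial flag above takes a single space-separated string (split into OPTIONS.partial), and GetTargetFilesZipForPartialUpdates then rejects any partition not listed in META/ab_partitions.txt before copying the selected images with zip2zip. The validation step in isolation, as a runnable sketch with made-up partition names:

def check_partial_partitions(requested, ab_partitions_txt):
    # Same check as in GetTargetFilesZipForPartialUpdates above.
    original = ab_partitions_txt.splitlines()
    unrecognized = [p for p in requested if p not in original]
    if unrecognized:
        raise ValueError(
            "Unrecognized partitions when generating partial updates",
            unrecognized)
    return requested

ab_partitions_txt = "boot\nsystem\nvendor\nproduct"
print(check_partial_partitions(["system", "product"], ab_partitions_txt))
# check_partial_partitions(["odm"], ab_partitions_txt) would raise ValueError.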
+ logger.info("--- target info ---") common.DumpInfoDict(OPTIONS.info_dict) diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py index f0e4fcfeca..cb0f6e6ba5 100644 --- a/tools/releasetools/ota_utils.py +++ b/tools/releasetools/ota_utils.py @@ -14,14 +14,17 @@ import copy import itertools +import logging import os import zipfile import ota_metadata_pb2 from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile, ZipWriteStr, BuildInfo, LoadDictionaryFromFile, - SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps) + SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps, + MakeTempDir, RunAndCheckOutput, ExternalError) +logger = logging.getLogger(__name__) OPTIONS.no_signing = False OPTIONS.force_non_ab = False @@ -38,6 +41,9 @@ METADATA_NAME = 'META-INF/com/android/metadata' METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb' UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*'] +# See sysprop.mk. If file is moved, add new search paths here; don't remove +# existing search paths. +RAMDISK_BUILD_PROP_REL_PATHS = ['system/etc/ramdisk/build.prop'] def FinalizeMetadata(metadata, input_file, output_file, needed_property_files): """Finalizes the metadata and signs an A/B OTA package. @@ -561,3 +567,55 @@ def SignOutput(temp_zip_name, output_zip_name): SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw, whole_file=True) + + +def GetBootImageTimestamp(boot_img): + """ + Get timestamp from ramdisk within the boot image + + Args: + boot_img: the boot image file. Ramdisk must be compressed with lz4 format. + + Return: + An integer that corresponds to the timestamp of the boot image, or None + if file has unknown format. Raise exception if an unexpected error has + occurred. + """ + + tmp_dir = MakeTempDir('boot_', suffix='.img') + try: + RunAndCheckOutput(['unpack_bootimg', '--boot_img', boot_img, '--out', tmp_dir]) + ramdisk = os.path.join(tmp_dir, 'ramdisk') + if not os.path.isfile(ramdisk): + logger.warning('Unable to get boot image timestamp: no ramdisk in boot') + return None + uncompressed_ramdisk = os.path.join(tmp_dir, 'uncompressed_ramdisk') + RunAndCheckOutput(['lz4', '-d', ramdisk, uncompressed_ramdisk]) + + abs_uncompressed_ramdisk = os.path.abspath(uncompressed_ramdisk) + extracted_ramdisk = MakeTempDir('extracted_ramdisk') + # Use "toybox cpio" instead of "cpio" because the latter invokes cpio from + # the host environment. 
+ RunAndCheckOutput(['toybox', 'cpio', '-F', abs_uncompressed_ramdisk, '-i'], + cwd=extracted_ramdisk) + + prop_file = None + for search_path in RAMDISK_BUILD_PROP_REL_PATHS: + prop_file = os.path.join(extracted_ramdisk, search_path) + if os.path.isfile(prop_file): + break + logger.warning('Unable to get boot image timestamp: no %s in ramdisk', search_path) + + if not prop_file: + return None + + props = PartitionBuildProps.FromBuildPropFile('boot', prop_file) + timestamp = props.GetProp('ro.bootimage.build.date.utc') + if timestamp: + return int(timestamp) + logger.warning('Unable to get boot image timestamp: ro.bootimage.build.date.utc is undefined') + return None + + except ExternalError as e: + logger.warning('Unable to get boot image timestamp: %s', e) + return None diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py index 220f5192a5..e8674b6f7f 100755 --- a/tools/releasetools/sign_target_files_apks.py +++ b/tools/releasetools/sign_target_files_apks.py @@ -515,7 +515,7 @@ def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info, data, payload_key, container_key, - key_passwords[container_key], + key_passwords, apk_keys, codename_to_api_level_map, no_hashtree=True, diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py index 22fc85adf1..ee28571878 100644 --- a/tools/releasetools/test_common.py +++ b/tools/releasetools/test_common.py @@ -1418,13 +1418,17 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): def test_MergeDynamicPartitionInfoDicts_ReturnsMergedDict(self): framework_dict = { + 'use_dynamic_partitions': 'true', 'super_partition_groups': 'group_a', 'dynamic_partition_list': 'system', 'super_group_a_partition_list': 'system', } vendor_dict = { + 'use_dynamic_partitions': 'true', 'super_partition_groups': 'group_a group_b', 'dynamic_partition_list': 'vendor product', + 'super_block_devices': 'super', + 'super_super_device_size': '3000', 'super_group_a_partition_list': 'vendor', 'super_group_a_group_size': '1000', 'super_group_b_partition_list': 'product', @@ -1434,8 +1438,11 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): framework_dict=framework_dict, vendor_dict=vendor_dict) expected_merged_dict = { + 'use_dynamic_partitions': 'true', 'super_partition_groups': 'group_a group_b', - 'dynamic_partition_list': 'system vendor product', + 'dynamic_partition_list': 'product system vendor', + 'super_block_devices': 'super', + 'super_super_device_size': '3000', 'super_group_a_partition_list': 'system vendor', 'super_group_a_group_size': '1000', 'super_group_b_partition_list': 'product', @@ -1445,12 +1452,14 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): def test_MergeDynamicPartitionInfoDicts_IgnoringFrameworkGroupSize(self): framework_dict = { + 'use_dynamic_partitions': 'true', 'super_partition_groups': 'group_a', 'dynamic_partition_list': 'system', 'super_group_a_partition_list': 'system', 'super_group_a_group_size': '5000', } vendor_dict = { + 'use_dynamic_partitions': 'true', 'super_partition_groups': 'group_a group_b', 'dynamic_partition_list': 'vendor product', 'super_group_a_partition_list': 'vendor', @@ -1462,8 +1471,9 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): framework_dict=framework_dict, vendor_dict=vendor_dict) expected_merged_dict = { + 'use_dynamic_partitions': 'true', 'super_partition_groups': 'group_a group_b', - 'dynamic_partition_list': 'system vendor product', + 'dynamic_partition_list': 'product system vendor', 
         'super_group_a_partition_list': 'system vendor',
         'super_group_a_group_size': '1000',
         'super_group_b_partition_list': 'product',
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index f96bc7ba49..9752c2b8ff 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -27,6 +27,7 @@ from ota_utils import (
     FinalizeMetadata, GetPackageMetadata, PropertyFiles)
 from ota_from_target_files import (
     _LoadOemDicts, AbOtaPropertyFiles,
+    GetTargetFilesZipForPartialUpdates,
     GetTargetFilesZipForSecondaryImages,
     GetTargetFilesZipWithoutPostinstallConfig,
     Payload, PayloadSigner, POSTINSTALL_CONFIG,
@@ -450,6 +451,86 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
         updated_dynamic_partitions_info)
 
   @test_utils.SkipIfExternalToolsUnavailable()
+  def test_GetTargetFilesZipForPartialUpdates_singlePartition(self):
+    input_file = construct_target_files()
+    with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
+      common.ZipWriteStr(append_zip, 'IMAGES/system.map', 'fake map')
+
+    target_file = GetTargetFilesZipForPartialUpdates(input_file, ['system'])
+    with zipfile.ZipFile(target_file) as verify_zip:
+      namelist = verify_zip.namelist()
+      ab_partitions = verify_zip.read('META/ab_partitions.txt').decode()
+
+    self.assertIn('META/ab_partitions.txt', namelist)
+    self.assertIn('META/update_engine_config.txt', namelist)
+    self.assertIn('IMAGES/system.img', namelist)
+    self.assertIn('IMAGES/system.map', namelist)
+
+    self.assertNotIn('IMAGES/boot.img', namelist)
+    self.assertNotIn('IMAGES/system_other.img', namelist)
+    self.assertNotIn('RADIO/bootloader.img', namelist)
+    self.assertNotIn('RADIO/modem.img', namelist)
+
+    self.assertEqual('system', ab_partitions)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_GetTargetFilesZipForPartialUpdates_unrecognizedPartition(self):
+    input_file = construct_target_files()
+    self.assertRaises(ValueError, GetTargetFilesZipForPartialUpdates,
+                      input_file, ['product'])
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_GetTargetFilesZipForPartialUpdates_dynamicPartitions(self):
+    input_file = construct_target_files(secondary=True)
+    misc_info = '\n'.join([
+        'use_dynamic_partition_size=true',
+        'use_dynamic_partitions=true',
+        'dynamic_partition_list=system vendor product',
+        'super_partition_groups=google_dynamic_partitions',
+        'super_google_dynamic_partitions_group_size=4873781248',
+        'super_google_dynamic_partitions_partition_list=system vendor product',
+    ])
+    dynamic_partitions_info = '\n'.join([
+        'super_partition_groups=google_dynamic_partitions',
+        'super_google_dynamic_partitions_group_size=4873781248',
+        'super_google_dynamic_partitions_partition_list=system vendor product',
+    ])
+
+    with zipfile.ZipFile(input_file, 'a', allowZip64=True) as append_zip:
+      common.ZipWriteStr(append_zip, 'META/misc_info.txt', misc_info)
+      common.ZipWriteStr(append_zip, 'META/dynamic_partitions_info.txt',
+                         dynamic_partitions_info)
+
+    target_file = GetTargetFilesZipForPartialUpdates(input_file,
+                                                     ['boot', 'system'])
+    with zipfile.ZipFile(target_file) as verify_zip:
+      namelist = verify_zip.namelist()
+      ab_partitions = verify_zip.read('META/ab_partitions.txt').decode()
+      updated_misc_info = verify_zip.read('META/misc_info.txt').decode()
+      updated_dynamic_partitions_info = verify_zip.read(
+          'META/dynamic_partitions_info.txt').decode()
+
+    self.assertIn('META/ab_partitions.txt', namelist)
+    self.assertIn('IMAGES/boot.img', namelist)
+    self.assertIn('IMAGES/system.img', namelist)
+    self.assertIn('META/misc_info.txt', namelist)
+    self.assertIn('META/dynamic_partitions_info.txt', namelist)
+
+    self.assertNotIn('IMAGES/system_other.img', namelist)
+    self.assertNotIn('RADIO/bootloader.img', namelist)
+    self.assertNotIn('RADIO/modem.img', namelist)
+
+    # Check that vendor & product are removed from the partition lists.
+    expected_misc_info = misc_info.replace('system vendor product',
+                                           'system')
+    expected_dynamic_partitions_info = dynamic_partitions_info.replace(
+        'system vendor product', 'system')
+    self.assertEqual(expected_misc_info, updated_misc_info)
+    self.assertEqual(expected_dynamic_partitions_info,
+                     updated_dynamic_partitions_info)
+    self.assertEqual('boot\nsystem', ab_partitions)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetTargetFilesZipWithoutPostinstallConfig(self):
     input_file = construct_target_files()
     target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
@@ -1205,11 +1286,12 @@ class RuntimeFingerprintTest(test_utils.ReleaseToolsTestCase):
   ]
 
   BUILD_PROP = [
-      'ro.build.version.release=version-release',
       'ro.build.id=build-id',
       'ro.build.version.incremental=version-incremental',
       'ro.build.type=build-type',
       'ro.build.tags=build-tags',
+      'ro.build.version.release=version-release',
+      'ro.build.version.release_or_codename=version-release',
       'ro.build.version.sdk=30',
       'ro.build.version.security_patch=2020',
       'ro.build.date.utc=12345678',
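
Editor's note: the sketch below is not part of the patch. It mirrors the unpack_bootimg -> lz4 -> "toybox cpio" pipeline that the new ota_utils.GetBootImageTimestamp() helper uses, rewritten as a standalone script on the Python standard library so the extraction steps can be tried outside the releasetools tree. The script name, the hard-coded build.prop search path, and the assumption that unpack_bootimg, lz4 and toybox are on PATH are illustrative only.

#!/usr/bin/env python3
"""Standalone sketch: read ro.bootimage.build.date.utc out of a boot image."""
import os
import subprocess
import sys
import tempfile


def boot_image_timestamp(boot_img):
  """Returns the ramdisk's ro.bootimage.build.date.utc as an int, or None."""
  with tempfile.TemporaryDirectory() as tmp_dir:
    # Split the boot image into kernel/ramdisk/..., as the patch does.
    subprocess.run(['unpack_bootimg', '--boot_img', boot_img, '--out', tmp_dir],
                   check=True, capture_output=True)
    ramdisk = os.path.join(tmp_dir, 'ramdisk')
    if not os.path.isfile(ramdisk):
      return None

    # The ramdisk is expected to be lz4-compressed, as in the patch.
    uncompressed = os.path.join(tmp_dir, 'uncompressed_ramdisk')
    subprocess.run(['lz4', '-d', ramdisk, uncompressed],
                   check=True, capture_output=True)

    # Extract with "toybox cpio" rather than the host cpio, same as the patch.
    extracted = os.path.join(tmp_dir, 'extracted_ramdisk')
    os.mkdir(extracted)
    subprocess.run(['toybox', 'cpio', '-F', os.path.abspath(uncompressed), '-i'],
                   cwd=extracted, check=True, capture_output=True)

    # Same location as RAMDISK_BUILD_PROP_REL_PATHS in the patch.
    prop_file = os.path.join(extracted, 'system/etc/ramdisk/build.prop')
    if not os.path.isfile(prop_file):
      return None
    with open(prop_file) as f:
      for line in f:
        line = line.strip()
        if line.startswith('ro.bootimage.build.date.utc='):
          return int(line.split('=', 1)[1])
  return None


if __name__ == '__main__':
  # Usage: boot_image_timestamp.py <boot.img>
  print(boot_image_timestamp(sys.argv[1]))

A downgrade check in the spirit of the patch would then simply compare the value returned for the source boot image against the one for the target boot image.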