39 files changed, 1579 insertions, 1902 deletions
diff --git a/Changes.md b/Changes.md
index 3109e9bbf0..84c8d950fc 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,15 @@
 # Build System Changes for Android.mk Writers
 
+## `LOCAL_REQUIRED_MODULES` requires listed modules to exist {#BUILD_BROKEN_MISSING_REQUIRED_MODULES}
+
+Modules listed in `LOCAL_REQUIRED_MODULES`, `LOCAL_HOST_REQUIRED_MODULES` and
+`LOCAL_TARGET_REQUIRED_MODULES` need to exist unless `ALLOW_MISSING_DEPENDENCIES`
+is set.
+
+To temporarily relax the missing required modules check, use:
+
+`BUILD_BROKEN_MISSING_REQUIRED_MODULES := true`
+
 ## Changes in system properties settings
 
 ### Product variables
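The Changes.md entry above is the user-visible core of this change: a dangling `LOCAL_REQUIRED_MODULES` entry is now a build error instead of a silent no-op. A minimal sketch of an Android.mk that would now fail if no module named `bar` exists anywhere in the tree (the module names here are hypothetical):

  include $(CLEAR_VARS)
  LOCAL_MODULE := foo
  LOCAL_REQUIRED_MODULES := bar  # must resolve to a real module at build time
  include $(BUILD_PHONY_PACKAGE)

The escape hatch is a board-level setting, typically placed in BoardConfig.mk:

  BUILD_BROKEN_MISSING_REQUIRED_MODULES := true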
diff --git a/core/Makefile b/core/Makefile
index 0541f22001..92723d3277 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -553,12 +553,8 @@ event_log_tags_file := $(TARGET_OUT)/etc/event-log-tags
 all_event_log_tags_src := \
     $(sort $(foreach m, $(ALL_MODULES), $(ALL_MODULES.$(m).EVENT_LOG_TAGS)))
 
-# PDK builds will already have a full list of tags that needs to get merged
-# in with the ones from source
-pdk_fusion_log_tags_file := $(patsubst $(PRODUCT_OUT)/%,$(_pdk_fusion_intermediates)/%,$(filter $(event_log_tags_file),$(ALL_PDK_FUSION_FILES)))
-
-$(all_event_log_tags_file): PRIVATE_SRC_FILES := $(all_event_log_tags_src) $(pdk_fusion_log_tags_file)
-$(all_event_log_tags_file): $(all_event_log_tags_src) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
+$(all_event_log_tags_file): PRIVATE_SRC_FILES := $(all_event_log_tags_src)
+$(all_event_log_tags_file): $(all_event_log_tags_src) $(MERGETAGS) build/make/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES)
@@ -572,9 +568,9 @@ event_log_tags_src := \
       $(ALL_MODULES.$(m).EVENT_LOG_TAGS)) \
     $(filter-out vendor/% device/% out/%,$(all_event_log_tags_src)))
 
-$(event_log_tags_file): PRIVATE_SRC_FILES := $(event_log_tags_src) $(pdk_fusion_log_tags_file)
+$(event_log_tags_file): PRIVATE_SRC_FILES := $(event_log_tags_src)
 $(event_log_tags_file): PRIVATE_MERGED_FILE := $(all_event_log_tags_file)
-$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
+$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ -m $(PRIVATE_MERGED_FILE) $(PRIVATE_SRC_FILES)
@@ -955,7 +951,6 @@ tools_notice_file_txt := $(HOST_OUT_INTERMEDIATES)/NOTICE.txt
 tools_notice_file_html := $(HOST_OUT_INTERMEDIATES)/NOTICE.html
 kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
 winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
-pdk_fusion_notice_files := $(filter $(TARGET_OUT_NOTICE_FILES)/%, $(ALL_PDK_FUSION_FILES))
 
 # Some targets get included under $(PRODUCT_OUT) for debug symbols or other
 # reasons--not to be flashed onto any device. Targets under these directories
@@ -972,7 +967,7 @@ $(eval $(call combine-notice-files, html, \
             $(target_notice_file_html), \
             "Notices for files contained in the filesystem images in this directory:", \
             $(TARGET_OUT_NOTICE_FILES), \
-            $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files), \
+            $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file), \
             $(exclude_target_dirs)))
 $(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
@@ -1016,7 +1011,7 @@ installed_odm_dlkm_notice_xml_gz := $(TARGET_OUT_ODM_DLKM)/etc/NOTICE.xml.gz
 # Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module
 # being built. A notice xml file must depend on all modules that could potentially
 # install a license file relevant to it.
-license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)
+license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file)
 # Only files copied to a system image need system image notices.
 license_modules := $(filter $(PRODUCT_OUT)/%,$(license_modules))
 # Phonys/fakes don't have notice files (though their deps might)
@@ -2275,17 +2270,10 @@ endif # BOARD_BUILD_SYSTEM_ROOT_IMAGE is not true
 
 # -----------------------------------------------------------------
 # system image
-#
-# Remove overridden packages from $(ALL_PDK_FUSION_FILES)
-PDK_FUSION_SYSIMG_FILES := \
-    $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk), \
-        $(ALL_PDK_FUSION_FILES))
 
 INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
     $(ALL_GENERATED_SOURCES) \
-    $(ALL_DEFAULT_INSTALLED_MODULES) \
-    $(PDK_FUSION_SYSIMG_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP))
+    $(ALL_DEFAULT_INSTALLED_MODULES)))
 
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
@@ -2453,111 +2441,14 @@ endif # BUILDING_SYSTEM_IMAGE
 sync syncsys: $(INTERNAL_SYSTEMIMAGE_FILES)
 
 # -----------------------------------------------------------------
-## platform.zip: system, plus other files to be used in PDK fusion build,
-## in a zip file
-##
-## PDK_PLATFORM_ZIP_PRODUCT_BINARIES is used to store specified files to platform.zip.
-## The variable will be typically set from BoardConfig.mk.
-## Files under out dir will be rejected to prevent possible conflicts with other rules.
-ifneq (,$(BUILD_PLATFORM_ZIP))
-pdk_odex_javalibs := $(strip $(foreach m,$(DEXPREOPT.MODULES.JAVA_LIBRARIES),\
-    $(if $(filter $(DEXPREOPT.$(m).INSTALLED_STRIPPED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
-pdk_odex_apps := $(strip $(foreach m,$(DEXPREOPT.MODULES.APPS),\
-    $(if $(filter $(DEXPREOPT.$(m).INSTALLED_STRIPPED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
-pdk_classes_dex := $(strip \
-    $(foreach m,$(pdk_odex_javalibs),$(call intermediates-dir-for,JAVA_LIBRARIES,$(m),,COMMON)/javalib.jar) \
-    $(foreach m,$(pdk_odex_apps),$(call intermediates-dir-for,APPS,$(m))/package.apk))
-
-pdk_odex_config_mk := $(PRODUCT_OUT)/pdk_dexpreopt_config.mk
-$(pdk_odex_config_mk): PRIVATE_JAVA_LIBRARIES := $(pdk_odex_javalibs)
-$(pdk_odex_config_mk): PRIVATE_APPS := $(pdk_odex_apps)
-$(pdk_odex_config_mk) :
-	@echo "PDK odex config makefile: $@"
-	$(hide) mkdir -p $(dir $@)
-	$(hide) echo "# Auto-generated. Do not modify." > $@
-	$(hide) echo "PDK.DEXPREOPT.JAVA_LIBRARIES:=$(PRIVATE_JAVA_LIBRARIES)" >> $@
-	$(hide) echo "PDK.DEXPREOPT.APPS:=$(PRIVATE_APPS)" >> $@
-	$(foreach m,$(PRIVATE_JAVA_LIBRARIES),\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).SRC:=$(patsubst $(OUT_DIR)/%,%,$(call intermediates-dir-for,JAVA_LIBRARIES,$(m),,COMMON)/javalib.jar)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT:=$(DEXPREOPT.$(m).DEX_PREOPT)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).MULTILIB:=$(DEXPREOPT.$(m).MULTILIB)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT_FLAGS:=$(DEXPREOPT.$(m).DEX_PREOPT_FLAGS)" >> $@$(newline)\
-	)
-	$(foreach m,$(PRIVATE_APPS),\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).SRC:=$(patsubst $(OUT_DIR)/%,%,$(call intermediates-dir-for,APPS,$(m))/package.apk)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT:=$(DEXPREOPT.$(m).DEX_PREOPT)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).MULTILIB:=$(DEXPREOPT.$(m).MULTILIB)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT_FLAGS:=$(DEXPREOPT.$(m).DEX_PREOPT_FLAGS)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).PRIVILEGED_MODULE:=$(DEXPREOPT.$(m).PRIVILEGED_MODULE)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).VENDOR_MODULE:=$(DEXPREOPT.$(m).VENDOR_MODULE)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).TARGET_ARCH:=$(DEXPREOPT.$(m).TARGET_ARCH)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).STRIPPED_SRC:=$(patsubst $(PRODUCT_OUT)/%,%,$(DEXPREOPT.$(m).INSTALLED_STRIPPED))" >> $@$(newline)\
-	)
-
-PDK_PLATFORM_ZIP_PRODUCT_BINARIES := $(filter-out $(OUT_DIR)/%,$(PDK_PLATFORM_ZIP_PRODUCT_BINARIES))
-INSTALLED_PLATFORM_ZIP := $(PRODUCT_OUT)/platform.zip
-
-$(INSTALLED_PLATFORM_ZIP): PRIVATE_DEX_FILES := $(pdk_classes_dex)
-$(INSTALLED_PLATFORM_ZIP): PRIVATE_ODEX_CONFIG := $(pdk_odex_config_mk)
-$(INSTALLED_PLATFORM_ZIP) : $(SOONG_ZIP)
-# dependencies for the other partitions are defined below after their file lists
-# are known
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEMIMAGE_FILES) $(pdk_classes_dex) $(pdk_odex_config_mk) $(API_FINGERPRINT)
-	$(call pretty,"Platform zip package: $(INSTALLED_PLATFORM_ZIP)")
-	rm -f $@ $@.lst
-	echo "-C $(PRODUCT_OUT)" >> $@.lst
-	echo "-D $(TARGET_OUT)" >> $@.lst
-	echo "-D $(TARGET_OUT_NOTICE_FILES)" >> $@.lst
-	echo "$(addprefix -f $(TARGET_OUT_UNSTRIPPED)/,$(PDK_SYMBOL_FILES_LIST))" >> $@.lst
-ifdef BUILDING_VENDOR_IMAGE
-	echo "-D $(TARGET_OUT_VENDOR)" >> $@.lst
-endif
-ifdef BUILDING_PRODUCT_IMAGE
-	echo "-D $(TARGET_OUT_PRODUCT)" >> $@.lst
-endif
-ifdef BUILDING_SYSTEM_EXT_IMAGE
-	echo "-D $(TARGET_OUT_SYSTEM_EXT)" >> $@.lst
-endif
-ifdef BUILDING_ODM_IMAGE
-	echo "-D $(TARGET_OUT_ODM)" >> $@.lst
-endif
-ifdef BUILDING_VENDOR_DLKM_IMAGE
-	echo "-D $(TARGET_OUT_VENDOR_DLKM)" >> $@.lst
-endif
-ifdef BUILDING_ODM_DLKM_IMAGE
-	echo "-D $(TARGET_OUT_ODM_DLKM)" >> $@.lst
-endif
-ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
-	echo "-C $(OUT_DIR)" >> $@.lst
-	for f in $(filter-out $(PRIVATE_DEX_FILES),$(addprefix -f $(OUT_DIR)/,$(PDK_PLATFORM_JAVA_ZIP_CONTENTS))); do \
-	  if [ -e $$f ]; then \
-	    echo "-f $$f"; \
-	  fi \
-	done >> $@.lst
-endif
-ifneq ($(PDK_PLATFORM_ZIP_PRODUCT_BINARIES),)
-	echo "-C . $(addprefix -f ,$(PDK_PLATFORM_ZIP_PRODUCT_BINARIES))" >> $@.lst
-endif
-	@# Add dex-preopt files and config.
-	$(if $(PRIVATE_DEX_FILES),\
-	  echo "-C $(OUT_DIR) $(addprefix -f ,$(PRIVATE_DEX_FILES))") >> $@.lst
-	echo "-C $(dir $(API_FINGERPRINT)) -f $(API_FINGERPRINT)" >> $@.lst
-	touch $(PRODUCT_OUT)/pdk.mk
-	echo "-C $(PRODUCT_OUT) -f $(PRIVATE_ODEX_CONFIG) -f $(PRODUCT_OUT)/pdk.mk" >> $@.lst
-	$(SOONG_ZIP) --ignore_missing_files -o $@ @$@.lst
-
+# Old PDK fusion targets
 .PHONY: platform
-platform: $(INSTALLED_PLATFORM_ZIP)
+platform:
+	echo "Warning: 'platform' is obsolete"
 
 .PHONY: platform-java
-platform-java: platform
-
-# Dist the platform.zip
-ifneq (,$(filter platform platform-java, $(MAKECMDGOALS)))
-$(call dist-for-goals, platform platform-java, $(INSTALLED_PLATFORM_ZIP))
-endif
-
-endif # BUILD_PLATFORM_ZIP
+platform-java:
+	echo "Warning: 'platform-java' is obsolete"
 
 # -----------------------------------------------------------------
 # data partition image
@@ -2691,9 +2582,7 @@ endif
 
 INTERNAL_SYSTEMOTHERIMAGE_FILES := \
     $(filter $(TARGET_OUT_SYSTEM_OTHER)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 # system_other dex files are installed as a side-effect of installing system image files
 INTERNAL_SYSTEMOTHERIMAGE_FILES += $(INTERNAL_SYSTEMIMAGE_FILES)
@@ -2756,12 +2645,7 @@ endif # BUILDING_SYSTEM_OTHER_IMAGE
 
 ifdef BUILDING_VENDOR_IMAGE
 INTERNAL_VENDORIMAGE_FILES := \
     $(filter $(TARGET_OUT_VENDOR)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-
-# platform.zip depends on $(INTERNAL_VENDORIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDORIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
 INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
@@ -2889,12 +2773,7 @@ endif
 
 ifdef BUILDING_PRODUCT_IMAGE
 INTERNAL_PRODUCTIMAGE_FILES := \
     $(filter $(TARGET_OUT_PRODUCT)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-
-# platform.zip depends on $(INTERNAL_PRODUCTIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_PRODUCTIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
 INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json)
@@ -2945,12 +2824,7 @@ endif
 
 ifdef BUILDING_SYSTEM_EXT_IMAGE
 INTERNAL_SYSTEM_EXTIMAGE_FILES := \
    $(filter $(TARGET_OUT_SYSTEM_EXT)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-
-# platform.zip depends on $(INTERNAL_SYSTEM_EXTIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_SYSTEM_EXT := $(PRODUCT_OUT)/installed-files-system_ext.txt
 INSTALLED_FILES_JSON_SYSTEM_EXT := $(INSTALLED_FILES_FILE_SYSTEM_EXT:.txt=.json)
@@ -3003,11 +2877,7 @@ endif
 
 ifdef BUILDING_ODM_IMAGE
 INTERNAL_ODMIMAGE_FILES := \
     $(filter $(TARGET_OUT_ODM)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-# platform.zip depends on $(INTERNAL_ODMIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_ODMIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
 INSTALLED_FILES_JSON_ODM := $(INSTALLED_FILES_FILE_ODM:.txt=.json)
@@ -3058,11 +2928,7 @@ endif
 
 ifdef BUILDING_VENDOR_DLKM_IMAGE
 INTERNAL_VENDOR_DLKMIMAGE_FILES := \
     $(filter $(TARGET_OUT_VENDOR_DLKM)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-# platform.zip depends on $(INTERNAL_VENDOR_DLKMIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_VENDOR_DLKM := $(PRODUCT_OUT)/installed-files-vendor_dlkm.txt
 INSTALLED_FILES_JSON_VENDOR_DLKM := $(INSTALLED_FILES_FILE_VENDOR_DLKM:.txt=.json)
@@ -3113,11 +2979,7 @@ endif
 
 ifdef BUILDING_ODM_DLKM_IMAGE
 INTERNAL_ODM_DLKMIMAGE_FILES := \
     $(filter $(TARGET_OUT_ODM_DLKM)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-# platform.zip depends on $(INTERNAL_ODM_DLKMIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_ODM_DLKMIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_ODM_DLKM := $(PRODUCT_OUT)/installed-files-odm_dlkm.txt
 INSTALLED_FILES_JSON_ODM_DLKM := $(INSTALLED_FILES_FILE_ODM_DLKM:.txt=.json)
@@ -3937,9 +3799,6 @@ else ifeq ($(TARGET_PRODUCT),sdk)
     build_ota_package := false
   endif
-  ifeq ($(TARGET_BUILD_PDK),true)
-    build_ota_package := false
-  endif
   ifneq ($(PRODUCT_BUILD_GENERIC_OTA_PACKAGE),true)
     ifneq ($(filter generic%,$(TARGET_DEVICE)),)
       build_ota_package := false
@@ -5311,13 +5170,6 @@ $(INTERNAL_EMULATOR_PACKAGE_TARGET): $(INTERNAL_EMULATOR_PACKAGE_FILES)
 	$(hide) zip -qjX $@ $(INTERNAL_EMULATOR_PACKAGE_FILES)
 endif
 
-# -----------------------------------------------------------------
-# Old PDK stuffs, retired
-# The pdk package (Platform Development Kit)
-
-#ifneq (,$(filter pdk,$(MAKECMDGOALS)))
-# include development/pdk/Pdk.mk
-#endif
 
 # -----------------------------------------------------------------
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 8fab9c6aed..254e09bc93 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -45,7 +45,7 @@ my_target_sdk_version := $(call module-target-sdk-version)
 my_min_sdk_version := $(call module-min-sdk-version)
 
 ifdef TARGET_BUILD_APPS
-  ifndef TARGET_BUILD_APPS_USE_PREBUILT_SDK
+  ifndef TARGET_BUILD_USE_PREBUILT_SDKS
     ifeq ($(my_target_sdk_version),$(PLATFORM_VERSION_CODENAME))
       ifdef UNBUNDLED_BUILD_TARGET_SDK_WITH_API_FINGERPRINT
         my_target_sdk_version := $(my_target_sdk_version).$$(cat $(API_FINGERPRINT))
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index ab574b3fa4..576799692f 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -45,7 +45,7 @@ built_module := $(LOCAL_BUILT_MODULE)
 # We skip it for unbundled app builds where we cannot build veridex.
 module_run_appcompat :=
 ifeq (true,$(non_system_module))
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) # ! unbundled app build
+ifeq (,$(TARGET_BUILD_APPS)) # ! unbundled app build
 ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
 module_run_appcompat := true
 endif
diff --git a/core/base_rules.mk b/core/base_rules.mk
index abe059b256..ddf736bf37 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -77,20 +77,6 @@ LOCAL_SYSTEM_EXT_MODULE := true
 endif
 
 _path :=
 
-ifeq ($(LOCAL_HOST_MODULE),true)
-my_image_variant := host
-else ifeq ($(LOCAL_VENDOR_MODULE),true)
-my_image_variant := vendor
-else ifeq ($(LOCAL_OEM_MODULE),true)
-my_image_variant := vendor
-else ifeq ($(LOCAL_ODM_MODULE),true)
-my_image_variant := vendor
-else ifeq ($(LOCAL_PRODUCT_MODULE),true)
-my_image_variant := product
-else
-my_image_variant := core
-endif
-
 # TODO(b/135957588) Remove following workaround
 # LOCAL_PRODUCT_SERVICES_MODULE to LOCAL_PRODUCT_MODULE for all Android.mk
 ifndef LOCAL_PRODUCT_MODULE
@@ -107,6 +93,20 @@ ifneq ($(filter-out $(LOCAL_PROPRIETARY_MODULE),$(LOCAL_VENDOR_MODULE))$(filter-
 $(call pretty-error,Only one of LOCAL_PROPRIETARY_MODULE[$(LOCAL_PROPRIETARY_MODULE)] and LOCAL_VENDOR_MODULE[$(LOCAL_VENDOR_MODULE)] may be set, or they must be equal)
 endif
 
+ifeq ($(LOCAL_HOST_MODULE),true)
+my_image_variant := host
+else ifeq ($(LOCAL_VENDOR_MODULE),true)
+my_image_variant := vendor
+else ifeq ($(LOCAL_OEM_MODULE),true)
+my_image_variant := vendor
+else ifeq ($(LOCAL_ODM_MODULE),true)
+my_image_variant := vendor
+else ifeq ($(LOCAL_PRODUCT_MODULE),true)
+my_image_variant := product
+else
+my_image_variant := core
+endif
+
 non_system_module := $(filter true, \
     $(LOCAL_PRODUCT_MODULE) \
     $(LOCAL_SYSTEM_EXT_MODULE) \
@@ -487,7 +487,9 @@ $(_local_path_target): $(my_register_name)
 
   ifndef $(_local_path_target)
     $(_local_path_target) := true
-    $(eval $(call my_path_comp,$(_local_path),$(_local_path_target)))
+    ifneq (,$(findstring /,$(_local_path)))
+      $(eval $(call my_path_comp,$(_local_path),$(_local_path_target)))
+    endif
   endif
 
 _local_path :=
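The my_image_variant block in base_rules.mk above is moved verbatim, not altered: it now runs after the b/135957588 workaround, so the variant is derived from the final value of LOCAL_PRODUCT_MODULE. A sketch of the ordering hazard the move avoids (hypothetical module, shown as plain Make for illustration):

  LOCAL_PRODUCT_SERVICES_MODULE := true
  # old order: my_image_variant computed here => core (LOCAL_PRODUCT_MODULE not yet set)
  LOCAL_PRODUCT_MODULE := true  # set by the workaround above
  # new order: my_image_variant computed here => product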
diff --git a/core/board_config.mk b/core/board_config.mk
index 43a34f92ca..b7d0178c0e 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -94,16 +94,18 @@ _board_strip_readonly_list += $(_dynamic_partitions_var_list)
 
 # Kernel related variables
 _board_strip_readonly_list += \
   BOARD_KERNEL_BINARIES \
+  BOARD_KERNEL_MODULE_INTERFACE_VERSIONS \
 
 _build_broken_var_list := \
   BUILD_BROKEN_DUP_RULES \
+  BUILD_BROKEN_DUP_SYSPROP \
   BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES \
+  BUILD_BROKEN_MISSING_REQUIRED_MODULES \
   BUILD_BROKEN_OUTSIDE_INCLUDE_DIRS \
   BUILD_BROKEN_PREBUILT_ELF_FILES \
   BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
   BUILD_BROKEN_USES_NETWORK \
   BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
-  BUILD_BROKEN_DUP_SYSPROP \
 
 _build_broken_var_list += \
   $(foreach m,$(AVAILABLE_BUILD_MODULE_TYPES) \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 7f420a70d8..307c2c2274 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -258,7 +258,6 @@ LOCAL_SANITIZE:=
 LOCAL_SANITIZE_DIAG:=
 LOCAL_SANITIZE_RECOVER:=
 LOCAL_SANITIZE_NO_RECOVER:=
-LOCAL_SANITIZE_BLACKLIST :=
 LOCAL_SANITIZE_BLOCKLIST :=
 LOCAL_SDK_LIBRARIES :=
 LOCAL_SDK_RES_VERSION:=
diff --git a/core/config.mk b/core/config.mk
index e17013cbd4..57296d80a5 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -154,6 +154,7 @@ $(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTI
 $(KATI_obsolete_var COVERAGE_PATHS,Use NATIVE_COVERAGE_PATHS instead)
 $(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
 $(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported.)
+$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.)
 
 # Used to force goals to build. Only use for conditionally defined goals.
 .PHONY: FORCE
@@ -428,93 +429,13 @@ endif
 endif
 endif
 
-# Set up PDK so we can use TARGET_BUILD_PDK to select prebuilt tools below
-.PHONY: pdk fusion
-pdk fusion: $(DEFAULT_GOAL)
-
-# What to build:
-# pdk fusion if:
-# 1) PDK_FUSION_PLATFORM_ZIP / PDK_FUSION_PLATFORM_DIR is passed in from the environment
-# or
-# 2) the platform.zip / pdk.mk exists in the default location
-# or
-# 3) fusion is a command line build goal,
-#    PDK_FUSION_PLATFORM_ZIP is needed anyway, then do we need the 'fusion' goal?
-# otherwise pdk only if:
-# 1) pdk is a command line build goal
-# or
-# 2) TARGET_BUILD_PDK is passed in from the environment
-
-# if PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR is specified, do not override.
-ifeq (,$(strip $(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR)))
-  # Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
-  # but some legacy ones (e.g. mini_armv7a_neon generic PDK) were setup
-  # with vendor/pdk/TARGET_PRODUCT.
-  # Others are set up with vendor/pdk/TARGET_DEVICE/TARGET_DEVICE-userdebug
-  _pdk_fusion_search_paths := \
-    vendor/pdk/$(TARGET_DEVICE)/$(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform
-
-  _pdk_fusion_default_platform_zip := $(strip $(foreach p,$(_pdk_fusion_search_paths),$(wildcard $(p)/platform.zip)))
-  ifneq (,$(_pdk_fusion_default_platform_zip))
-    PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
-    _pdk_fusion_default_platform_zip :=
-  else
-    _pdk_fusion_default_platform_mk := $(strip $(foreach p,$(_pdk_fusion_search_paths),$(wildcard $(p)/pdk.mk)))
-    ifneq (,$(_pdk_fusion_default_platform_mk))
-      PDK_FUSION_PLATFORM_DIR := $(dir $(word 1,$(_pdk_fusion_default_platform_mk)))
-      _pdk_fusion_default_platform_mk :=
-    endif
-  endif # _pdk_fusion_default_platform_zip
-  _pdk_fusion_search_paths :=
-endif # !PDK_FUSION_PLATFORM_ZIP && !PDK_FUSION_PLATFORM_DIR
-
-ifneq (,$(PDK_FUSION_PLATFORM_ZIP))
-  ifneq (,$(PDK_FUSION_PLATFORM_DIR))
-    $(error Only one of PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR may be specified)
-  endif
-endif
-
-ifneq (,$(filter pdk fusion, $(MAKECMDGOALS)))
-TARGET_BUILD_PDK := true
-ifneq (,$(filter fusion, $(MAKECMDGOALS)))
-ifeq (,$(strip $(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR)))
-  $(error Specify PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR to do a PDK fusion.)
-endif
-endif # fusion
-endif # pdk or fusion
-
-ifdef PDK_FUSION_PLATFORM_ZIP
-TARGET_BUILD_PDK := true
-ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_ZIP)))
-  ifneq (,$(wildcard $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))/pdk.mk))
-    PDK_FUSION_PLATFORM_DIR := $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))
-    PDK_FUSION_PLATFORM_ZIP :=
-  else
-    $(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
-  endif
-endif
-endif
-
-ifdef PDK_FUSION_PLATFORM_DIR
-TARGET_BUILD_PDK := true
-ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_DIR)/pdk.mk))
-  $(error Cannot find file $(PDK_FUSION_PLATFORM_DIR)/pdk.mk.)
-endif
-endif
-
-BUILD_PLATFORM_ZIP := $(filter platform platform-java,$(MAKECMDGOALS))
-
 # ---------------------------------------------------------------
 # Whether we can expect a full build graph
 ALLOW_MISSING_DEPENDENCIES := $(filter true,$(ALLOW_MISSING_DEPENDENCIES))
 ifneq ($(TARGET_BUILD_APPS),)
 ALLOW_MISSING_DEPENDENCIES := true
 endif
-ifeq ($(TARGET_BUILD_PDK),true)
+ifeq ($(TARGET_BUILD_UNBUNDLED_IMAGE),true)
 ALLOW_MISSING_DEPENDENCIES := true
 endif
 ifneq ($(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),)
@@ -522,13 +443,19 @@ ALLOW_MISSING_DEPENDENCIES := true
 endif
 .KATI_READONLY := ALLOW_MISSING_DEPENDENCIES
 
-TARGET_BUILD_APPS_USE_PREBUILT_SDK :=
-ifdef TARGET_BUILD_APPS
+TARGET_BUILD_USE_PREBUILT_SDKS :=
+DISABLE_PREOPT :=
+ifneq (,$(TARGET_BUILD_APPS)$(TARGET_BUILD_UNBUNDLED_IMAGE))
+  DISABLE_PREOPT := true
   ifndef UNBUNDLED_BUILD_SDKS_FROM_SOURCE
-    TARGET_BUILD_APPS_USE_PREBUILT_SDK := true
+    TARGET_BUILD_USE_PREBUILT_SDKS := true
   endif
 endif
 
+.KATI_READONLY := \
+  TARGET_BUILD_USE_PREBUILT_SDKS \
+  DISABLE_PREOPT \
+
 prebuilt_sdk_tools := prebuilts/sdk/tools
 prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
@@ -550,25 +477,25 @@ USE_D8 := true
 .KATI_READONLY := USE_D8
 
 #
-# Tools that are prebuilts for TARGET_BUILD_APPS
+# Tools that are prebuilts for TARGET_BUILD_USE_PREBUILT_SDKS
 #
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))
+ifeq (,$(TARGET_BUILD_USE_PREBUILT_SDKS))
   AAPT := $(HOST_OUT_EXECUTABLES)/aapt
   MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
-else # TARGET_BUILD_APPS || TARGET_BUILD_PDK
+else # TARGET_BUILD_USE_PREBUILT_SDKS
   AAPT := $(prebuilt_sdk_tools_bin)/aapt
   MAINDEXCLASSES := $(prebuilt_sdk_tools)/mainDexClasses
-endif # TARGET_BUILD_APPS || TARGET_BUILD_PDK
+endif # TARGET_BUILD_USE_PREBUILT_SDKS
 
-ifeq (,$(TARGET_BUILD_APPS))
-  # Use RenderScript prebuilts for unbundled builds but not PDK builds
+ifeq (,$(TARGET_BUILD_USE_PREBUILT_SDKS))
+  # Use RenderScript prebuilts for unbundled builds
   LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc
   BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat
 else
   LLVM_RS_CC := $(prebuilt_sdk_tools_bin)/llvm-rs-cc
   BCC_COMPAT := $(prebuilt_sdk_tools_bin)/bcc_compat
-endif # TARGET_BUILD_PDK
+endif
 
 prebuilt_sdk_tools :=
 prebuilt_sdk_tools_bin :=
@@ -1101,7 +1028,7 @@ HISTORICAL_SDK_VERSIONS_ROOT := $(TOPDIR)prebuilts/sdk
 HISTORICAL_NDK_VERSIONS_ROOT := $(TOPDIR)prebuilts/ndk
 
 # The path where app can reference the support library resources.
-ifdef TARGET_BUILD_APPS
+ifdef TARGET_BUILD_USE_PREBUILT_SDKS
 SUPPORT_LIBRARY_ROOT := $(HISTORICAL_SDK_VERSIONS_ROOT)/current/support
 else
 SUPPORT_LIBRARY_ROOT := frameworks/support
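Taken together, the config.mk hunks above collapse the old PDK and app-build special cases into two read-only knobs. A sketch of the resulting flow for an unbundled image build (the derived values are shown as comments, not literal code to copy):

  TARGET_BUILD_UNBUNDLED_IMAGE := true      # requested by the user/environment
  # envsetup.mk:  TARGET_BUILD_UNBUNDLED := true
  # config.mk:    ALLOW_MISSING_DEPENDENCIES := true
  #               DISABLE_PREOPT := true
  #               TARGET_BUILD_USE_PREBUILT_SDKS := true
  #               (the last unless UNBUNDLED_BUILD_SDKS_FROM_SOURCE is set)

TARGET_BUILD_APPS feeds the same chain, which is why the prebuilt-tool selections below now key off TARGET_BUILD_USE_PREBUILT_SDKS rather than enumerating build modes.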
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 323bb36d9d..eaab1b5eca 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -144,9 +144,6 @@ endif
 
 # Support for local sanitize blacklist paths.
 ifneq ($(my_sanitize)$(my_global_sanitize),)
-  ifneq ($(LOCAL_SANITIZE_BLACKLIST),)
-    my_cflags += -fsanitize-blacklist=$(LOCAL_PATH)/$(LOCAL_SANITIZE_BLACKLIST)
-  endif
   ifneq ($(LOCAL_SANITIZE_BLOCKLIST),)
     my_cflags += -fsanitize-blacklist=$(LOCAL_PATH)/$(LOCAL_SANITIZE_BLOCKLIST)
   endif
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 55f6f0bec6..41a2be91bb 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -5,7 +5,7 @@ ifneq (true,$(filter true,$(WITH_DEXPREOPT)))
 ENABLE_PREOPT :=
 else ifneq (true,$(filter true,$(PRODUCT_USES_DEFAULT_ART_CONFIG)))
 ENABLE_PREOPT :=
-else ifneq (,$(TARGET_BUILD_APPS))
+else ifeq (true,$(DISABLE_PREOPT))
 ENABLE_PREOPT :=
 endif
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 3d5f68aa01..799b62321b 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -40,8 +40,8 @@ ifneq (,$(filter $(LOCAL_MODULE),$(DEXPREOPT_DISABLED_MODULES)))
   LOCAL_DEX_PREOPT :=
 endif
 
-# Disable preopt for TARGET_BUILD_APPS
-ifneq (,$(TARGET_BUILD_APPS))
+# Disable preopt for DISABLE_PREOPT
+ifeq (true,$(DISABLE_PREOPT))
   LOCAL_DEX_PREOPT :=
 endif
diff --git a/core/distdir.mk b/core/distdir.mk
index 5f404075df..aad8ff3d65 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -28,7 +28,7 @@ _all_dist_src_dst_pairs :=
 # certain files with certain goals.  When those goals are built
 # and "dist" is specified, the marked files will be copied to DIST_DIR.
 #
-# $(1): a list of goals  (e.g. droid, sdk, pdk, ndk). These must be PHONY
+# $(1): a list of goals  (e.g. droid, sdk, ndk). These must be PHONY
 # $(2): the dist files to add to those goals.  If the file contains ':',
 #       the text following the colon is the name that the file is copied
 #       to under the dist directory.  Subdirs are ok, and will be created
diff --git a/core/envsetup.mk b/core/envsetup.mk
index f78ecb4f58..167fed9474 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -93,6 +93,7 @@ TARGET_BUILD_VARIANT := eng
 endif
 
 TARGET_BUILD_APPS ?=
+TARGET_BUILD_UNBUNDLED_IMAGE ?=
 
 # Set to true for an unbundled build, i.e. a build without
 # support for platform targets like the system image. This also
@@ -107,11 +108,19 @@ ifneq ($(TARGET_BUILD_APPS),)
   TARGET_BUILD_UNBUNDLED := true
 endif
 
+# TARGET_BUILD_UNBUNDLED_IMAGE also implies an unbundled build
+# (i.e. it targets only an unbundled image, such as the vendor image or the product image).
+ifneq ($(TARGET_BUILD_UNBUNDLED_IMAGE),)
+  TARGET_BUILD_UNBUNDLED := true
+endif
+
 .KATI_READONLY := \
   TARGET_PRODUCT \
   TARGET_BUILD_VARIANT \
   TARGET_BUILD_APPS \
   TARGET_BUILD_UNBUNDLED \
+  TARGET_BUILD_UNBUNDLED_IMAGE \
 
 # ---------------------------------------------------------------
 # Set up configuration for host machine.  We don't do cross-
diff --git a/core/java.mk b/core/java.mk
index 2f18ad9dd7..5fe8da5714 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -4,17 +4,6 @@
 # LOCAL_MODULE_CLASS
 # all_res_assets
 
-ifeq ($(TARGET_BUILD_PDK),true)
-ifeq ($(TARGET_BUILD_PDK_JAVA_PLATFORM),)
-# LOCAL_SDK not defined or set to current
-ifeq ($(filter-out current,$(LOCAL_SDK_VERSION)),)
-ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-LOCAL_SDK_VERSION := $(PDK_BUILD_SDK_VERSION)
-endif #!LOCAL_NO_STANDARD_LIBRARIES
-endif
-endif # !PDK_JAVA
-endif #PDK
-
 LOCAL_NO_STANDARD_LIBRARIES:=$(strip $(LOCAL_NO_STANDARD_LIBRARIES))
 LOCAL_SDK_VERSION:=$(strip $(LOCAL_SDK_VERSION))
@@ -106,8 +95,8 @@ ifneq ($(strip $(aidl_sources)),)
 aidl_preprocess_import :=
 ifdef LOCAL_SDK_VERSION
-ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS_USE_PREBUILT_SDK)),)
-  # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS
+ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS)),)
+  # LOCAL_SDK_VERSION is current and no TARGET_BUILD_USE_PREBUILT_SDKS
   aidl_preprocess_import := $(FRAMEWORK_AIDL)
 else
   aidl_preprocess_import := $(call resolve-prebuilt-sdk-aidl-path,$(LOCAL_SDK_VERSION))
diff --git a/core/java_common.mk b/core/java_common.mk
index b7f288389e..1798ca8452 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -25,7 +25,7 @@ ifeq (,$(LOCAL_JAVA_LANGUAGE_VERSION))
     LOCAL_JAVA_LANGUAGE_VERSION := 1.7
   else ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT)))
     LOCAL_JAVA_LANGUAGE_VERSION := 1.8
-  else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS_USE_PREBUILT_SDK))
+  else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS))
     # TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules
     LOCAL_JAVA_LANGUAGE_VERSION := 1.8
   else
@@ -268,7 +268,7 @@ ifndef LOCAL_IS_HOST_MODULE
       my_system_modules := $(LEGACY_CORE_PLATFORM_SYSTEM_MODULES)
     endif # LOCAL_NO_STANDARD_LIBRARIES
 
-    ifneq (,$(TARGET_BUILD_APPS_USE_PREBUILT_SDK))
+    ifneq (,$(TARGET_BUILD_USE_PREBUILT_SDKS))
       sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,system_current,$(lib_name)))
     else
       # When SDK libraries are referenced from modules built without SDK, provide the all APIs to them
@@ -283,8 +283,8 @@ ifndef LOCAL_IS_HOST_MODULE
           Choices are: $(TARGET_AVAILABLE_SDK_VERSIONS))
     endif
 
-    ifneq (,$(TARGET_BUILD_APPS_USE_PREBUILT_SDK)$(filter-out %current,$(LOCAL_SDK_VERSION)))
-      # TARGET_BUILD_APPS mode or numbered SDK. Use prebuilt modules.
+    ifneq (,$(TARGET_BUILD_USE_PREBUILT_SDKS)$(filter-out %current,$(LOCAL_SDK_VERSION)))
+      # TARGET_BUILD_USE_PREBUILT_SDKS mode or numbered SDK. Use prebuilt modules.
       sdk_module := $(call resolve-prebuilt-sdk-module,$(LOCAL_SDK_VERSION))
       sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,$(LOCAL_SDK_VERSION),$(lib_name)))
     else
@@ -325,7 +325,7 @@ ifndef LOCAL_IS_HOST_MODULE
   # related classes to be present. This change adds stubs needed for
   # javac to compile lambdas.
   ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-    ifdef TARGET_BUILD_APPS_USE_PREBUILT_SDK
+    ifdef TARGET_BUILD_USE_PREBUILT_SDKS
       full_java_bootclasspath_libs += $(call java-lib-header-files,sdk-core-lambda-stubs)
     else
       full_java_bootclasspath_libs += $(call java-lib-header-files,core-lambda-stubs)
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
index 6ee1ae1d71..279b0e4a91 100644
--- a/core/java_prebuilt_internal.mk
+++ b/core/java_prebuilt_internal.mk
@@ -173,7 +173,7 @@ endif
 framework_res_package_export :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
 else
diff --git a/core/java_renderscript.mk b/core/java_renderscript.mk
index bfcf59e325..572d6e4a4b 100644
--- a/core/java_renderscript.mk
+++ b/core/java_renderscript.mk
@@ -50,8 +50,8 @@ renderscript_flags := -Wall -Werror
 renderscript_flags += $(LOCAL_RENDERSCRIPT_FLAGS)
 
 # prepend the RenderScript system include path
-ifneq ($(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
-# if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_APPS
+ifneq ($(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
+# if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_USE_PREBUILT_SDKS
 LOCAL_RENDERSCRIPT_INCLUDES := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/clang-include \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/include \
@@ -110,7 +110,7 @@ renderscript_intermediate := $(intermediates)/renderscript
 rs_jni_lib := $(call intermediates-dir-for,SHARED_LIBRARIES,librsjni.so)/librsjni.so
 LOCAL_JNI_SHARED_LIBRARIES += librsjni
 
-ifneq (,$(TARGET_BUILD_APPS)$(FORCE_BUILD_RS_COMPAT))
+ifneq (,$(TARGET_BUILD_USE_PREBUILT_SDKS)$(FORCE_BUILD_RS_COMPAT))
 
 rs_compatibility_jni_libs := $(addprefix \
     $(renderscript_intermediate)/librs., \
diff --git a/core/main.mk b/core/main.mk
index 37b2367d5e..a3d594bba5 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -177,16 +177,8 @@ $(error stopping)
 endif
 
 # -----------------------------------------------------------------
-# Variable to check java support level inside PDK build.
-# Not necessary if the components is not in PDK.
-# not defined : not supported
-# "sdk" : sdk API only
-# "platform" : platform API supproted
-TARGET_BUILD_JAVA_SUPPORT_LEVEL := platform
-
-# -----------------------------------------------------------------
-# The pdk (Platform Development Kit) build
-include build/make/core/pdk_config.mk
+# PDK builds are no longer supported, this is always platform
+TARGET_BUILD_JAVA_SUPPORT_LEVEL :=$= platform
 
 # -----------------------------------------------------------------
@@ -215,6 +207,9 @@ ifneq ($(TARGET_BUILD_VARIANT),user)
   ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
 endif
 
+# Define ro.sanitize.<name> properties for all global sanitizers.
+ADDITIONAL_SYSTEM_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
+
 # Sets the default value of ro.postinstall.fstab.prefix to /system.
 # Device board config should override the value to /product when needed by:
 #
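A quick worked expansion of the new property line: with SANITIZE_TARGET := hwaddress cfi, the foreach above contributes

  ADDITIONAL_SYSTEM_PROPERTIES += ro.sanitize.hwaddress=true ro.sanitize.cfi=true

so each globally enabled sanitizer becomes a queryable ro.sanitize.<name> system property on the device.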
@@ -515,11 +510,6 @@ subdir_makefiles_total := $(words int $(subdir_makefiles) post finish)
 
 $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
 
-ifneq (,$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
-# Bring in the PDK platform.zip modules.
-include $(BUILD_SYSTEM)/pdk_fusion_modules.mk
-endif # PDK_FUSION_PLATFORM_ZIP || PDK_FUSION_PLATFORM_DIR
-
 droid_targets : blueprint_tools
 
 endif # dont_bother
@@ -635,7 +625,6 @@ $(strip \
   )
 endef
 
-# TODO(b/7456955): error if a required module doesn't exist.
 # Resolve the required module names to 32-bit or 64-bit variant for:
 #   ALL_MODULES.<*>.REQUIRED_FROM_TARGET
 #   ALL_MODULES.<*>.REQUIRED_FROM_HOST
@@ -668,7 +657,8 @@ $(foreach m,$(ALL_MODULES), \
         $(if $(and $(module_is_native),$(required_is_shared_library_or_native_test)), \
           $(if $(ALL_MODULES.$(m).FOR_2ND_ARCH),$(r_i_2nd),$(r_i)), \
           $(r_i) $(r_i_2nd)))) \
-      $(eval ### TODO(b/7456955): error if r_m is empty / does not exist) \
+      $(eval r_m := $(foreach r_j,$(r_m),$(if $(ALL_MODULES.$(r_j).PATH),$(r_j)))) \
+      $(if $(r_m),,$(eval _nonexistent_required += $(1)$(comma)$(m)$(comma)$(1)$(comma)$(r_i))) \
       $(r_m))) \
   $(eval ALL_MODULES.$(m).REQUIRED_FROM_$(1) := $(sort $(r_r))) \
 ) \
@@ -691,18 +681,37 @@ $(foreach m,$(ALL_MODULES), \
     $(eval r_r := \
       $(foreach r_i,$(r), \
         $(eval r_m := $(call resolve-bitness-for-modules,$(1),$(r_i))) \
-        $(eval ### TODO(b/7456955): error if r_m is empty / does not exist) \
+        $(eval r_m := $(foreach r_j,$(r_m),$(if $(ALL_MODULES.$(r_j).PATH),$(r_j)))) \
+        $(if $(r_m),,$(eval _nonexistent_required += $(2)$(comma)$(m)$(comma)$(1)$(comma)$(r_i))) \
        $(r_m))) \
     $(eval ALL_MODULES.$(m).$(1)_REQUIRED_FROM_$(2) := $(sort $(r_r))) \
   ) \
 )
 endef
 
+_nonexistent_required :=
 $(call select-bitness-of-required-modules,TARGET)
 $(call select-bitness-of-required-modules,HOST)
 $(call select-bitness-of-required-modules,HOST_CROSS)
 $(call select-bitness-of-target-host-required-modules,TARGET,HOST)
 $(call select-bitness-of-target-host-required-modules,HOST,TARGET)
+_nonexistent_required := $(sort $(_nonexistent_required))
+
+# HOST OS darwin build is broken, disable this check for darwin for now.
+# TODO(b/162102724): Remove this
+ifeq (,$(filter $(HOST_OS),darwin))
+ifeq (,$(filter true,$(ALLOW_MISSING_DEPENDENCIES) $(BUILD_BROKEN_MISSING_REQUIRED_MODULES)))
+ifneq (,$(_nonexistent_required))
+  $(warning Missing required dependencies:)
+  $(foreach r_i,$(_nonexistent_required), \
+    $(eval r := $(subst $(comma),$(space),$(r_i))) \
+    $(info $(word 1,$(r)) module $(word 2,$(r)) requires non-existent $(word 3,$(r)) module: $(word 4,$(r))) \
+  )
+  $(warning Set BUILD_BROKEN_MISSING_REQUIRED_MODULES := true to bypass this check if this is intentional)
+  $(error Build failed)
+endif # _nonexistent_required != empty
+endif # ALLOW_MISSING_DEPENDENCIES != true && BUILD_BROKEN_MISSING_REQUIRED_MODULES != true
+endif # HOST_OS != darwin
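Rendered through the $(warning)/$(info) calls in the check above, a dangling required-module edge fails the build with output along these lines (module names hypothetical; make's own warning prefixes omitted):

  Missing required dependencies:
  TARGET module foo requires non-existent TARGET module: bar
  Set BUILD_BROKEN_MISSING_REQUIRED_MODULES := true to bypass this check if this is intentional
  Build failed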
 
 define add-required-deps
 $(1): | $(2)
@@ -1721,13 +1730,11 @@ else ifeq (,$(TARGET_BUILD_UNBUNDLED))
     $(call dist-for-goals, droidcore, $(f)))
 
   ifneq ($(ANDROID_BUILD_EMBEDDED),true)
-  ifneq ($(TARGET_BUILD_PDK),true)
     $(call dist-for-goals, droidcore, \
       $(APPS_ZIP) \
       $(INTERNAL_EMULATOR_PACKAGE_TARGET) \
     )
   endif
-  endif
 
   $(call dist-for-goals, droidcore, \
     $(INSTALLED_FILES_FILE_ROOT) \
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 336048fe38..4d1009fda8 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -27,12 +27,8 @@ PARSE_TIME_MAKE_GOALS := \
     dicttool_aosp \
     dump-products \
     eng \
-    fusion \
     oem_image \
     online-system-api-sdk-docs \
-    pdk \
-    platform \
-    platform-java \
     product-graph \
     samplecode \
     sdk \
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 775ee4853a..a97e401b52 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -401,7 +401,7 @@ ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
 # resources.
 ifeq ($(LOCAL_SDK_RES_VERSION),core_current)
 # core_current doesn't contain any framework resources.
-else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS_USE_PREBUILT_SDK),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 # for released sdk versions, the platform resources were built into android.jar.
 framework_res_package_export := \
     $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
@@ -529,7 +529,7 @@ endif
 # We skip it for unbundled app builds where we cannot build veridex.
 module_run_appcompat :=
 ifeq (true,$(non_system_module))
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) # ! unbundled app build
+ifeq (,$(TARGET_BUILD_APPS)) # ! unbundled app build
 ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
 module_run_appcompat := true
 endif
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
deleted file mode 100644
index 922e0ef2d6..0000000000
--- a/core/pdk_config.mk
+++ /dev/null
@@ -1,190 +0,0 @@
-# This file defines the rule to fuse the platform.zip into the current PDK build.
-PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR :=
-PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR := \
-	host/common/obj/JAVA_LIBRARIES/bouncycastle-host_intermediates \
-	host/common/obj/JAVA_LIBRARIES/compatibility-host-util_intermediates \
-	host/common/obj/JAVA_LIBRARIES/cts-tradefed-harness_intermediates \
-	host/common/obj/JAVA_LIBRARIES/hosttestlib_intermediates
-PDK_PLATFORM_JAVA_ZIP_CONTENTS :=
-
-ifneq (,$(filter platform-java, $(MAKECMDGOALS))$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
-# additional items to add to platform.zip for platform-java build
-# For these dirs, add classes.jar and javalib.jar from the dir to platform.zip
-# all paths under out dir
-PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR += \
-	target/common/obj/JAVA_LIBRARIES/android.test.runner_intermediates \
-	target/common/obj/JAVA_LIBRARIES/android-common_intermediates \
-	target/common/obj/JAVA_LIBRARIES/android-ex-camera2_intermediates \
-	target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates \
-	target/common/obj/JAVA_LIBRARIES/bouncycastle_intermediates \
-	target/common/obj/JAVA_LIBRARIES/conscrypt_intermediates \
-	target/common/obj/JAVA_LIBRARIES/core-oj_intermediates \
-	target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
-	target/common/obj/JAVA_LIBRARIES/core-icu4j_intermediates \
-	target/common/obj/JAVA_LIBRARIES/ext_intermediates \
-	target/common/obj/JAVA_LIBRARIES/framework-minus-apex_intermediates \
-	target/common/obj/JAVA_LIBRARIES/hwbinder_intermediates \
-	target/common/obj/JAVA_LIBRARIES/ims-common_intermediates \
-	target/common/obj/JAVA_LIBRARIES/okhttp_intermediates \
-	target/common/obj/JAVA_LIBRARIES/telephony-common_intermediates \
-	target/common/obj/JAVA_LIBRARIES/voip-common_intermediates \
-
-# not java libraries
-PDK_PLATFORM_JAVA_ZIP_CONTENTS += \
-	target/common/obj/APPS/framework-res_intermediates/package-export.apk \
-	target/common/obj/APPS/framework-res_intermediates/src/R.stamp
-endif # platform-java or FUSION build
-
-PDK_PLATFORM_JAVA_ZIP_JAVA_LIB_DIR := \
-	$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR) \
-	$(PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR)
-
-PDK_PLATFORM_JAVA_ZIP_CONTENTS += $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_LIB_DIR),\
-    $(lib_dir)/classes.jar $(lib_dir)/classes-header.jar \
-    $(lib_dir)/javalib.jar $(lib_dir)/classes*.dex \
-    $(lib_dir)/classes.dex.toc )
-
-# check and override java support level
-ifneq ($(TARGET_BUILD_PDK)$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR),)
-  ifneq ($(wildcard external/proguard),)
-    TARGET_BUILD_JAVA_SUPPORT_LEVEL := sdk
-  else # no proguard
-    TARGET_BUILD_JAVA_SUPPORT_LEVEL :=
-  endif
-  # platform support is set after checking platform.zip
-endif # PDK
-
-ifneq (,$(PDK_FUSION_PLATFORM_DIR)$(PDK_FUSION_PLATFORM_ZIP))
-
-_pdk_fusion_intermediates :=
-_pdk_fusion_stamp :=
-_pdk_fusion_file_list :=
-_pdk_fusion_java_file_list :=
-PDK_FUSION_SYMLINK_STAMP :=
-
-ifdef PDK_FUSION_PLATFORM_DIR
-  _pdk_fusion_intermediates := $(PDK_FUSION_PLATFORM_DIR)
-  _pdk_fusion_file_list := $(sort \
-    $(shell cd $(PDK_FUSION_PLATFORM_DIR); find * -type f))
-  _pdk_fusion_java_file_list := $(filter target/common/%,$(_pdk_fusion_file_list))
-  _pdk_fusion_file_list := $(filter-out target/common/%,$(_pdk_fusion_file_list))
-
-  PDK_FUSION_SYMLINK_STAMP := $(call intermediates-dir-for, PACKAGING, pdk_fusion)/pdk_symlinks.stamp
-
-  symlink_list := $(sort \
-    $(shell cd $(PDK_FUSION_PLATFORM_DIR); find * -type l))
-$(PDK_FUSION_SYMLINK_STAMP): PRIVATE_SYMLINKS := $(foreach s,$(symlink_list),\
-    $(s):$(shell readlink $(PDK_FUSION_PLATFORM_DIR)/$(s)))
-$(PDK_FUSION_SYMLINK_STAMP):
-	$(foreach s,$(PRIVATE_SYMLINKS),\
-	  mkdir -p $(PRODUCT_OUT)/$(dir $(call word-colon,1,$(s))) && \
-	  ln -sf $(call word-colon,2,$(s)) $(PRODUCT_OUT)/$(call word-colon,1,$(s)) &&) true
-	touch $@
-
-  symlink_list :=
-endif # PDK_FUSION_PLATFORM_DIR
-
-ifdef PDK_FUSION_PLATFORM_ZIP
-  _pdk_fusion_intermediates := $(call intermediates-dir-for, PACKAGING, pdk_fusion)
-  _pdk_fusion_stamp := $(_pdk_fusion_intermediates)/pdk_fusion.stamp
-
-  _pdk_fusion_file_list := $(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) \
-      '*[^/]' -x 'target/common/*' 2>/dev/null)
-  _pdk_fusion_java_file_list := \
-      $(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) 'target/common/*' 2>/dev/null)
-  _pdk_fusion_files := $(addprefix $(_pdk_fusion_intermediates)/,\
-      $(_pdk_fusion_file_list) $(_pdk_fusion_java_file_list))
-
-$(_pdk_fusion_stamp) : $(PDK_FUSION_PLATFORM_ZIP)
-	@echo "Unzip $(dir $@) <- $<"
-	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
-	$(hide) unzip -qo $< -d $(dir $@)
-	$(call split-long-arguments,-touch,$(_pdk_fusion_files))
-	$(hide) touch $@
-
-$(_pdk_fusion_files) : $(_pdk_fusion_stamp)
-endif # PDK_FUSION_PLATFORM_ZIP
-
-ifneq ($(_pdk_fusion_java_file_list),)
-  # This represents whether java build can use platform API or not
-  # This should not be used in Android.mk
-  TARGET_BUILD_PDK_JAVA_PLATFORM := true
-  ifneq ($(TARGET_BUILD_JAVA_SUPPORT_LEVEL),)
-    TARGET_BUILD_JAVA_SUPPORT_LEVEL := platform
-  endif
-endif
-
-# Implicit pattern rules to copy the fusion files to the system image directory.
-# Note that if there is already explicit rule in the build system to generate a file,
-# the pattern rule will be just ignored by make.
-# That's desired by us: we want only absent files from the platform zip package.
-# Copy with the last-modified time preserved, never follow symbolic links.
-$(PRODUCT_OUT)/% : $(_pdk_fusion_intermediates)/% $(_pdk_fusion_stamp)
-	@mkdir -p $(dir $@)
-	$(hide) rm -rf $@
-	$(hide) cp -fpPR $< $@
-
-# implicit rules for host java files
-$(HOST_COMMON_OUT_ROOT)/% : $(_pdk_fusion_intermediates)/host/common/% $(_pdk_fusion_stamp)
-	@mkdir -p $(dir $@)
-	$(hide) cp -fpPR $< $@
-
-ifeq (true,$(TARGET_BUILD_PDK_JAVA_PLATFORM))
-  PDK_FUSION_OUT_DIR := $(OUT_DIR)
-
-  define JAVA_dependency_template
-  $(call add-dependency,$(PDK_FUSION_OUT_DIR)/$(strip $(1)),\
-    $(foreach d,$(filter $(2),$(_pdk_fusion_java_file_list)),$(PDK_FUSION_OUT_DIR)/$(d)))
-  endef
-
-  # needs explicit dependency as package-export.apk is not explicitly pulled
-  $(eval $(call JAVA_dependency_template,\
-    target/common/obj/APPS/framework-res_intermediates/src/R.stamp,\
-    target/common/obj/APPS/framework-res_intermediates/package-export.apk))
-
-  # javalib.jar should pull classes.jar as classes.jar is not explicitly pulled.
-  $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR),\
-    $(eval $(call JAVA_dependency_template,$(lib_dir)/javalib.jar,\
-      $(lib_dir)/classes.jar)))
-
-# implicit rules for all other target files
-$(TARGET_COMMON_OUT_ROOT)/% : $(_pdk_fusion_intermediates)/target/common/% $(_pdk_fusion_stamp)
-	@mkdir -p $(dir $@)
-	$(hide) cp -fpPR $< $@
-endif # TARGET_BUILD_PDK_JAVA_PLATFORM
-
-ALL_PDK_FUSION_FILES := $(addprefix $(PRODUCT_OUT)/, $(_pdk_fusion_file_list))
-
-endif # PDK_FUSION_PLATFORM_ZIP || PDK_FUSION_PLATFORM_DIR
-
-ifeq ($(TARGET_BUILD_PDK),true)
-  $(info PDK TARGET_BUILD_JAVA_SUPPORT_LEVEL $(TARGET_BUILD_JAVA_SUPPORT_LEVEL))
-  ifeq ($(TARGET_BUILD_PDK_JAVA_PLATFORM),)
-    # SDK used for Java build under PDK
-    PDK_BUILD_SDK_VERSION := $(lastword $(TARGET_AVAILABLE_SDK_VERSIONS))
-    $(info PDK Build uses SDK $(PDK_BUILD_SDK_VERSION))
-  else # PDK_JAVA
-    $(info PDK Build uses the current platform API)
-  endif # PDK_JAVA
-endif # BUILD_PDK
-
-ifneq (,$(filter platform platform-java, $(MAKECMDGOALS))$(filter true,$(TARGET_BUILD_PDK)))
-  # files under $(PRODUCT_OUT)/symbols to help debugging.
-  # Source not included to PDK due to dependency issue, so provide symbols instead.
-
-  PDK_SYMBOL_FILES_LIST :=
-  ifeq ($(TARGET_IS_64_BIT),true)
-    PDK_SYMBOL_FILES_LIST += system/bin/app_process64
-    ifdef TARGET_2ND_ARCH
-      PDK_SYMBOL_FILES_LIST += system/bin/app_process32
-    endif
-  else
-    PDK_SYMBOL_FILES_LIST += system/bin/app_process32
-  endif
-
-  ifneq (,$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
-    # symbols should be explicitly pulled for fusion build
-    $(foreach f,$(filter $(PDK_SYMBOL_FILES_LIST), $(_pdk_fusion_file_list)),\
-      $(eval $(call add-dependency,$(PRODUCT_OUT)/$(f),$(PRODUCT_OUT)/symbols/$(f))))
-  endif # PLATFORM_ZIP || PLATFORM_DIR
-endif # platform.zip/dir build or PDK
diff --git a/core/pdk_fusion_modules.mk b/core/pdk_fusion_modules.mk
deleted file mode 100644
index 235acf9378..0000000000
--- a/core/pdk_fusion_modules.mk
+++ /dev/null
@@ -1,86 +0,0 @@
-# Auto-generate module defitions from platform.zip.
-# We use these rules to rebuild .odex files of the .jar/.apk inside the platform.zip.
-#
-
-ifdef PDK_FUSION_PLATFORM_ZIP
-pdk_dexpreopt_config_mk := $(TARGET_OUT_INTERMEDIATES)/pdk_dexpreopt_config.mk
-
-$(shell rm -f $(pdk_dexpreopt_config_mk) && mkdir -p $(dir $(pdk_dexpreopt_config_mk)) && \
-	unzip -qo $(PDK_FUSION_PLATFORM_ZIP) -d $(dir $(pdk_dexpreopt_config_mk)) pdk_dexpreopt_config.mk 2>/dev/null)
-endif
-
-ifdef PDK_FUSION_PLATFORM_DIR
-pdk_dexpreopt_config_mk := $(PDK_FUSION_PLATFORM_DIR)/pdk_dexpreopt_config.mk
-endif
-
--include $(pdk_dexpreopt_config_mk)
-
-# Define a PDK prebuilt module that comes from platform.zip.
-# Must be called with $(eval)
-define prebuilt-pdk-java-module
-include $(CLEAR_VARS)
-LOCAL_MODULE:=$(1)
-LOCAL_MODULE_CLASS:=$(2)
-# Use LOCAL_PREBUILT_MODULE_FILE instead of LOCAL_SRC_FILES so we don't need to deal with LOCAL_PATH.
-LOCAL_PREBUILT_MODULE_FILE:=$(3)
-LOCAL_DEX_PREOPT:=$(4)
-LOCAL_MULTILIB:=$(5)
-LOCAL_DEX_PREOPT_FLAGS:=$(6)
-LOCAL_BUILT_MODULE_STEM:=$(7)
-LOCAL_MODULE_SUFFIX:=$(suffix $(7))
-LOCAL_PRIVILEGED_MODULE:=$(8)
-LOCAL_VENDOR_MODULE:=$(9)
-LOCAL_MODULE_TARGET_ARCH:=$(10)
-LOCAL_REPLACE_PREBUILT_APK_INSTALLED:=$(11)
-LOCAL_CERTIFICATE:=PRESIGNED
-include $(BUILD_PREBUILT)
-
-# The source prebuilts are extracted in the rule of _pdk_fusion_stamp.
-# Use a touch rule to establish the dependency.
-ifndef PDK_FUSION_PLATFORM_DIR
-$(3) $(11) : $(_pdk_fusion_stamp)
-	$(hide) if [ ! -f $$@ ]; then \
-	  echo 'Error: $$@ does not exist. Check your platform.zip.' 1>&2; \
-	  exit 1; \
-	fi
-	$(hide) touch $$@
-endif
-endef
-
-# We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM).
-LOCAL_PATH := $(BUILD_SYSTEM)
-
-##### Java libraries.
-# Only set up rules for modules that aren't built from source.
-pdk_prebuilt_libraries := $(foreach l,$(PDK.DEXPREOPT.JAVA_LIBRARIES),\
-  $(if $(MODULE.TARGET.JAVA_LIBRARIES.$(l)),,$(l)))
-
-$(foreach l,$(pdk_prebuilt_libraries), $(eval \
-  $(call prebuilt-pdk-java-module,\
-    $(l),\
-    JAVA_LIBRARIES,\
-    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(l).SRC),\
-    $(PDK.DEXPREOPT.$(l).DEX_PREOPT),\
-    $(PDK.DEXPREOPT.$(l).MULTILIB),\
-    $(PDK.DEXPREOPT.$(l).DEX_PREOPT_FLAGS),\
-    javalib.jar,\
-    )))
-
-###### Apps.
-pdk_prebuilt_apps := $(foreach a,$(PDK.DEXPREOPT.APPS),\
-  $(if $(MODULE.TARGET.APPS.$(a)),,$(a)))
-
-$(foreach a,$(pdk_prebuilt_apps), $(eval \
-  $(call prebuilt-pdk-java-module,\
-    $(a),\
-    APPS,\
-    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(a).SRC),\
-    $(PDK.DEXPREOPT.$(a).DEX_PREOPT),\
-    $(PDK.DEXPREOPT.$(a).MULTILIB),\
-    $(PDK.DEXPREOPT.$(a).DEX_PREOPT_FLAGS),\
-    package.apk,\
-    $(PDK.DEXPREOPT.$(a).PRIVILEGED_MODULE),\
-    $(PDK.DEXPREOPT.$(a).VENDOR_MODULE),\
-    $(PDK.DEXPREOPT.$(a).TARGET_ARCH),\
-    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(a).STRIPPED_SRC),\
-    )))
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 46b16ac105..b994b17ad2 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -52,7 +52,7 @@ endif
 # We skip it for unbundled app builds where we cannot build veridex.
 module_run_appcompat :=
 ifeq (true,$(non_system_module))
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK))) # ! unbundled app build
+ifeq (,$(TARGET_BUILD_APPS)) # ! unbundled app build
 ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
 module_run_appcompat := true
 endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 98ab07dd9a..4731250c56 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -41,8 +41,7 @@ $(call add_json_str, Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_
 $(call add_json_bool, Allow_missing_dependencies, $(ALLOW_MISSING_DEPENDENCIES))
 $(call add_json_bool, Unbundled_build, $(TARGET_BUILD_UNBUNDLED))
 $(call add_json_bool, Unbundled_build_apps, $(TARGET_BUILD_APPS))
-$(call add_json_bool, Unbundled_build_sdks_from_source, $(UNBUNDLED_BUILD_SDKS_FROM_SOURCE))
-$(call add_json_bool, Pdk, $(filter true,$(TARGET_BUILD_PDK)))
+$(call add_json_bool, Always_use_prebuilt_sdks, $(TARGET_BUILD_USE_PREBUILT_SDKS))
 
 $(call add_json_bool, Debuggable, $(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
 $(call add_json_bool, Eng, $(filter eng,$(TARGET_BUILD_VARIANT)))
@@ -217,6 +216,7 @@ $(call add_json_bool, InstallExtraFlattenedApexes, $(PRODUCT_INSTALL_EXTRA_FLATT
 $(call add_json_bool, BoardUsesRecoveryAsBoot, $(BOARD_USES_RECOVERY_AS_BOOT))
 
 $(call add_json_list, BoardKernelBinaries, $(BOARD_KERNEL_BINARIES))
+$(call add_json_list, BoardKernelModuleInterfaceVersions, $(BOARD_KERNEL_MODULE_INTERFACE_VERSIONS))
 
 $(call json_end)
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 81dc2dfc4e..7a873229d7 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -111,7 +111,7 @@ endif
 framework_res_package_export :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS_USE_PREBUILT_SDK),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
 else
diff --git a/core/tasks/boot_jars_package_check.mk b/core/tasks/boot_jars_package_check.mk
index a17aaff321..c9a8e27614 100644
--- a/core/tasks/boot_jars_package_check.mk
+++ b/core/tasks/boot_jars_package_check.mk
@@ -17,7 +17,6 @@
 #
 
 ifneq ($(SKIP_BOOT_JARS_CHECK),true)
-ifneq ($(TARGET_BUILD_PDK),true)
 ifdef PRODUCT_BOOT_JARS
 
 intermediates := $(call intermediates-dir-for, PACKAGING, boot-jars-package-check,,COMMON)
@@ -61,5 +60,4 @@ check-boot-jars : $(stamp)
 droidcore : check-boot-jars
 
 endif # PRODUCT_BOOT_JARS
-endif # TARGET_BUILD_PDK not true
 endif # SKIP_BOOT_JARS_CHECK not true
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index f3b4368775..cd5fa8e3b3 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -16,6 +16,7 @@ test_suite_name := cts
 test_suite_tradefed := cts-tradefed
 test_suite_dynamic_config := test/suite_harness/tools/cts-tradefed/DynamicConfig.xml
 test_suite_readme := test/suite_harness/tools/cts-tradefed/README
+include_test_suite_notice := true
 
 include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
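The cts.mk hunk above shows the opt-in pattern for the new notice packaging: a suite sets include_test_suite_notice := true before including compatibility.mk, and compatibility.mk (next hunk) resets the variable after use. A sketch for some other suite definition (the suite names are hypothetical):

  test_suite_name := mysuite
  test_suite_tradefed := mysuite-tradefed
  include_test_suite_notice := true

  include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk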
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 5d820d503a..93185af903 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -47,20 +47,40 @@ test_tools += $(test_suite_tools)
 
 # Include host shared libraries
 host_shared_libs := $(call copy-many-files, $(COMPATIBILITY.$(test_suite_name).HOST_SHARED_LIBRARY.FILES))
 
+compatibility_zip_deps := $(test_artifacts) $(test_tools) $(test_suite_prebuilt_tools) $(test_suite_dynamic_config) $(SOONG_ZIP) $(host_shared_libs)
+compatibility_zip_resources := $(out_dir)/tools $(out_dir)/testcases
+
+# Test Suite NOTICE files
+test_suite_notice_txt := $(out_dir)/NOTICE.txt
+test_suite_notice_html := $(out_dir)/NOTICE.html
+
+$(eval $(call combine-notice-files, html, \
+  $(test_suite_notice_txt), \
+  $(test_suite_notice_html), \
+  "Notices for files contained in the test suites filesystem image in this directory:", \
+  $(HOST_OUT_NOTICE_FILES) $(TARGET_OUT_NOTICE_FILES), \
+  $(compatibility_zip_deps)))
+
+ifeq ($(include_test_suite_notice),true)
+  compatibility_zip_deps += $(test_suite_notice_txt)
+  compatibility_zip_resources += $(test_suite_notice_txt)
+endif
+
 compatibility_zip := $(out_dir).zip
 $(compatibility_zip): PRIVATE_NAME := android-$(test_suite_name)
 $(compatibility_zip): PRIVATE_OUT_DIR := $(out_dir)
 $(compatibility_zip): PRIVATE_TOOLS := $(test_tools) $(test_suite_prebuilt_tools)
 $(compatibility_zip): PRIVATE_SUITE_NAME := $(test_suite_name)
 $(compatibility_zip): PRIVATE_DYNAMIC_CONFIG := $(test_suite_dynamic_config)
-$(compatibility_zip): $(test_artifacts) $(test_tools) $(test_suite_prebuilt_tools) $(test_suite_dynamic_config) $(SOONG_ZIP) $(host_shared_libs) | $(ADB) $(ACP)
+$(compatibility_zip): PRIVATE_RESOURCES := $(compatibility_zip_resources)
+$(compatibility_zip): $(compatibility_zip_deps) | $(ADB) $(ACP)
 # Make dir structure
 	$(hide) mkdir -p $(PRIVATE_OUT_DIR)/tools $(PRIVATE_OUT_DIR)/testcases
 	$(hide) echo $(BUILD_NUMBER_FROM_FILE) > $(PRIVATE_OUT_DIR)/tools/version.txt
 # Copy tools
 	$(hide) cp $(PRIVATE_TOOLS) $(PRIVATE_OUT_DIR)/tools
 	$(if $(PRIVATE_DYNAMIC_CONFIG),$(hide) cp $(PRIVATE_DYNAMIC_CONFIG) $(PRIVATE_OUT_DIR)/testcases/$(PRIVATE_SUITE_NAME).dynamic)
-	$(hide) find $(PRIVATE_OUT_DIR)/tools $(PRIVATE_OUT_DIR)/testcases | sort >$@.list
+	$(hide) find $(PRIVATE_RESOURCES) | sort >$@.list
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(dir $@) -l $@.list
 
 # Reset all input variables
@@ -70,4 +90,5 @@ test_suite_dynamic_config :=
 test_suite_readme :=
 test_suite_prebuilt_tools :=
 test_suite_tools :=
+include_test_suite_notice :=
 host_shared_libs :=
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 6775131dbd..b4df5fef97 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -31,10 +31,6 @@ check-vndk-abi-dump-list-timestamp := $(call intermediates-dir-for,PACKAGING,vnd
 
 ifeq ($(TARGET_IS_64_BIT)|$(TARGET_2ND_ARCH),true|)
 # TODO(b/110429754) remove this condition when we support 64-bit-only device
 check-vndk-list: ;
-else ifeq ($(TARGET_BUILD_PDK),true)
-# b/118634643: don't check VNDK lib list when building PDK. Some libs (libandroid_net.so
-# and some render-script related ones) can't be built in PDK due to missing frameworks/base.
-check-vndk-list: ; else ifeq ($(TARGET_SKIP_CURRENT_VNDK),true) check-vndk-list: ; else ifeq ($(BOARD_VNDK_VERSION),) diff --git a/tools/extract_kernel.py b/tools/extract_kernel.py index 92a647be3c..0046b38faf 100755 --- a/tools/extract_kernel.py +++ b/tools/extract_kernel.py @@ -40,7 +40,7 @@ COMPRESSION_ALGO = ( # LINUX_COMPILE_HOST ") (" LINUX_COMPILER ") " UTS_VERSION "\n"; LINUX_BANNER_PREFIX = b'Linux version ' LINUX_BANNER_REGEX = LINUX_BANNER_PREFIX + \ - r'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) \(.*@.*\) \(.*\) .*\n' + r'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) \(.*@.*\) \((?P<compiler>.*)\) .*\n' def get_from_release(input_bytes, start_idx, key): @@ -82,6 +82,14 @@ def dump_version(input_bytes): return dump_from_release(input_bytes, "version") +def dump_compiler(input_bytes): + """ + Dump the compiler information from input_bytes. Search for the string + "Linux version " and do pattern matching after it. See LINUX_BANNER_REGEX. + """ + return dump_from_release(input_bytes, "compiler") + + def dump_release(input_bytes): """ Dump kernel release, w.x.y-..., from input_bytes. Search for the string @@ -208,6 +216,13 @@ def main(): nargs='?', type=argparse.FileType('wb'), const=sys.stdout) + parser.add_argument('--output-compiler', + help='If specified, write the compiler information. Use stdout if no file ' 'is specified.', + metavar='FILE', + nargs='?', + type=argparse.FileType('wb'), + const=sys.stdout) parser.add_argument('--tools', help='Decompression tools to use. If not specified, PATH ' 'is searched.', @@ -234,6 +249,10 @@ def main(): "kernel release in {}".format(args.input.name)): ret = 1 + if not dump_to_file(args.output_compiler, dump_compiler, input_bytes, + "kernel compiler in {}".format(args.input.name)): + ret = 1 + return ret diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp index 11f92abb17..ace00acca4 100644 --- a/tools/releasetools/Android.bp +++ b/tools/releasetools/Android.bp @@ -93,7 +93,9 @@ python_defaults { srcs: [ "edify_generator.py", "ota_from_target_files.py", + "non_ab_ota.py", "target_files_diff.py", + "ota_utils.py", ], libs: [ "releasetools_check_target_files_vintf", @@ -104,6 +106,12 @@ python_defaults { "brillo_update_payload", "checkvintf", ], + target: { + darwin: { + // required module "brillo_update_payload" is disabled on darwin + enabled: false, + }, + }, } // @@ -297,6 +305,12 @@ python_binary_host { required: [ "delta_generator", ], + target: { + darwin: { + // required module "delta_generator" is disabled on darwin + enabled: false, + }, + }, } python_binary_host { @@ -369,6 +383,12 @@ python_binary_host { required: [ "checkvintf", ], + target: { + darwin: { + // libs dep "releasetools_ota_from_target_files" is disabled on darwin + enabled: false, + }, + }, } python_binary_host { @@ -466,6 +486,12 @@ python_defaults { data: [ "testdata/**/*", ], + target: { + darwin: { + // libs dep "releasetools_ota_from_target_files" is disabled on darwin + enabled: false, + }, + }, } python_test_host { diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py index ef66112355..0edefac9c1 100755 --- a/tools/releasetools/check_target_files_vintf.py +++ b/tools/releasetools/check_target_files_vintf.py @@ -220,6 +220,52 @@ def CheckVintf(inp, info_dict=None): raise ValueError('{} is not a valid directory or zip file'.format(inp)) +def CheckVintfIfTrebleEnabled(target_files, target_info): + """Checks compatibility info of the input target files.
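As a concrete check of the new (?P<compiler>...) capture group above, a self-contained sketch (the banner bytes are made up; extract_kernel.py reads them out of the decompressed kernel image):

import re

LINUX_BANNER_REGEX = (
    rb'Linux version '
    rb'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) '
    rb'\(.*@.*\) \((?P<compiler>.*)\) .*\n')
banner = (b'Linux version 5.4.61-android11 (build@host) '
          b'(Android clang version 11.0.2) #1 SMP PREEMPT\n')
m = re.search(LINUX_BANNER_REGEX, banner)
assert m.group('version') == b'5.4.61'
assert m.group('release') == b'5.4.61-android11'
assert m.group('compiler') == b'Android clang version 11.0.2'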
+ + Metadata used for compatibility verification is retrieved from the target files. + + Compatibility should only be checked for devices that have enabled + Treble support. + + Args: + target_files: Path to zip file containing the source files to be included + for OTA. Can also be the path to an extracted directory. + target_info: The BuildInfo instance that holds the target build info. + """ + + # Will only proceed if the target has enabled the Treble support (as well as + # having a /vendor partition). + if not HasTrebleEnabled(target_files, target_info): + return + + # Skip adding the compatibility package as a workaround for b/114240221. The + # compatibility will always fail on devices without qualified kernels. + if OPTIONS.skip_compatibility_check: + return + + if not CheckVintf(target_files, target_info): + raise RuntimeError("VINTF compatibility check failed") + +def HasTrebleEnabled(target_files, target_info): + def HasVendorPartition(target_files): + if os.path.isdir(target_files): + return os.path.isdir(os.path.join(target_files, "VENDOR")) + if zipfile.is_zipfile(target_files): + return HasPartition(zipfile.ZipFile(target_files), "vendor") + raise ValueError("Unknown target_files argument") + + return (HasVendorPartition(target_files) and + target_info.GetBuildProp("ro.treble.enabled") == "true") + + +def HasPartition(target_files_zip, partition): + try: + target_files_zip.getinfo(partition.upper() + "/") + return True + except KeyError: + return False + def main(argv): args = common.ParseOptions(argv, __doc__) diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py index 1846a67de1..89900d3eea 100644 --- a/tools/releasetools/common.py +++ b/tools/releasetools/common.py @@ -1227,7 +1227,7 @@ def _MakeRamdisk(sourcedir, fs_config_file=None, lz4_ramdisks=False): cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")] p1 = Run(cmd, stdout=subprocess.PIPE) if lz4_ramdisks: - p2 = Run(["lz4", "-l", "-12" , "--favor-decSpeed"], stdin=p1.stdout, + p2 = Run(["lz4", "-l", "-12", "--favor-decSpeed"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno()) else: p2 = Run(["minigzip"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno()) diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py new file mode 100644 index 0000000000..3a8795798b --- /dev/null +++ b/tools/releasetools/non_ab_ota.py @@ -0,0 +1,684 @@ +# Copyright (C) 2020 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
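A minimal sketch of the gate that CheckVintfIfTrebleEnabled applies, assuming an extracted target_files directory (the zip branch in the real code goes through HasPartition on the namelist instead):

import os

def should_check_vintf(target_files_dir, build_props,
                       skip_compatibility_check=False):
    # VINTF compatibility is only meaningful on Treble devices: there must
    # be a vendor partition and ro.treble.enabled must be "true".
    has_vendor = os.path.isdir(os.path.join(target_files_dir, "VENDOR"))
    treble_enabled = build_props.get("ro.treble.enabled") == "true"
    if not (has_vendor and treble_enabled):
        return False
    # b/114240221: explicit opt-out for devices whose kernels would always
    # fail the check.
    return not skip_compatibility_check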
+ +import collections +import logging +import os +import zipfile + +import common +import edify_generator +import verity_utils +from check_target_files_vintf import CheckVintfIfTrebleEnabled, HasPartition +from common import OPTIONS +from ota_utils import UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata, PropertyFiles + +logger = logging.getLogger(__name__) + + +def GetBlockDifferences(target_zip, source_zip, target_info, source_info, + device_specific): + """Returns an ordered dict of block differences with partition name as key.""" + + def GetIncrementalBlockDifferenceForPartition(name): + if not HasPartition(source_zip, name): + raise RuntimeError( + "can't generate incremental that adds {}".format(name)) + + partition_src = common.GetUserImage(name, OPTIONS.source_tmp, source_zip, + info_dict=source_info, + allow_shared_blocks=allow_shared_blocks) + + hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator( + name, 4096, target_info) + partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip, + info_dict=target_info, + allow_shared_blocks=allow_shared_blocks, + hashtree_info_generator=hashtree_info_generator) + + # Check the first block of the source system partition for remount R/W only + # if the filesystem is ext4. + partition_source_info = source_info["fstab"]["/" + name] + check_first_block = partition_source_info.fs_type == "ext4" + # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be + # in zip formats. However with squashfs, a) all files are compressed in LZ4; + # b) the blocks listed in block map may not contain all the bytes for a + # given file (because they're rounded to be 4K-aligned). + partition_target_info = target_info["fstab"]["/" + name] + disable_imgdiff = (partition_source_info.fs_type == "squashfs" or + partition_target_info.fs_type == "squashfs") + return common.BlockDifference(name, partition_tgt, partition_src, + check_first_block, + version=blockimgdiff_version, + disable_imgdiff=disable_imgdiff) + + if source_zip: + # See notes in common.GetUserImage() + allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or + target_info.get('ext4_share_dup_blocks') == "true") + blockimgdiff_version = max( + int(i) for i in target_info.get( + "blockimgdiff_versions", "1").split(",")) + assert blockimgdiff_version >= 3 + + block_diff_dict = collections.OrderedDict() + partition_names = ["system", "vendor", "product", "odm", "system_ext", + "vendor_dlkm", "odm_dlkm"] + for partition in partition_names: + if not HasPartition(target_zip, partition): + continue + # Full OTA update. + if not source_zip: + tgt = common.GetUserImage(partition, OPTIONS.input_tmp, target_zip, + info_dict=target_info, + reset_file_map=True) + block_diff_dict[partition] = common.BlockDifference(partition, tgt, + src=None) + # Incremental OTA update. + else: + block_diff_dict[partition] = GetIncrementalBlockDifferenceForPartition( + partition) + assert "system" in block_diff_dict + + # Get the block diffs from the device specific script. If there is a + # duplicate block diff for a partition, ignore the diff in the generic script + # and use the one in the device specific script instead.
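The override rule in the comment above is, mechanically, a keyed dict merge; a stripped-down sketch in which diff objects only need a .partition attribute (standing in for common.BlockDifference):

import collections

def merge_block_diffs(generic_diffs, device_specific_diffs):
    # generic_diffs: OrderedDict {partition: diff} built by the generic
    # script. device_specific_diffs: list returned by the device-specific
    # FullOTA_/IncrementalOTA_GetBlockDifferences hook, or None.
    merged = collections.OrderedDict(generic_diffs)
    for diff in device_specific_diffs or []:
        # The device-specific diff wins over the generic one.
        merged[diff.partition] = diff
    return merged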
+ if source_zip: + device_specific_diffs = device_specific.IncrementalOTA_GetBlockDifferences() + function_name = "IncrementalOTA_GetBlockDifferences" + else: + device_specific_diffs = device_specific.FullOTA_GetBlockDifferences() + function_name = "FullOTA_GetBlockDifferences" + + if device_specific_diffs: + assert all(isinstance(diff, common.BlockDifference) + for diff in device_specific_diffs), \ + "{} is not returning a list of BlockDifference objects".format( + function_name) + for diff in device_specific_diffs: + if diff.partition in block_diff_dict: + logger.warning("Duplicate block difference found. Device specific block" + " diff for partition '%s' overrides the one in generic" + " script.", diff.partition) + block_diff_dict[diff.partition] = diff + + return block_diff_dict + + +def WriteFullOTAPackage(input_zip, output_file): + target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts) + + # We don't know what version it will be installed on top of. We expect the API + # just won't change very often. Similarly for fstab, it might have changed in + # the target build. + target_api_version = target_info["recovery_api_version"] + script = edify_generator.EdifyGenerator(target_api_version, target_info) + + if target_info.oem_props and not OPTIONS.oem_no_mount: + target_info.WriteMountOemScript(script) + + metadata = GetPackageMetadata(target_info) + + if not OPTIONS.no_signing: + staging_file = common.MakeTempFile(suffix='.zip') + else: + staging_file = output_file + + output_zip = zipfile.ZipFile( + staging_file, "w", compression=zipfile.ZIP_DEFLATED) + + device_specific = common.DeviceSpecificParams( + input_zip=input_zip, + input_version=target_api_version, + output_zip=output_zip, + script=script, + input_tmp=OPTIONS.input_tmp, + metadata=metadata, + info_dict=OPTIONS.info_dict) + + assert HasRecoveryPatch(input_zip, info_dict=OPTIONS.info_dict) + + # Assertions (e.g. downgrade check, device properties check). + ts = target_info.GetBuildProp("ro.build.date.utc") + ts_text = target_info.GetBuildProp("ro.build.date") + script.AssertOlderBuild(ts, ts_text) + + target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount) + device_specific.FullOTA_Assertions() + + block_diff_dict = GetBlockDifferences(target_zip=input_zip, source_zip=None, + target_info=target_info, + source_info=None, + device_specific=device_specific) + + # Two-step package strategy (in chronological order, which is *not* + # the order in which the generated script has things): + # + # if stage is not "2/3" or "3/3": + # write recovery image to boot partition + # set stage to "2/3" + # reboot to boot partition and restart recovery + # else if stage is "2/3": + # write recovery image to recovery partition + # set stage to "3/3" + # reboot to recovery partition and restart recovery + # else: + # (stage must be "3/3") + # set stage to "" + # do normal full package installation: + # wipe and install system, boot image, etc. 
+ # set up system to update recovery partition on first boot + # complete script normally + # (allow recovery to mark itself finished and reboot) + + recovery_img = common.GetBootableImage("recovery.img", "recovery.img", + OPTIONS.input_tmp, "RECOVERY") + if OPTIONS.two_step: + if not target_info.get("multistage_support"): + assert False, "two-step packages not supported by this build" + fs = target_info["fstab"]["/misc"] + assert fs.fs_type.upper() == "EMMC", \ + "two-step packages only supported on devices with EMMC /misc partitions" + bcb_dev = {"bcb_dev": fs.device} + common.ZipWriteStr(output_zip, "recovery.img", recovery_img.data) + script.AppendExtra(""" +if get_stage("%(bcb_dev)s") == "2/3" then +""" % bcb_dev) + + # Stage 2/3: Write recovery image to /recovery (currently running /boot). + script.Comment("Stage 2/3") + script.WriteRawImage("/recovery", "recovery.img") + script.AppendExtra(""" +set_stage("%(bcb_dev)s", "3/3"); +reboot_now("%(bcb_dev)s", "recovery"); +else if get_stage("%(bcb_dev)s") == "3/3" then +""" % bcb_dev) + + # Stage 3/3: Make changes. + script.Comment("Stage 3/3") + + # Dump fingerprints + script.Print("Target: {}".format(target_info.fingerprint)) + + device_specific.FullOTA_InstallBegin() + + # All other partitions as well as the data wipe use 10% of the progress, and + # the update of the system partition takes the remaining progress. + system_progress = 0.9 - (len(block_diff_dict) - 1) * 0.1 + if OPTIONS.wipe_user_data: + system_progress -= 0.1 + progress_dict = {partition: 0.1 for partition in block_diff_dict} + progress_dict["system"] = system_progress + + if target_info.get('use_dynamic_partitions') == "true": + # Use empty source_info_dict to indicate that all partitions / groups must + # be re-added. + dynamic_partitions_diff = common.DynamicPartitionsDifference( + info_dict=OPTIONS.info_dict, + block_diffs=block_diff_dict.values(), + progress_dict=progress_dict) + dynamic_partitions_diff.WriteScript(script, output_zip, + write_verify_script=OPTIONS.verify) + else: + for block_diff in block_diff_dict.values(): + block_diff.WriteScript(script, output_zip, + progress=progress_dict.get(block_diff.partition), + write_verify_script=OPTIONS.verify) + + CheckVintfIfTrebleEnabled(OPTIONS.input_tmp, target_info) + + boot_img = common.GetBootableImage( + "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT") + common.CheckSize(boot_img.data, "boot.img", target_info) + common.ZipWriteStr(output_zip, "boot.img", boot_img.data) + + script.WriteRawImage("/boot", "boot.img") + + script.ShowProgress(0.1, 10) + device_specific.FullOTA_InstallEnd() + + if OPTIONS.extra_script is not None: + script.AppendExtra(OPTIONS.extra_script) + + script.UnmountAll() + + if OPTIONS.wipe_user_data: + script.ShowProgress(0.1, 10) + script.FormatPartition("/data") + + if OPTIONS.two_step: + script.AppendExtra(""" +set_stage("%(bcb_dev)s", ""); +""" % bcb_dev) + script.AppendExtra("else\n") + + # Stage 1/3: Nothing to verify for full OTA. Write recovery image to /boot. + script.Comment("Stage 1/3") + _WriteRecoveryImageToBoot(script, output_zip) + + script.AppendExtra(""" +set_stage("%(bcb_dev)s", "2/3"); +reboot_now("%(bcb_dev)s", ""); +endif; +endif; +""" % bcb_dev) + + script.SetProgress(1) + script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary) + metadata["ota-required-cache"] = str(script.required_cache) + + # We haven't written the metadata entry, which will be done in + # FinalizeMetadata. 
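The chronology in the two-step comment above reduces to a small state machine driven by the stage string stored in the misc (BCB) partition; a sketch with stand-in action strings (the real flow is emitted as edify and executed by recovery, not run in Python):

def next_two_step_action(stage):
    # stage is what get_stage("<bcb_dev>") would return on device.
    if stage not in ("2/3", "3/3"):
        # Stage 1/3: stash the recovery image in /boot and reboot into it.
        return "write recovery to /boot", "2/3"
    if stage == "2/3":
        # Stage 2/3: now running from /boot; flash the real recovery.
        return "write recovery to /recovery", "3/3"
    # Stage 3/3: do the normal full installation and clear the stage.
    return "install system, boot image, etc.", ""

assert next_two_step_action("")[1] == "2/3"
assert next_two_step_action("2/3")[1] == "3/3"
assert next_two_step_action("3/3")[1] == ""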
+ common.ZipClose(output_zip) + + needed_property_files = ( + NonAbOtaPropertyFiles(), + ) + FinalizeMetadata(metadata, staging_file, output_file, needed_property_files) + + +def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file): + target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts) + source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts) + + target_api_version = target_info["recovery_api_version"] + source_api_version = source_info["recovery_api_version"] + if source_api_version == 0: + logger.warning( + "Generating edify script for a source that can't install it.") + + script = edify_generator.EdifyGenerator( + source_api_version, target_info, fstab=source_info["fstab"]) + + if target_info.oem_props or source_info.oem_props: + if not OPTIONS.oem_no_mount: + source_info.WriteMountOemScript(script) + + metadata = GetPackageMetadata(target_info, source_info) + + if not OPTIONS.no_signing: + staging_file = common.MakeTempFile(suffix='.zip') + else: + staging_file = output_file + + output_zip = zipfile.ZipFile( + staging_file, "w", compression=zipfile.ZIP_DEFLATED) + + device_specific = common.DeviceSpecificParams( + source_zip=source_zip, + source_version=source_api_version, + source_tmp=OPTIONS.source_tmp, + target_zip=target_zip, + target_version=target_api_version, + target_tmp=OPTIONS.target_tmp, + output_zip=output_zip, + script=script, + metadata=metadata, + info_dict=source_info) + + source_boot = common.GetBootableImage( + "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info) + target_boot = common.GetBootableImage( + "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info) + updating_boot = (not OPTIONS.two_step and + (source_boot.data != target_boot.data)) + + target_recovery = common.GetBootableImage( + "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY") + + block_diff_dict = GetBlockDifferences(target_zip=target_zip, + source_zip=source_zip, + target_info=target_info, + source_info=source_info, + device_specific=device_specific) + + CheckVintfIfTrebleEnabled(OPTIONS.target_tmp, target_info) + + # Assertions (e.g. device properties check). + target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount) + device_specific.IncrementalOTA_Assertions() + + # Two-step incremental package strategy (in chronological order, + # which is *not* the order in which the generated script has + # things): + # + # if stage is not "2/3" or "3/3": + # do verification on current system + # write recovery image to boot partition + # set stage to "2/3" + # reboot to boot partition and restart recovery + # else if stage is "2/3": + # write recovery image to recovery partition + # set stage to "3/3" + # reboot to recovery partition and restart recovery + # else: + # (stage must be "3/3") + # perform update: + # patch system files, etc. 
+ # force full install of new boot image + # set up system to update recovery partition on first boot + # complete script normally + # (allow recovery to mark itself finished and reboot) + + if OPTIONS.two_step: + if not source_info.get("multistage_support"): + assert False, "two-step packages not supported by this build" + fs = source_info["fstab"]["/misc"] + assert fs.fs_type.upper() == "EMMC", \ + "two-step packages only supported on devices with EMMC /misc partitions" + bcb_dev = {"bcb_dev": fs.device} + common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data) + script.AppendExtra(""" +if get_stage("%(bcb_dev)s") == "2/3" then +""" % bcb_dev) + + # Stage 2/3: Write recovery image to /recovery (currently running /boot). + script.Comment("Stage 2/3") + script.AppendExtra("sleep(20);\n") + script.WriteRawImage("/recovery", "recovery.img") + script.AppendExtra(""" +set_stage("%(bcb_dev)s", "3/3"); +reboot_now("%(bcb_dev)s", "recovery"); +else if get_stage("%(bcb_dev)s") != "3/3" then +""" % bcb_dev) + + # Stage 1/3: (a) Verify the current system. + script.Comment("Stage 1/3") + + # Dump fingerprints + script.Print("Source: {}".format(source_info.fingerprint)) + script.Print("Target: {}".format(target_info.fingerprint)) + + script.Print("Verifying current system...") + + device_specific.IncrementalOTA_VerifyBegin() + + WriteFingerprintAssertion(script, target_info, source_info) + + # Check the required cache size (i.e. stashed blocks). + required_cache_sizes = [diff.required_cache for diff in + block_diff_dict.values()] + if updating_boot: + boot_type, boot_device_expr = common.GetTypeAndDeviceExpr("/boot", + source_info) + d = common.Difference(target_boot, source_boot) + _, _, d = d.ComputePatch() + if d is None: + include_full_boot = True + common.ZipWriteStr(output_zip, "boot.img", target_boot.data) + else: + include_full_boot = False + + logger.info( + "boot target: %d source: %d diff: %d", target_boot.size, + source_boot.size, len(d)) + + common.ZipWriteStr(output_zip, "boot.img.p", d) + + target_expr = 'concat("{}:",{},":{}:{}")'.format( + boot_type, boot_device_expr, target_boot.size, target_boot.sha1) + source_expr = 'concat("{}:",{},":{}:{}")'.format( + boot_type, boot_device_expr, source_boot.size, source_boot.sha1) + script.PatchPartitionExprCheck(target_expr, source_expr) + + required_cache_sizes.append(target_boot.size) + + if required_cache_sizes: + script.CacheFreeSpaceCheck(max(required_cache_sizes)) + + # Verify the existing partitions. + for diff in block_diff_dict.values(): + diff.WriteVerifyScript(script, touched_blocks_only=True) + + device_specific.IncrementalOTA_VerifyEnd() + + if OPTIONS.two_step: + # Stage 1/3: (b) Write recovery image to /boot. + _WriteRecoveryImageToBoot(script, output_zip) + + script.AppendExtra(""" +set_stage("%(bcb_dev)s", "2/3"); +reboot_now("%(bcb_dev)s", ""); +else +""" % bcb_dev) + + # Stage 3/3: Make changes. 
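The target_expr/source_expr templates above expand to edify concat() calls naming the partition plus the expected size and SHA-1. With made-up values (the device expression here is hypothetical; the real one comes from common.GetTypeAndDeviceExpr):

boot_type = "EMMC"
boot_device_expr = 'map_partition("boot")'  # illustrative device expression
size = 34078720
sha1 = "3f786850e387550fdab836ed7e6dc881de23001b"
target_expr = 'concat("{}:",{},":{}:{}")'.format(
    boot_type, boot_device_expr, size, sha1)
# target_expr is now:
# concat("EMMC:",map_partition("boot"),":34078720:3f786850e387550fdab836ed7e6dc881de23001b")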
+ script.Comment("Stage 3/3") + + script.Comment("---- start making changes here ----") + + device_specific.IncrementalOTA_InstallBegin() + + progress_dict = {partition: 0.1 for partition in block_diff_dict} + progress_dict["system"] = 1 - len(block_diff_dict) * 0.1 + + if OPTIONS.source_info_dict.get("use_dynamic_partitions") == "true": + if OPTIONS.target_info_dict.get("use_dynamic_partitions") != "true": + raise RuntimeError( + "can't generate incremental that disables dynamic partitions") + dynamic_partitions_diff = common.DynamicPartitionsDifference( + info_dict=OPTIONS.target_info_dict, + source_info_dict=OPTIONS.source_info_dict, + block_diffs=block_diff_dict.values(), + progress_dict=progress_dict) + dynamic_partitions_diff.WriteScript( + script, output_zip, write_verify_script=OPTIONS.verify) + else: + for block_diff in block_diff_dict.values(): + block_diff.WriteScript(script, output_zip, + progress=progress_dict.get(block_diff.partition), + write_verify_script=OPTIONS.verify) + + if OPTIONS.two_step: + common.ZipWriteStr(output_zip, "boot.img", target_boot.data) + script.WriteRawImage("/boot", "boot.img") + logger.info("writing full boot image (forced by two-step mode)") + + if not OPTIONS.two_step: + if updating_boot: + if include_full_boot: + logger.info("boot image changed; including full.") + script.Print("Installing boot image...") + script.WriteRawImage("/boot", "boot.img") + else: + # Produce the boot image by applying a patch to the current + # contents of the boot partition, and write it back to the + # partition. + logger.info("boot image changed; including patch.") + script.Print("Patching boot image...") + script.ShowProgress(0.1, 10) + target_expr = 'concat("{}:",{},":{}:{}")'.format( + boot_type, boot_device_expr, target_boot.size, target_boot.sha1) + source_expr = 'concat("{}:",{},":{}:{}")'.format( + boot_type, boot_device_expr, source_boot.size, source_boot.sha1) + script.PatchPartitionExpr(target_expr, source_expr, '"boot.img.p"') + else: + logger.info("boot image unchanged; skipping.") + + # Do device-specific installation (eg, write radio image). + device_specific.IncrementalOTA_InstallEnd() + + if OPTIONS.extra_script is not None: + script.AppendExtra(OPTIONS.extra_script) + + if OPTIONS.wipe_user_data: + script.Print("Erasing user data...") + script.FormatPartition("/data") + + if OPTIONS.two_step: + script.AppendExtra(""" +set_stage("%(bcb_dev)s", ""); +endif; +endif; +""" % bcb_dev) + + script.SetProgress(1) + # For downgrade OTAs, we prefer to use the update-binary in the source + # build that is actually newer than the one in the target build. + if OPTIONS.downgrade: + script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary) + else: + script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary) + metadata["ota-required-cache"] = str(script.required_cache) + + # We haven't written the metadata entry yet, which will be handled in + # FinalizeMetadata(). + common.ZipClose(output_zip) + + # Sign the generated zip package unless no_signing is specified. + needed_property_files = ( + NonAbOtaPropertyFiles(), + ) + FinalizeMetadata(metadata, staging_file, output_file, needed_property_files) + + +def GenerateNonAbOtaPackage(target_file, output_file, source_file=None): + """Generates a non-A/B OTA package.""" + # Check the loaded info dicts first. 
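The progress split above follows one rule in both writers: each non-system block diff gets a flat 0.1 of the progress bar (plus 0.1 for a data wipe in the full-OTA case) and system takes whatever remains. Worked through for the incremental path with three partitions:

block_diff_partitions = ["system", "vendor", "product"]
progress_dict = {p: 0.1 for p in block_diff_partitions}
progress_dict["system"] = 1 - len(block_diff_partitions) * 0.1
# vendor and product each get 0.1; system ends up with roughly 0.7.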
+ if OPTIONS.info_dict.get("no_recovery") == "true": + raise common.ExternalError( + "--- target build has specified no recovery ---") + + # Non-A/B OTAs rely on /cache partition to store temporary files. + cache_size = OPTIONS.info_dict.get("cache_size") + if cache_size is None: + logger.warning("--- can't determine the cache partition size ---") + OPTIONS.cache_size = cache_size + + if OPTIONS.extra_script is not None: + with open(OPTIONS.extra_script) as fp: + OPTIONS.extra_script = fp.read() + + if OPTIONS.extracted_input is not None: + OPTIONS.input_tmp = OPTIONS.extracted_input + else: + logger.info("unzipping target target-files...") + OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN) + OPTIONS.target_tmp = OPTIONS.input_tmp + + # If the caller explicitly specified the device-specific extensions path via + # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it + # is present in the target target_files. Otherwise, take the path of the file + # from 'tool_extensions' in the info dict and look for that in the local + # filesystem, relative to the current directory. + if OPTIONS.device_specific is None: + from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py") + if os.path.exists(from_input): + logger.info("(using device-specific extensions from target_files)") + OPTIONS.device_specific = from_input + else: + OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions") + + if OPTIONS.device_specific is not None: + OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific) + + # Generate a full OTA. + if source_file is None: + with zipfile.ZipFile(target_file) as input_zip: + WriteFullOTAPackage( + input_zip, + output_file) + + # Generate an incremental OTA. + else: + logger.info("unzipping source target-files...") + OPTIONS.source_tmp = common.UnzipTemp( + OPTIONS.incremental_source, UNZIP_PATTERN) + with zipfile.ZipFile(target_file) as input_zip, \ + zipfile.ZipFile(source_file) as source_zip: + WriteBlockIncrementalOTAPackage( + input_zip, + source_zip, + output_file) + + +def WriteFingerprintAssertion(script, target_info, source_info): + source_oem_props = source_info.oem_props + target_oem_props = target_info.oem_props + + if source_oem_props is None and target_oem_props is None: + script.AssertSomeFingerprint( + source_info.fingerprint, target_info.fingerprint) + elif source_oem_props is not None and target_oem_props is not None: + script.AssertSomeThumbprint( + target_info.GetBuildProp("ro.build.thumbprint"), + source_info.GetBuildProp("ro.build.thumbprint")) + elif source_oem_props is None and target_oem_props is not None: + script.AssertFingerprintOrThumbprint( + source_info.fingerprint, + target_info.GetBuildProp("ro.build.thumbprint")) + else: + script.AssertFingerprintOrThumbprint( + target_info.fingerprint, + source_info.GetBuildProp("ro.build.thumbprint")) + + +class NonAbOtaPropertyFiles(PropertyFiles): + """The property-files for non-A/B OTA. + + For non-A/B OTA, the property-files string contains the info for the METADATA + entry, with which a system updater can fetch the package metadata prior + to downloading the entire package. + """ + + def __init__(self): + super(NonAbOtaPropertyFiles, self).__init__() + self.name = 'ota-property-files' + + +def _WriteRecoveryImageToBoot(script, output_zip): + """Find and write recovery image to /boot in two-step OTA.
+ + In two-step OTAs, we write recovery image to /boot as the first step so that + we can reboot to there and install a new recovery image to /recovery. + A special "recovery-two-step.img" will be preferred, which encodes the correct + path of "/boot". Otherwise the device may show "device is corrupt" message + when booting into /boot. + + Fall back to using the regular recovery.img if the two-step recovery image + doesn't exist. Note that rebuilding the special image at this point may be + infeasible, because we don't have the desired boot signer and keys when + calling ota_from_target_files.py. + """ + + recovery_two_step_img_name = "recovery-two-step.img" + recovery_two_step_img_path = os.path.join( + OPTIONS.input_tmp, "OTA", recovery_two_step_img_name) + if os.path.exists(recovery_two_step_img_path): + common.ZipWrite( + output_zip, + recovery_two_step_img_path, + arcname=recovery_two_step_img_name) + logger.info( + "two-step package: using %s in stage 1/3", recovery_two_step_img_name) + script.WriteRawImage("/boot", recovery_two_step_img_name) + else: + logger.info("two-step package: using recovery.img in stage 1/3") + # The "recovery.img" entry has been written into package earlier. + script.WriteRawImage("/boot", "recovery.img") + + +def HasRecoveryPatch(target_files_zip, info_dict): + board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true" + + if board_uses_vendorimage: + target_files_dir = "VENDOR" + else: + target_files_dir = "SYSTEM/vendor" + + patch = "%s/recovery-from-boot.p" % target_files_dir + img = "%s/etc/recovery.img" % target_files_dir + + namelist = target_files_zip.namelist() + return patch in namelist or img in namelist diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py index b70044e3f8..f42974f6a5 100755 --- a/tools/releasetools/ota_from_target_files.py +++ b/tools/releasetools/ota_from_target_files.py @@ -206,9 +206,6 @@ A/B OTA specific options from __future__ import print_function -import collections -import copy -import itertools import logging import multiprocessing import os.path @@ -218,12 +215,12 @@ import struct import sys import zipfile -import check_target_files_vintf import common -import edify_generator import target_files_diff -import verity_utils - +from check_target_files_vintf import CheckVintfIfTrebleEnabled +from non_ab_ota import GenerateNonAbOtaPackage +from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata, + PropertyFiles) if sys.hexversion < 0x02070000: print("Python 2.7 or newer is required.", file=sys.stderr) @@ -270,11 +267,10 @@ OPTIONS.force_non_ab = False OPTIONS.boot_variable_file = None -METADATA_NAME = 'META-INF/com/android/metadata' POSTINSTALL_CONFIG = 'META/postinstall_config.txt' DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt' AB_PARTITIONS = 'META/ab_partitions.txt' -UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*'] + # Files to be unzipped for target diffing purpose. 
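HasRecoveryPatch above reduces to a namelist membership test whose directory prefix depends on whether the vendor image is built separately; spelled out against a stub namelist:

def recovery_entries(board_uses_vendorimage):
    d = "VENDOR" if board_uses_vendorimage else "SYSTEM/vendor"
    return "%s/recovery-from-boot.p" % d, "%s/etc/recovery.img" % d

namelist = {"SYSTEM/vendor/etc/recovery.img", "SYSTEM/build.prop"}
patch, img = recovery_entries(board_uses_vendorimage=False)
assert patch in namelist or img in namelist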
TARGET_DIFFING_UNZIP_PATTERN = ['BOOT', 'RECOVERY', 'SYSTEM/*', 'VENDOR/*', 'PRODUCT/*', 'SYSTEM_EXT/*', 'ODM/*', @@ -488,13 +484,6 @@ class Payload(object): compress_type=zipfile.ZIP_STORED) -def SignOutput(temp_zip_name, output_zip_name): - pw = OPTIONS.key_passwords[OPTIONS.package_key] - - common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw, - whole_file=True) - - def _LoadOemDicts(oem_source): """Returns the list of loaded OEM properties dict.""" if not oem_source: @@ -507,658 +496,6 @@ def _LoadOemDicts(oem_source): return oem_dicts -def _WriteRecoveryImageToBoot(script, output_zip): - """Find and write recovery image to /boot in two-step OTA. - - In two-step OTAs, we write recovery image to /boot as the first step so that - we can reboot to there and install a new recovery image to /recovery. - A special "recovery-two-step.img" will be preferred, which encodes the correct - path of "/boot". Otherwise the device may show "device is corrupt" message - when booting into /boot. - - Fall back to using the regular recovery.img if the two-step recovery image - doesn't exist. Note that rebuilding the special image at this point may be - infeasible, because we don't have the desired boot signer and keys when - calling ota_from_target_files.py. - """ - - recovery_two_step_img_name = "recovery-two-step.img" - recovery_two_step_img_path = os.path.join( - OPTIONS.input_tmp, "OTA", recovery_two_step_img_name) - if os.path.exists(recovery_two_step_img_path): - common.ZipWrite( - output_zip, - recovery_two_step_img_path, - arcname=recovery_two_step_img_name) - logger.info( - "two-step package: using %s in stage 1/3", recovery_two_step_img_name) - script.WriteRawImage("/boot", recovery_two_step_img_name) - else: - logger.info("two-step package: using recovery.img in stage 1/3") - # The "recovery.img" entry has been written into package earlier. 
- script.WriteRawImage("/boot", "recovery.img") - - -def HasRecoveryPatch(target_files_zip, info_dict): - board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true" - - if board_uses_vendorimage: - target_files_dir = "VENDOR" - else: - target_files_dir = "SYSTEM/vendor" - - patch = "%s/recovery-from-boot.p" % target_files_dir - img = "%s/etc/recovery.img" % target_files_dir - - namelist = target_files_zip.namelist() - return patch in namelist or img in namelist - - -def HasPartition(target_files_zip, partition): - try: - target_files_zip.getinfo(partition.upper() + "/") - return True - except KeyError: - return False - - -def HasTrebleEnabled(target_files, target_info): - def HasVendorPartition(target_files): - if os.path.isdir(target_files): - return os.path.isdir(os.path.join(target_files, "VENDOR")) - if zipfile.is_zipfile(target_files): - return HasPartition(zipfile.ZipFile(target_files), "vendor") - raise ValueError("Unknown target_files argument") - - return (HasVendorPartition(target_files) and - target_info.GetBuildProp("ro.treble.enabled") == "true") - - -def WriteFingerprintAssertion(script, target_info, source_info): - source_oem_props = source_info.oem_props - target_oem_props = target_info.oem_props - - if source_oem_props is None and target_oem_props is None: - script.AssertSomeFingerprint( - source_info.fingerprint, target_info.fingerprint) - elif source_oem_props is not None and target_oem_props is not None: - script.AssertSomeThumbprint( - target_info.GetBuildProp("ro.build.thumbprint"), - source_info.GetBuildProp("ro.build.thumbprint")) - elif source_oem_props is None and target_oem_props is not None: - script.AssertFingerprintOrThumbprint( - source_info.fingerprint, - target_info.GetBuildProp("ro.build.thumbprint")) - else: - script.AssertFingerprintOrThumbprint( - target_info.fingerprint, - source_info.GetBuildProp("ro.build.thumbprint")) - - -def CheckVintfIfTrebleEnabled(target_files, target_info): - """Checks compatibility info of the input target files. - - Metadata used for compatibility verification is retrieved from target_zip. - - Compatibility should only be checked for devices that have enabled - Treble support. - - Args: - target_files: Path to zip file containing the source files to be included - for OTA. Can also be the path to extracted directory. - target_info: The BuildInfo instance that holds the target build info. - """ - - # Will only proceed if the target has enabled the Treble support (as well as - # having a /vendor partition). - if not HasTrebleEnabled(target_files, target_info): - return - - # Skip adding the compatibility package as a workaround for b/114240221. The - # compatibility will always fail on devices without qualified kernels. 
- if OPTIONS.skip_compatibility_check: - return - - if not check_target_files_vintf.CheckVintf(target_files, target_info): - raise RuntimeError("VINTF compatibility check failed") - - -def GetBlockDifferences(target_zip, source_zip, target_info, source_info, - device_specific): - """Returns a ordered dict of block differences with partition name as key.""" - - def GetIncrementalBlockDifferenceForPartition(name): - if not HasPartition(source_zip, name): - raise RuntimeError( - "can't generate incremental that adds {}".format(name)) - - partition_src = common.GetUserImage(name, OPTIONS.source_tmp, source_zip, - info_dict=source_info, - allow_shared_blocks=allow_shared_blocks) - - hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator( - name, 4096, target_info) - partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip, - info_dict=target_info, - allow_shared_blocks=allow_shared_blocks, - hashtree_info_generator=hashtree_info_generator) - - # Check the first block of the source system partition for remount R/W only - # if the filesystem is ext4. - partition_source_info = source_info["fstab"]["/" + name] - check_first_block = partition_source_info.fs_type == "ext4" - # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be - # in zip formats. However with squashfs, a) all files are compressed in LZ4; - # b) the blocks listed in block map may not contain all the bytes for a - # given file (because they're rounded to be 4K-aligned). - partition_target_info = target_info["fstab"]["/" + name] - disable_imgdiff = (partition_source_info.fs_type == "squashfs" or - partition_target_info.fs_type == "squashfs") - return common.BlockDifference(name, partition_tgt, partition_src, - check_first_block, - version=blockimgdiff_version, - disable_imgdiff=disable_imgdiff) - - if source_zip: - # See notes in common.GetUserImage() - allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or - target_info.get('ext4_share_dup_blocks') == "true") - blockimgdiff_version = max( - int(i) for i in target_info.get( - "blockimgdiff_versions", "1").split(",")) - assert blockimgdiff_version >= 3 - - block_diff_dict = collections.OrderedDict() - partition_names = ["system", "vendor", "product", "odm", "system_ext", - "vendor_dlkm", "odm_dlkm"] - for partition in partition_names: - if not HasPartition(target_zip, partition): - continue - # Full OTA update. - if not source_zip: - tgt = common.GetUserImage(partition, OPTIONS.input_tmp, target_zip, - info_dict=target_info, - reset_file_map=True) - block_diff_dict[partition] = common.BlockDifference(partition, tgt, - src=None) - # Incremental OTA update. - else: - block_diff_dict[partition] = GetIncrementalBlockDifferenceForPartition( - partition) - assert "system" in block_diff_dict - - # Get the block diffs from the device specific script. If there is a - # duplicate block diff for a partition, ignore the diff in the generic script - # and use the one in the device specific script instead. 
- if source_zip: - device_specific_diffs = device_specific.IncrementalOTA_GetBlockDifferences() - function_name = "IncrementalOTA_GetBlockDifferences" - else: - device_specific_diffs = device_specific.FullOTA_GetBlockDifferences() - function_name = "FullOTA_GetBlockDifferences" - - if device_specific_diffs: - assert all(isinstance(diff, common.BlockDifference) - for diff in device_specific_diffs), \ - "{} is not returning a list of BlockDifference objects".format( - function_name) - for diff in device_specific_diffs: - if diff.partition in block_diff_dict: - logger.warning("Duplicate block difference found. Device specific block" - " diff for partition '%s' overrides the one in generic" - " script.", diff.partition) - block_diff_dict[diff.partition] = diff - - return block_diff_dict - - -def WriteFullOTAPackage(input_zip, output_file): - target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts) - - # We don't know what version it will be installed on top of. We expect the API - # just won't change very often. Similarly for fstab, it might have changed in - # the target build. - target_api_version = target_info["recovery_api_version"] - script = edify_generator.EdifyGenerator(target_api_version, target_info) - - if target_info.oem_props and not OPTIONS.oem_no_mount: - target_info.WriteMountOemScript(script) - - metadata = GetPackageMetadata(target_info) - - if not OPTIONS.no_signing: - staging_file = common.MakeTempFile(suffix='.zip') - else: - staging_file = output_file - - output_zip = zipfile.ZipFile( - staging_file, "w", compression=zipfile.ZIP_DEFLATED) - - device_specific = common.DeviceSpecificParams( - input_zip=input_zip, - input_version=target_api_version, - output_zip=output_zip, - script=script, - input_tmp=OPTIONS.input_tmp, - metadata=metadata, - info_dict=OPTIONS.info_dict) - - assert HasRecoveryPatch(input_zip, info_dict=OPTIONS.info_dict) - - # Assertions (e.g. downgrade check, device properties check). - ts = target_info.GetBuildProp("ro.build.date.utc") - ts_text = target_info.GetBuildProp("ro.build.date") - script.AssertOlderBuild(ts, ts_text) - - target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount) - device_specific.FullOTA_Assertions() - - block_diff_dict = GetBlockDifferences(target_zip=input_zip, source_zip=None, - target_info=target_info, - source_info=None, - device_specific=device_specific) - - # Two-step package strategy (in chronological order, which is *not* - # the order in which the generated script has things): - # - # if stage is not "2/3" or "3/3": - # write recovery image to boot partition - # set stage to "2/3" - # reboot to boot partition and restart recovery - # else if stage is "2/3": - # write recovery image to recovery partition - # set stage to "3/3" - # reboot to recovery partition and restart recovery - # else: - # (stage must be "3/3") - # set stage to "" - # do normal full package installation: - # wipe and install system, boot image, etc. 
- # set up system to update recovery partition on first boot - # complete script normally - # (allow recovery to mark itself finished and reboot) - - recovery_img = common.GetBootableImage("recovery.img", "recovery.img", - OPTIONS.input_tmp, "RECOVERY") - if OPTIONS.two_step: - if not target_info.get("multistage_support"): - assert False, "two-step packages not supported by this build" - fs = target_info["fstab"]["/misc"] - assert fs.fs_type.upper() == "EMMC", \ - "two-step packages only supported on devices with EMMC /misc partitions" - bcb_dev = {"bcb_dev": fs.device} - common.ZipWriteStr(output_zip, "recovery.img", recovery_img.data) - script.AppendExtra(""" -if get_stage("%(bcb_dev)s") == "2/3" then -""" % bcb_dev) - - # Stage 2/3: Write recovery image to /recovery (currently running /boot). - script.Comment("Stage 2/3") - script.WriteRawImage("/recovery", "recovery.img") - script.AppendExtra(""" -set_stage("%(bcb_dev)s", "3/3"); -reboot_now("%(bcb_dev)s", "recovery"); -else if get_stage("%(bcb_dev)s") == "3/3" then -""" % bcb_dev) - - # Stage 3/3: Make changes. - script.Comment("Stage 3/3") - - # Dump fingerprints - script.Print("Target: {}".format(target_info.fingerprint)) - - device_specific.FullOTA_InstallBegin() - - # All other partitions as well as the data wipe use 10% of the progress, and - # the update of the system partition takes the remaining progress. - system_progress = 0.9 - (len(block_diff_dict) - 1) * 0.1 - if OPTIONS.wipe_user_data: - system_progress -= 0.1 - progress_dict = {partition: 0.1 for partition in block_diff_dict} - progress_dict["system"] = system_progress - - if target_info.get('use_dynamic_partitions') == "true": - # Use empty source_info_dict to indicate that all partitions / groups must - # be re-added. - dynamic_partitions_diff = common.DynamicPartitionsDifference( - info_dict=OPTIONS.info_dict, - block_diffs=block_diff_dict.values(), - progress_dict=progress_dict) - dynamic_partitions_diff.WriteScript(script, output_zip, - write_verify_script=OPTIONS.verify) - else: - for block_diff in block_diff_dict.values(): - block_diff.WriteScript(script, output_zip, - progress=progress_dict.get(block_diff.partition), - write_verify_script=OPTIONS.verify) - - CheckVintfIfTrebleEnabled(OPTIONS.input_tmp, target_info) - - boot_img = common.GetBootableImage( - "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT") - common.CheckSize(boot_img.data, "boot.img", target_info) - common.ZipWriteStr(output_zip, "boot.img", boot_img.data) - - script.WriteRawImage("/boot", "boot.img") - - script.ShowProgress(0.1, 10) - device_specific.FullOTA_InstallEnd() - - if OPTIONS.extra_script is not None: - script.AppendExtra(OPTIONS.extra_script) - - script.UnmountAll() - - if OPTIONS.wipe_user_data: - script.ShowProgress(0.1, 10) - script.FormatPartition("/data") - - if OPTIONS.two_step: - script.AppendExtra(""" -set_stage("%(bcb_dev)s", ""); -""" % bcb_dev) - script.AppendExtra("else\n") - - # Stage 1/3: Nothing to verify for full OTA. Write recovery image to /boot. - script.Comment("Stage 1/3") - _WriteRecoveryImageToBoot(script, output_zip) - - script.AppendExtra(""" -set_stage("%(bcb_dev)s", "2/3"); -reboot_now("%(bcb_dev)s", ""); -endif; -endif; -""" % bcb_dev) - - script.SetProgress(1) - script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary) - metadata["ota-required-cache"] = str(script.required_cache) - - # We haven't written the metadata entry, which will be done in - # FinalizeMetadata. 
- common.ZipClose(output_zip) - - needed_property_files = ( - NonAbOtaPropertyFiles(), - ) - FinalizeMetadata(metadata, staging_file, output_file, needed_property_files) - - -def WriteMetadata(metadata, output): - """Writes the metadata to the zip archive or a file. - - Args: - metadata: The metadata dict for the package. - output: A ZipFile object or a string of the output file path. - """ - - value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())]) - if isinstance(output, zipfile.ZipFile): - common.ZipWriteStr(output, METADATA_NAME, value, - compress_type=zipfile.ZIP_STORED) - return - - with open(output, 'w') as f: - f.write(value) - - -def HandleDowngradeMetadata(metadata, target_info, source_info): - # Only incremental OTAs are allowed to reach here. - assert OPTIONS.incremental_source is not None - - post_timestamp = target_info.GetBuildProp("ro.build.date.utc") - pre_timestamp = source_info.GetBuildProp("ro.build.date.utc") - is_downgrade = int(post_timestamp) < int(pre_timestamp) - - if OPTIONS.downgrade: - if not is_downgrade: - raise RuntimeError( - "--downgrade or --override_timestamp specified but no downgrade " - "detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp)) - metadata["ota-downgrade"] = "yes" - else: - if is_downgrade: - raise RuntimeError( - "Downgrade detected based on timestamp check: pre: %s, post: %s. " - "Need to specify --override_timestamp OR --downgrade to allow " - "building the incremental." % (pre_timestamp, post_timestamp)) - - -def GetPackageMetadata(target_info, source_info=None): - """Generates and returns the metadata dict. - - It generates a dict() that contains the info to be written into an OTA - package (META-INF/com/android/metadata). It also handles the detection of - downgrade / data wipe based on the global options. - - Args: - target_info: The BuildInfo instance that holds the target build info. - source_info: The BuildInfo instance that holds the source build info, or - None if generating full OTA. - - Returns: - A dict to be written into package metadata entry. 
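For reference, the WriteMetadata serialization above (removed here, and presumably relocated to ota_utils.py alongside the helpers imported earlier) is just sorted key=value lines; on a toy dict:

metadata = {"ota-type": "BLOCK", "post-timestamp": "1600000000",
            "pre-device": "walleye"}
value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
# value is now:
# ota-type=BLOCK
# post-timestamp=1600000000
# pre-device=walleye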
- """ - assert isinstance(target_info, common.BuildInfo) - assert source_info is None or isinstance(source_info, common.BuildInfo) - - separator = '|' - - boot_variable_values = {} - if OPTIONS.boot_variable_file: - d = common.LoadDictionaryFromFile(OPTIONS.boot_variable_file) - for key, values in d.items(): - boot_variable_values[key] = [val.strip() for val in values.split(',')] - - post_build_devices, post_build_fingerprints = \ - CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values) - metadata = { - 'post-build': separator.join(sorted(post_build_fingerprints)), - 'post-build-incremental': target_info.GetBuildProp( - 'ro.build.version.incremental'), - 'post-sdk-level': target_info.GetBuildProp( - 'ro.build.version.sdk'), - 'post-security-patch-level': target_info.GetBuildProp( - 'ro.build.version.security_patch'), - } - - if target_info.is_ab and not OPTIONS.force_non_ab: - metadata['ota-type'] = 'AB' - metadata['ota-required-cache'] = '0' - else: - metadata['ota-type'] = 'BLOCK' - - if OPTIONS.wipe_user_data: - metadata['ota-wipe'] = 'yes' - - if OPTIONS.retrofit_dynamic_partitions: - metadata['ota-retrofit-dynamic-partitions'] = 'yes' - - is_incremental = source_info is not None - if is_incremental: - pre_build_devices, pre_build_fingerprints = \ - CalculateRuntimeDevicesAndFingerprints(source_info, - boot_variable_values) - metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints)) - metadata['pre-build-incremental'] = source_info.GetBuildProp( - 'ro.build.version.incremental') - metadata['pre-device'] = separator.join(sorted(pre_build_devices)) - else: - metadata['pre-device'] = separator.join(sorted(post_build_devices)) - - # Use the actual post-timestamp, even for a downgrade case. - metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc') - - # Detect downgrades and set up downgrade flags accordingly. - if is_incremental: - HandleDowngradeMetadata(metadata, target_info, source_info) - - return metadata - - -class PropertyFiles(object): - """A class that computes the property-files string for an OTA package. - - A property-files string is a comma-separated string that contains the - offset/size info for an OTA package. The entries, which must be ZIP_STORED, - can be fetched directly with the package URL along with the offset/size info. - These strings can be used for streaming A/B OTAs, or allowing an updater to - download package metadata entry directly, without paying the cost of - downloading entire package. - - Computing the final property-files string requires two passes. Because doing - the whole package signing (with signapk.jar) will possibly reorder the ZIP - entries, which may in turn invalidate earlier computed ZIP entry offset/size - values. - - This class provides functions to be called for each pass. The general flow is - as follows. - - property_files = PropertyFiles() - # The first pass, which writes placeholders before doing initial signing. - property_files.Compute() - SignOutput() - - # The second pass, by replacing the placeholders with actual data. - property_files.Finalize() - SignOutput() - - And the caller can additionally verify the final result. - - property_files.Verify() - """ - - def __init__(self): - self.name = None - self.required = () - self.optional = () - - def Compute(self, input_zip): - """Computes and returns a property-files string with placeholders. 
- - We reserve extra space for the offset and size of the metadata entry itself, - although we don't know the final values until the package gets signed. - - Args: - input_zip: The input ZIP file. - - Returns: - A string with placeholders for the metadata offset/size info, e.g. - "payload.bin:679:343,payload_properties.txt:378:45,metadata: ". - """ - return self.GetPropertyFilesString(input_zip, reserve_space=True) - - class InsufficientSpaceException(Exception): - pass - - def Finalize(self, input_zip, reserved_length): - """Finalizes a property-files string with actual METADATA offset/size info. - - The input ZIP file has been signed, with the ZIP entries in the desired - place (signapk.jar will possibly reorder the ZIP entries). Now we compute - the ZIP entry offsets and construct the property-files string with actual - data. Note that during this process, we must pad the property-files string - to the reserved length, so that the METADATA entry size remains the same. - Otherwise the entries' offsets and sizes may change again. - - Args: - input_zip: The input ZIP file. - reserved_length: The reserved length of the property-files string during - the call to Compute(). The final string must be no more than this - size. - - Returns: - A property-files string including the metadata offset/size info, e.g. - "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379 ". - - Raises: - InsufficientSpaceException: If the reserved length is insufficient to hold - the final string. - """ - result = self.GetPropertyFilesString(input_zip, reserve_space=False) - if len(result) > reserved_length: - raise self.InsufficientSpaceException( - 'Insufficient reserved space: reserved={}, actual={}'.format( - reserved_length, len(result))) - - result += ' ' * (reserved_length - len(result)) - return result - - def Verify(self, input_zip, expected): - """Verifies the input ZIP file contains the expected property-files string. - - Args: - input_zip: The input ZIP file. - expected: The property-files string that's computed from Finalize(). - - Raises: - AssertionError: On finding a mismatch. - """ - actual = self.GetPropertyFilesString(input_zip) - assert actual == expected, \ - "Mismatching streaming metadata: {} vs {}.".format(actual, expected) - - def GetPropertyFilesString(self, zip_file, reserve_space=False): - """ - Constructs the property-files string per request. - - Args: - zip_file: The input ZIP file. - reserved_length: The reserved length of the property-files string. - - Returns: - A property-files string including the metadata offset/size info, e.g. - "payload.bin:679:343,payload_properties.txt:378:45,metadata: ". - """ - - def ComputeEntryOffsetSize(name): - """Computes the zip entry offset and size.""" - info = zip_file.getinfo(name) - offset = info.header_offset - offset += zipfile.sizeFileHeader - offset += len(info.extra) + len(info.filename) - size = info.file_size - return '%s:%d:%d' % (os.path.basename(name), offset, size) - - tokens = [] - tokens.extend(self._GetPrecomputed(zip_file)) - for entry in self.required: - tokens.append(ComputeEntryOffsetSize(entry)) - for entry in self.optional: - if entry in zip_file.namelist(): - tokens.append(ComputeEntryOffsetSize(entry)) - - # 'META-INF/com/android/metadata' is required. We don't know its actual - # offset and length (as well as the values for other entries). So we reserve - # 15-byte as a placeholder ('offset:length'), which is sufficient to cover - # the space for metadata entry. Because 'offset' allows a max of 10-digit - # (i.e. 
~9 GiB), with a max of 4-digit for the length. Note that all the - # reserved space serves the metadata entry only. - if reserve_space: - tokens.append('metadata:' + ' ' * 15) - else: - tokens.append(ComputeEntryOffsetSize(METADATA_NAME)) - - return ','.join(tokens) - - def _GetPrecomputed(self, input_zip): - """Computes the additional tokens to be included into the property-files. - - This applies to tokens without actual ZIP entries, such as - payload_metadadata.bin. We want to expose the offset/size to updaters, so - that they can download the payload metadata directly with the info. - - Args: - input_zip: The input zip file. - - Returns: - A list of strings (tokens) to be added to the property-files string. - """ - # pylint: disable=no-self-use - # pylint: disable=unused-argument - return [] - - class StreamingPropertyFiles(PropertyFiles): """A subclass for computing the property-files for streaming A/B OTAs.""" @@ -1264,362 +601,6 @@ class AbOtaPropertyFiles(StreamingPropertyFiles): return (payload_offset, metadata_total) -class NonAbOtaPropertyFiles(PropertyFiles): - """The property-files for non-A/B OTA. - - For non-A/B OTA, the property-files string contains the info for METADATA - entry, with which a system updater can be fetched the package metadata prior - to downloading the entire package. - """ - - def __init__(self): - super(NonAbOtaPropertyFiles, self).__init__() - self.name = 'ota-property-files' - - -def FinalizeMetadata(metadata, input_file, output_file, needed_property_files): - """Finalizes the metadata and signs an A/B OTA package. - - In order to stream an A/B OTA package, we need 'ota-streaming-property-files' - that contains the offsets and sizes for the ZIP entries. An example - property-files string is as follows. - - "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379" - - OTA server can pass down this string, in addition to the package URL, to the - system update client. System update client can then fetch individual ZIP - entries (ZIP_STORED) directly at the given offset of the URL. - - Args: - metadata: The metadata dict for the package. - input_file: The input ZIP filename that doesn't contain the package METADATA - entry yet. - output_file: The final output ZIP filename. - needed_property_files: The list of PropertyFiles' to be generated. - """ - - def ComputeAllPropertyFiles(input_file, needed_property_files): - # Write the current metadata entry with placeholders. - with zipfile.ZipFile(input_file) as input_zip: - for property_files in needed_property_files: - metadata[property_files.name] = property_files.Compute(input_zip) - namelist = input_zip.namelist() - - if METADATA_NAME in namelist: - common.ZipDelete(input_file, METADATA_NAME) - output_zip = zipfile.ZipFile(input_file, 'a') - WriteMetadata(metadata, output_zip) - common.ZipClose(output_zip) - - if OPTIONS.no_signing: - return input_file - - prelim_signing = common.MakeTempFile(suffix='.zip') - SignOutput(input_file, prelim_signing) - return prelim_signing - - def FinalizeAllPropertyFiles(prelim_signing, needed_property_files): - with zipfile.ZipFile(prelim_signing) as prelim_signing_zip: - for property_files in needed_property_files: - metadata[property_files.name] = property_files.Finalize( - prelim_signing_zip, len(metadata[property_files.name])) - - # SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP - # entries, as well as padding the entry headers. We do a preliminary signing - # (with an incomplete metadata entry) to allow that to happen. 
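Two numeric details of the property-files machinery being moved out are worth pinning down. The data offset of a ZIP entry is its local-file-header offset plus the fixed 30-byte header plus the variable-length filename and extra fields, and the metadata token is reserved at a fixed width so the finalized string can be padded back to the identical length. A sketch (zipfile.sizeFileHeader is a real constant; the token values are illustrative):

import zipfile

def entry_offset_size(info):
    # info is a zipfile.ZipInfo; data begins after the fixed local header
    # plus the variable-length filename and extra field.
    offset = (info.header_offset + zipfile.sizeFileHeader
              + len(info.filename) + len(info.extra))
    return offset, info.file_size

# Compute() pass: 'metadata:' plus 15 reserved bytes.
reserved = 'metadata:' + ' ' * 15
# Finalize() pass: pad the real token back to the reserved width.
token = 'metadata:69:379'
final = token + ' ' * (len(reserved) - len(token))
assert len(final) == len(reserved)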
Then compute - # the ZIP entry offsets, write back the final metadata and do the final - # signing. - prelim_signing = ComputeAllPropertyFiles(input_file, needed_property_files) - try: - FinalizeAllPropertyFiles(prelim_signing, needed_property_files) - except PropertyFiles.InsufficientSpaceException: - # Even with the preliminary signing, the entry orders may change - # dramatically, which leads to insufficiently reserved space during the - # first call to ComputeAllPropertyFiles(). In that case, we redo all the - # preliminary signing works, based on the already ordered ZIP entries, to - # address the issue. - prelim_signing = ComputeAllPropertyFiles( - prelim_signing, needed_property_files) - FinalizeAllPropertyFiles(prelim_signing, needed_property_files) - - # Replace the METADATA entry. - common.ZipDelete(prelim_signing, METADATA_NAME) - output_zip = zipfile.ZipFile(prelim_signing, 'a') - WriteMetadata(metadata, output_zip) - common.ZipClose(output_zip) - - # Re-sign the package after updating the metadata entry. - if OPTIONS.no_signing: - output_file = prelim_signing - else: - SignOutput(prelim_signing, output_file) - - # Reopen the final signed zip to double check the streaming metadata. - with zipfile.ZipFile(output_file) as output_zip: - for property_files in needed_property_files: - property_files.Verify(output_zip, metadata[property_files.name].strip()) - - # If requested, dump the metadata to a separate file. - output_metadata_path = OPTIONS.output_metadata_path - if output_metadata_path: - WriteMetadata(metadata, output_metadata_path) - - -def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file): - target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts) - source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts) - - target_api_version = target_info["recovery_api_version"] - source_api_version = source_info["recovery_api_version"] - if source_api_version == 0: - logger.warning( - "Generating edify script for a source that can't install it.") - - script = edify_generator.EdifyGenerator( - source_api_version, target_info, fstab=source_info["fstab"]) - - if target_info.oem_props or source_info.oem_props: - if not OPTIONS.oem_no_mount: - source_info.WriteMountOemScript(script) - - metadata = GetPackageMetadata(target_info, source_info) - - if not OPTIONS.no_signing: - staging_file = common.MakeTempFile(suffix='.zip') - else: - staging_file = output_file - - output_zip = zipfile.ZipFile( - staging_file, "w", compression=zipfile.ZIP_DEFLATED) - - device_specific = common.DeviceSpecificParams( - source_zip=source_zip, - source_version=source_api_version, - source_tmp=OPTIONS.source_tmp, - target_zip=target_zip, - target_version=target_api_version, - target_tmp=OPTIONS.target_tmp, - output_zip=output_zip, - script=script, - metadata=metadata, - info_dict=source_info) - - source_boot = common.GetBootableImage( - "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info) - target_boot = common.GetBootableImage( - "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info) - updating_boot = (not OPTIONS.two_step and - (source_boot.data != target_boot.data)) - - target_recovery = common.GetBootableImage( - "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY") - - block_diff_dict = GetBlockDifferences(target_zip=target_zip, - source_zip=source_zip, - target_info=target_info, - source_info=source_info, - device_specific=device_specific) - - CheckVintfIfTrebleEnabled(OPTIONS.target_tmp, target_info) - - # 
Assertions (e.g. device properties check). - target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount) - device_specific.IncrementalOTA_Assertions() - - # Two-step incremental package strategy (in chronological order, - # which is *not* the order in which the generated script has - # things): - # - # if stage is not "2/3" or "3/3": - # do verification on current system - # write recovery image to boot partition - # set stage to "2/3" - # reboot to boot partition and restart recovery - # else if stage is "2/3": - # write recovery image to recovery partition - # set stage to "3/3" - # reboot to recovery partition and restart recovery - # else: - # (stage must be "3/3") - # perform update: - # patch system files, etc. - # force full install of new boot image - # set up system to update recovery partition on first boot - # complete script normally - # (allow recovery to mark itself finished and reboot) - - if OPTIONS.two_step: - if not source_info.get("multistage_support"): - assert False, "two-step packages not supported by this build" - fs = source_info["fstab"]["/misc"] - assert fs.fs_type.upper() == "EMMC", \ - "two-step packages only supported on devices with EMMC /misc partitions" - bcb_dev = {"bcb_dev": fs.device} - common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data) - script.AppendExtra(""" -if get_stage("%(bcb_dev)s") == "2/3" then -""" % bcb_dev) - - # Stage 2/3: Write recovery image to /recovery (currently running /boot). - script.Comment("Stage 2/3") - script.AppendExtra("sleep(20);\n") - script.WriteRawImage("/recovery", "recovery.img") - script.AppendExtra(""" -set_stage("%(bcb_dev)s", "3/3"); -reboot_now("%(bcb_dev)s", "recovery"); -else if get_stage("%(bcb_dev)s") != "3/3" then -""" % bcb_dev) - - # Stage 1/3: (a) Verify the current system. - script.Comment("Stage 1/3") - - # Dump fingerprints - script.Print("Source: {}".format(source_info.fingerprint)) - script.Print("Target: {}".format(target_info.fingerprint)) - - script.Print("Verifying current system...") - - device_specific.IncrementalOTA_VerifyBegin() - - WriteFingerprintAssertion(script, target_info, source_info) - - # Check the required cache size (i.e. stashed blocks). - required_cache_sizes = [diff.required_cache for diff in - block_diff_dict.values()] - if updating_boot: - boot_type, boot_device_expr = common.GetTypeAndDeviceExpr("/boot", - source_info) - d = common.Difference(target_boot, source_boot) - _, _, d = d.ComputePatch() - if d is None: - include_full_boot = True - common.ZipWriteStr(output_zip, "boot.img", target_boot.data) - else: - include_full_boot = False - - logger.info( - "boot target: %d source: %d diff: %d", target_boot.size, - source_boot.size, len(d)) - - common.ZipWriteStr(output_zip, "boot.img.p", d) - - target_expr = 'concat("{}:",{},":{}:{}")'.format( - boot_type, boot_device_expr, target_boot.size, target_boot.sha1) - source_expr = 'concat("{}:",{},":{}:{}")'.format( - boot_type, boot_device_expr, source_boot.size, source_boot.sha1) - script.PatchPartitionExprCheck(target_expr, source_expr) - - required_cache_sizes.append(target_boot.size) - - if required_cache_sizes: - script.CacheFreeSpaceCheck(max(required_cache_sizes)) - - # Verify the existing partitions. - for diff in block_diff_dict.values(): - diff.WriteVerifyScript(script, touched_blocks_only=True) - - device_specific.IncrementalOTA_VerifyEnd() - - if OPTIONS.two_step: - # Stage 1/3: (b) Write recovery image to /boot. 
- _WriteRecoveryImageToBoot(script, output_zip) - - script.AppendExtra(""" -set_stage("%(bcb_dev)s", "2/3"); -reboot_now("%(bcb_dev)s", ""); -else -""" % bcb_dev) - - # Stage 3/3: Make changes. - script.Comment("Stage 3/3") - - script.Comment("---- start making changes here ----") - - device_specific.IncrementalOTA_InstallBegin() - - progress_dict = {partition: 0.1 for partition in block_diff_dict} - progress_dict["system"] = 1 - len(block_diff_dict) * 0.1 - - if OPTIONS.source_info_dict.get("use_dynamic_partitions") == "true": - if OPTIONS.target_info_dict.get("use_dynamic_partitions") != "true": - raise RuntimeError( - "can't generate incremental that disables dynamic partitions") - dynamic_partitions_diff = common.DynamicPartitionsDifference( - info_dict=OPTIONS.target_info_dict, - source_info_dict=OPTIONS.source_info_dict, - block_diffs=block_diff_dict.values(), - progress_dict=progress_dict) - dynamic_partitions_diff.WriteScript( - script, output_zip, write_verify_script=OPTIONS.verify) - else: - for block_diff in block_diff_dict.values(): - block_diff.WriteScript(script, output_zip, - progress=progress_dict.get(block_diff.partition), - write_verify_script=OPTIONS.verify) - - if OPTIONS.two_step: - common.ZipWriteStr(output_zip, "boot.img", target_boot.data) - script.WriteRawImage("/boot", "boot.img") - logger.info("writing full boot image (forced by two-step mode)") - - if not OPTIONS.two_step: - if updating_boot: - if include_full_boot: - logger.info("boot image changed; including full.") - script.Print("Installing boot image...") - script.WriteRawImage("/boot", "boot.img") - else: - # Produce the boot image by applying a patch to the current - # contents of the boot partition, and write it back to the - # partition. - logger.info("boot image changed; including patch.") - script.Print("Patching boot image...") - script.ShowProgress(0.1, 10) - target_expr = 'concat("{}:",{},":{}:{}")'.format( - boot_type, boot_device_expr, target_boot.size, target_boot.sha1) - source_expr = 'concat("{}:",{},":{}:{}")'.format( - boot_type, boot_device_expr, source_boot.size, source_boot.sha1) - script.PatchPartitionExpr(target_expr, source_expr, '"boot.img.p"') - else: - logger.info("boot image unchanged; skipping.") - - # Do device-specific installation (eg, write radio image). - device_specific.IncrementalOTA_InstallEnd() - - if OPTIONS.extra_script is not None: - script.AppendExtra(OPTIONS.extra_script) - - if OPTIONS.wipe_user_data: - script.Print("Erasing user data...") - script.FormatPartition("/data") - - if OPTIONS.two_step: - script.AppendExtra(""" -set_stage("%(bcb_dev)s", ""); -endif; -endif; -""" % bcb_dev) - - script.SetProgress(1) - # For downgrade OTAs, we prefer to use the update-binary in the source - # build that is actually newer than the one in the target build. - if OPTIONS.downgrade: - script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary) - else: - script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary) - metadata["ota-required-cache"] = str(script.required_cache) - - # We haven't written the metadata entry yet, which will be handled in - # FinalizeMetadata(). - common.ZipClose(output_zip) - - # Sign the generated zip package unless no_signing is specified. - needed_property_files = ( - NonAbOtaPropertyFiles(), - ) - FinalizeMetadata(metadata, staging_file, output_file, needed_property_files) - - def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False): """Returns a target-files.zip file for generating secondary payload. 
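FinalizeMetadata(), removed here and re-landed in ota_utils.py below, documents the consumer side of the property-files string: each token names a ZIP_STORED entry together with its byte offset and size inside the hosted package. A minimal sketch of how an update client could use that string, assuming plain urllib and a caller-supplied package URL (both illustrative, not an API of these tools):

import urllib.request

def fetch_entry(package_url, property_files_string, name):
  """Fetches one ZIP_STORED entry via an HTTP byte-range request."""
  # Tokens look like "payload.bin:679:343,...,metadata:69:379"; trailing
  # padding spaces added by Finalize() are stripped before parsing.
  tokens = dict(
      t.split(':', 1) for t in property_files_string.strip().split(','))
  offset, size = (int(v) for v in tokens[name].split(':'))
  req = urllib.request.Request(
      package_url,
      headers={'Range': 'bytes={}-{}'.format(offset, offset + size - 1)})
  with urllib.request.urlopen(req) as resp:
    return resp.read()

# e.g. fetch_entry(url, metadata['ota-streaming-property-files'], 'metadata')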
@@ -1938,104 +919,6 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): FinalizeMetadata(metadata, staging_file, output_file, needed_property_files) -def GenerateNonAbOtaPackage(target_file, output_file, source_file=None): - """Generates a non-A/B OTA package.""" - # Check the loaded info dicts first. - if OPTIONS.info_dict.get("no_recovery") == "true": - raise common.ExternalError( - "--- target build has specified no recovery ---") - - # Non-A/B OTAs rely on /cache partition to store temporary files. - cache_size = OPTIONS.info_dict.get("cache_size") - if cache_size is None: - logger.warning("--- can't determine the cache partition size ---") - OPTIONS.cache_size = cache_size - - if OPTIONS.extra_script is not None: - with open(OPTIONS.extra_script) as fp: - OPTIONS.extra_script = fp.read() - - if OPTIONS.extracted_input is not None: - OPTIONS.input_tmp = OPTIONS.extracted_input - else: - logger.info("unzipping target target-files...") - OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN) - OPTIONS.target_tmp = OPTIONS.input_tmp - - # If the caller explicitly specified the device-specific extensions path via - # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it - # is present in the target target_files. Otherwise, take the path of the file - # from 'tool_extensions' in the info dict and look for that in the local - # filesystem, relative to the current directory. - if OPTIONS.device_specific is None: - from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py") - if os.path.exists(from_input): - logger.info("(using device-specific extensions from target_files)") - OPTIONS.device_specific = from_input - else: - OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions") - - if OPTIONS.device_specific is not None: - OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific) - - # Generate a full OTA. - if source_file is None: - with zipfile.ZipFile(target_file) as input_zip: - WriteFullOTAPackage( - input_zip, - output_file) - - # Generate an incremental OTA. - else: - logger.info("unzipping source target-files...") - OPTIONS.source_tmp = common.UnzipTemp( - OPTIONS.incremental_source, UNZIP_PATTERN) - with zipfile.ZipFile(target_file) as input_zip, \ - zipfile.ZipFile(source_file) as source_zip: - WriteBlockIncrementalOTAPackage( - input_zip, - source_zip, - output_file) - - -def CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values): - """Returns a tuple of sets for runtime devices and fingerprints""" - - device_names = {build_info.device} - fingerprints = {build_info.fingerprint} - - if not boot_variable_values: - return device_names, fingerprints - - # Calculate all possible combinations of the values for the boot variables. - keys = boot_variable_values.keys() - value_list = boot_variable_values.values() - combinations = [dict(zip(keys, values)) - for values in itertools.product(*value_list)] - for placeholder_values in combinations: - # Reload the info_dict as some build properties may change their values - # based on the value of ro.boot* properties. 
- info_dict = copy.deepcopy(build_info.info_dict) - for partition in common.PARTITIONS_WITH_CARE_MAP: - partition_prop_key = "{}.build.prop".format(partition) - input_file = info_dict[partition_prop_key].input_file - if isinstance(input_file, zipfile.ZipFile): - with zipfile.ZipFile(input_file.filename) as input_zip: - info_dict[partition_prop_key] = \ - common.PartitionBuildProps.FromInputFile(input_zip, partition, - placeholder_values) - else: - info_dict[partition_prop_key] = \ - common.PartitionBuildProps.FromInputFile(input_file, partition, - placeholder_values) - info_dict["build.prop"] = info_dict["system.build.prop"] - - new_build_info = common.BuildInfo(info_dict, build_info.oem_dicts) - device_names.add(new_build_info.device) - fingerprints.add(new_build_info.fingerprint) - return device_names, fingerprints - - def main(argv): def option_handler(o, a): diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py new file mode 100644 index 0000000000..874ab951c1 --- /dev/null +++ b/tools/releasetools/ota_utils.py @@ -0,0 +1,433 @@ +# Copyright (C) 2020 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +import itertools +import os +import zipfile + +from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile, + ZipWriteStr, BuildInfo, LoadDictionaryFromFile, + SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps) + +METADATA_NAME = 'META-INF/com/android/metadata' +UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*'] + + +def FinalizeMetadata(metadata, input_file, output_file, needed_property_files): + """Finalizes the metadata and signs an A/B OTA package. + + In order to stream an A/B OTA package, we need 'ota-streaming-property-files' + that contains the offsets and sizes for the ZIP entries. An example + property-files string is as follows. + + "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379" + + OTA server can pass down this string, in addition to the package URL, to the + system update client. System update client can then fetch individual ZIP + entries (ZIP_STORED) directly at the given offset of the URL. + + Args: + metadata: The metadata dict for the package. + input_file: The input ZIP filename that doesn't contain the package METADATA + entry yet. + output_file: The final output ZIP filename. + needed_property_files: The list of PropertyFiles' to be generated. + """ + + def ComputeAllPropertyFiles(input_file, needed_property_files): + # Write the current metadata entry with placeholders. 
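+    # Compute() reserves trailing placeholder spaces, so the METADATA entry
+    # written here already has its final size; Finalize() later fills in the
+    # real offset/size values, padded back to the same length.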
+ with zipfile.ZipFile(input_file) as input_zip: + for property_files in needed_property_files: + metadata[property_files.name] = property_files.Compute(input_zip) + namelist = input_zip.namelist() + + if METADATA_NAME in namelist: + ZipDelete(input_file, METADATA_NAME) + output_zip = zipfile.ZipFile(input_file, 'a') + WriteMetadata(metadata, output_zip) + ZipClose(output_zip) + + if OPTIONS.no_signing: + return input_file + + prelim_signing = MakeTempFile(suffix='.zip') + SignOutput(input_file, prelim_signing) + return prelim_signing + + def FinalizeAllPropertyFiles(prelim_signing, needed_property_files): + with zipfile.ZipFile(prelim_signing) as prelim_signing_zip: + for property_files in needed_property_files: + metadata[property_files.name] = property_files.Finalize( + prelim_signing_zip, len(metadata[property_files.name])) + + # SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP + # entries, as well as padding the entry headers. We do a preliminary signing + # (with an incomplete metadata entry) to allow that to happen. Then compute + # the ZIP entry offsets, write back the final metadata and do the final + # signing. + prelim_signing = ComputeAllPropertyFiles(input_file, needed_property_files) + try: + FinalizeAllPropertyFiles(prelim_signing, needed_property_files) + except PropertyFiles.InsufficientSpaceException: + # Even with the preliminary signing, the entry orders may change + # dramatically, which leads to insufficiently reserved space during the + # first call to ComputeAllPropertyFiles(). In that case, we redo all the + # preliminary signing works, based on the already ordered ZIP entries, to + # address the issue. + prelim_signing = ComputeAllPropertyFiles( + prelim_signing, needed_property_files) + FinalizeAllPropertyFiles(prelim_signing, needed_property_files) + + # Replace the METADATA entry. + ZipDelete(prelim_signing, METADATA_NAME) + output_zip = zipfile.ZipFile(prelim_signing, 'a') + WriteMetadata(metadata, output_zip) + ZipClose(output_zip) + + # Re-sign the package after updating the metadata entry. + if OPTIONS.no_signing: + output_file = prelim_signing + else: + SignOutput(prelim_signing, output_file) + + # Reopen the final signed zip to double check the streaming metadata. + with zipfile.ZipFile(output_file) as output_zip: + for property_files in needed_property_files: + property_files.Verify(output_zip, metadata[property_files.name].strip()) + + # If requested, dump the metadata to a separate file. + output_metadata_path = OPTIONS.output_metadata_path + if output_metadata_path: + WriteMetadata(metadata, output_metadata_path) + + +def WriteMetadata(metadata, output): + """Writes the metadata to the zip archive or a file. + + Args: + metadata: The metadata dict for the package. + output: A ZipFile object or a string of the output file path. + """ + + value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())]) + if isinstance(output, zipfile.ZipFile): + ZipWriteStr(output, METADATA_NAME, value, + compress_type=zipfile.ZIP_STORED) + return + + with open(output, 'w') as f: + f.write(value) + + +def GetPackageMetadata(target_info, source_info=None): + """Generates and returns the metadata dict. + + It generates a dict() that contains the info to be written into an OTA + package (META-INF/com/android/metadata). It also handles the detection of + downgrade / data wipe based on the global options. + + Args: + target_info: The BuildInfo instance that holds the target build info. 
+ source_info: The BuildInfo instance that holds the source build info, or + None if generating full OTA. + + Returns: + A dict to be written into package metadata entry. + """ + assert isinstance(target_info, BuildInfo) + assert source_info is None or isinstance(source_info, BuildInfo) + + separator = '|' + + boot_variable_values = {} + if OPTIONS.boot_variable_file: + d = LoadDictionaryFromFile(OPTIONS.boot_variable_file) + for key, values in d.items(): + boot_variable_values[key] = [val.strip() for val in values.split(',')] + + post_build_devices, post_build_fingerprints = \ + CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values) + metadata = { + 'post-build': separator.join(sorted(post_build_fingerprints)), + 'post-build-incremental': target_info.GetBuildProp( + 'ro.build.version.incremental'), + 'post-sdk-level': target_info.GetBuildProp( + 'ro.build.version.sdk'), + 'post-security-patch-level': target_info.GetBuildProp( + 'ro.build.version.security_patch'), + } + + if target_info.is_ab and not OPTIONS.force_non_ab: + metadata['ota-type'] = 'AB' + metadata['ota-required-cache'] = '0' + else: + metadata['ota-type'] = 'BLOCK' + + if OPTIONS.wipe_user_data: + metadata['ota-wipe'] = 'yes' + + if OPTIONS.retrofit_dynamic_partitions: + metadata['ota-retrofit-dynamic-partitions'] = 'yes' + + is_incremental = source_info is not None + if is_incremental: + pre_build_devices, pre_build_fingerprints = \ + CalculateRuntimeDevicesAndFingerprints(source_info, + boot_variable_values) + metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints)) + metadata['pre-build-incremental'] = source_info.GetBuildProp( + 'ro.build.version.incremental') + metadata['pre-device'] = separator.join(sorted(pre_build_devices)) + else: + metadata['pre-device'] = separator.join(sorted(post_build_devices)) + + # Use the actual post-timestamp, even for a downgrade case. + metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc') + + # Detect downgrades and set up downgrade flags accordingly. + if is_incremental: + HandleDowngradeMetadata(metadata, target_info, source_info) + + return metadata + + +def HandleDowngradeMetadata(metadata, target_info, source_info): + # Only incremental OTAs are allowed to reach here. + assert OPTIONS.incremental_source is not None + + post_timestamp = target_info.GetBuildProp("ro.build.date.utc") + pre_timestamp = source_info.GetBuildProp("ro.build.date.utc") + is_downgrade = int(post_timestamp) < int(pre_timestamp) + + if OPTIONS.downgrade: + if not is_downgrade: + raise RuntimeError( + "--downgrade or --override_timestamp specified but no downgrade " + "detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp)) + metadata["ota-downgrade"] = "yes" + else: + if is_downgrade: + raise RuntimeError( + "Downgrade detected based on timestamp check: pre: %s, post: %s. " + "Need to specify --override_timestamp OR --downgrade to allow " + "building the incremental." % (pre_timestamp, post_timestamp)) + + +def CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values): + """Returns a tuple of sets for runtime devices and fingerprints""" + + device_names = {build_info.device} + fingerprints = {build_info.fingerprint} + + if not boot_variable_values: + return device_names, fingerprints + + # Calculate all possible combinations of the values for the boot variables. 
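+  # For example (illustrative keys), boot_variable_values =
+  #   {'ro.boot.sku': ['fast', 'slow'], 'ro.boot.region': ['us', 'eu']}
+  # yields four placeholder dicts, one per (sku, region) pair, each of which
+  # is applied below to recompute a possible runtime fingerprint.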
+ keys = boot_variable_values.keys() + value_list = boot_variable_values.values() + combinations = [dict(zip(keys, values)) + for values in itertools.product(*value_list)] + for placeholder_values in combinations: + # Reload the info_dict as some build properties may change their values + # based on the value of ro.boot* properties. + info_dict = copy.deepcopy(build_info.info_dict) + for partition in PARTITIONS_WITH_CARE_MAP: + partition_prop_key = "{}.build.prop".format(partition) + input_file = info_dict[partition_prop_key].input_file + if isinstance(input_file, zipfile.ZipFile): + with zipfile.ZipFile(input_file.filename) as input_zip: + info_dict[partition_prop_key] = \ + PartitionBuildProps.FromInputFile(input_zip, partition, + placeholder_values) + else: + info_dict[partition_prop_key] = \ + PartitionBuildProps.FromInputFile(input_file, partition, + placeholder_values) + info_dict["build.prop"] = info_dict["system.build.prop"] + + new_build_info = BuildInfo(info_dict, build_info.oem_dicts) + device_names.add(new_build_info.device) + fingerprints.add(new_build_info.fingerprint) + return device_names, fingerprints + + +class PropertyFiles(object): + """A class that computes the property-files string for an OTA package. + + A property-files string is a comma-separated string that contains the + offset/size info for an OTA package. The entries, which must be ZIP_STORED, + can be fetched directly with the package URL along with the offset/size info. + These strings can be used for streaming A/B OTAs, or allowing an updater to + download package metadata entry directly, without paying the cost of + downloading entire package. + + Computing the final property-files string requires two passes. Because doing + the whole package signing (with signapk.jar) will possibly reorder the ZIP + entries, which may in turn invalidate earlier computed ZIP entry offset/size + values. + + This class provides functions to be called for each pass. The general flow is + as follows. + + property_files = PropertyFiles() + # The first pass, which writes placeholders before doing initial signing. + property_files.Compute() + SignOutput() + + # The second pass, by replacing the placeholders with actual data. + property_files.Finalize() + SignOutput() + + And the caller can additionally verify the final result. + + property_files.Verify() + """ + + def __init__(self): + self.name = None + self.required = () + self.optional = () + + def Compute(self, input_zip): + """Computes and returns a property-files string with placeholders. + + We reserve extra space for the offset and size of the metadata entry itself, + although we don't know the final values until the package gets signed. + + Args: + input_zip: The input ZIP file. + + Returns: + A string with placeholders for the metadata offset/size info, e.g. + "payload.bin:679:343,payload_properties.txt:378:45,metadata: ". + """ + return self.GetPropertyFilesString(input_zip, reserve_space=True) + + class InsufficientSpaceException(Exception): + pass + + def Finalize(self, input_zip, reserved_length): + """Finalizes a property-files string with actual METADATA offset/size info. + + The input ZIP file has been signed, with the ZIP entries in the desired + place (signapk.jar will possibly reorder the ZIP entries). Now we compute + the ZIP entry offsets and construct the property-files string with actual + data. Note that during this process, we must pad the property-files string + to the reserved length, so that the METADATA entry size remains the same. 
+ Otherwise the entries' offsets and sizes may change again. + + Args: + input_zip: The input ZIP file. + reserved_length: The reserved length of the property-files string during + the call to Compute(). The final string must be no more than this + size. + + Returns: + A property-files string including the metadata offset/size info, e.g. + "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379 ". + + Raises: + InsufficientSpaceException: If the reserved length is insufficient to hold + the final string. + """ + result = self.GetPropertyFilesString(input_zip, reserve_space=False) + if len(result) > reserved_length: + raise self.InsufficientSpaceException( + 'Insufficient reserved space: reserved={}, actual={}'.format( + reserved_length, len(result))) + + result += ' ' * (reserved_length - len(result)) + return result + + def Verify(self, input_zip, expected): + """Verifies the input ZIP file contains the expected property-files string. + + Args: + input_zip: The input ZIP file. + expected: The property-files string that's computed from Finalize(). + + Raises: + AssertionError: On finding a mismatch. + """ + actual = self.GetPropertyFilesString(input_zip) + assert actual == expected, \ + "Mismatching streaming metadata: {} vs {}.".format(actual, expected) + + def GetPropertyFilesString(self, zip_file, reserve_space=False): + """ + Constructs the property-files string per request. + + Args: + zip_file: The input ZIP file. + reserved_length: The reserved length of the property-files string. + + Returns: + A property-files string including the metadata offset/size info, e.g. + "payload.bin:679:343,payload_properties.txt:378:45,metadata: ". + """ + + def ComputeEntryOffsetSize(name): + """Computes the zip entry offset and size.""" + info = zip_file.getinfo(name) + offset = info.header_offset + offset += zipfile.sizeFileHeader + offset += len(info.extra) + len(info.filename) + size = info.file_size + return '%s:%d:%d' % (os.path.basename(name), offset, size) + + tokens = [] + tokens.extend(self._GetPrecomputed(zip_file)) + for entry in self.required: + tokens.append(ComputeEntryOffsetSize(entry)) + for entry in self.optional: + if entry in zip_file.namelist(): + tokens.append(ComputeEntryOffsetSize(entry)) + + # 'META-INF/com/android/metadata' is required. We don't know its actual + # offset and length (as well as the values for other entries). So we reserve + # 15-byte as a placeholder ('offset:length'), which is sufficient to cover + # the space for metadata entry. Because 'offset' allows a max of 10-digit + # (i.e. ~9 GiB), with a max of 4-digit for the length. Note that all the + # reserved space serves the metadata entry only. + if reserve_space: + tokens.append('metadata:' + ' ' * 15) + else: + tokens.append(ComputeEntryOffsetSize(METADATA_NAME)) + + return ','.join(tokens) + + def _GetPrecomputed(self, input_zip): + """Computes the additional tokens to be included into the property-files. + + This applies to tokens without actual ZIP entries, such as + payload_metadata.bin. We want to expose the offset/size to updaters, so + that they can download the payload metadata directly with the info. + + Args: + input_zip: The input zip file. + + Returns: + A list of strings (tokens) to be added to the property-files string. 
+ """ + # pylint: disable=no-self-use + # pylint: disable=unused-argument + return [] + + +def SignOutput(temp_zip_name, output_zip_name): + pw = OPTIONS.key_passwords[OPTIONS.package_key] + + SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw, + whole_file=True) diff --git a/tools/releasetools/test_non_ab_ota.py b/tools/releasetools/test_non_ab_ota.py new file mode 100644 index 0000000000..ee1b4113c9 --- /dev/null +++ b/tools/releasetools/test_non_ab_ota.py @@ -0,0 +1,169 @@ +# +# Copyright (C) 2020 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import copy +import zipfile + +import common +import test_utils + +from non_ab_ota import NonAbOtaPropertyFiles, WriteFingerprintAssertion +from test_utils import PropertyFilesTestCase + + +class NonAbOtaPropertyFilesTest(PropertyFilesTestCase): + """Additional validity checks specialized for NonAbOtaPropertyFiles.""" + def setUp(self): + common.OPTIONS.no_signing = False + def test_init(self): + property_files = NonAbOtaPropertyFiles() + self.assertEqual('ota-property-files', property_files.name) + self.assertEqual((), property_files.required) + self.assertEqual((), property_files.optional) + + def test_Compute(self): + entries = () + zip_file = self.construct_zip_package(entries) + property_files = NonAbOtaPropertyFiles() + with zipfile.ZipFile(zip_file) as zip_fp: + property_files_string = property_files.Compute(zip_fp) + + tokens = self._parse_property_files_string(property_files_string) + self.assertEqual(1, len(tokens)) + self._verify_entries(zip_file, tokens, entries) + + def test_Finalize(self): + entries = [ + 'META-INF/com/android/metadata', + ] + zip_file = self.construct_zip_package(entries) + property_files = NonAbOtaPropertyFiles() + with zipfile.ZipFile(zip_file) as zip_fp: + raw_metadata = property_files.GetPropertyFilesString( + zip_fp, reserve_space=False) + property_files_string = property_files.Finalize(zip_fp, len(raw_metadata)) + tokens = self._parse_property_files_string(property_files_string) + + self.assertEqual(1, len(tokens)) + # 'META-INF/com/android/metadata' will be key'd as 'metadata'. 
+ entries[0] = 'metadata' + self._verify_entries(zip_file, tokens, entries) + + def test_Verify(self): + entries = ( + 'META-INF/com/android/metadata', + ) + zip_file = self.construct_zip_package(entries) + property_files = NonAbOtaPropertyFiles() + with zipfile.ZipFile(zip_file) as zip_fp: + raw_metadata = property_files.GetPropertyFilesString( + zip_fp, reserve_space=False) + + property_files.Verify(zip_fp, raw_metadata) + +class NonAbOTATest(test_utils.ReleaseToolsTestCase): + TEST_TARGET_INFO_DICT = { + 'build.prop': common.PartitionBuildProps.FromDictionary( + 'system', { + 'ro.product.device': 'product-device', + 'ro.build.fingerprint': 'build-fingerprint-target', + 'ro.build.version.incremental': 'build-version-incremental-target', + 'ro.build.version.sdk': '27', + 'ro.build.version.security_patch': '2017-12-01', + 'ro.build.date.utc': '1500000000'} + ) + } + TEST_INFO_DICT_USES_OEM_PROPS = { + 'build.prop': common.PartitionBuildProps.FromDictionary( + 'system', { + 'ro.product.name': 'product-name', + 'ro.build.thumbprint': 'build-thumbprint', + 'ro.build.bar': 'build-bar'} + ), + 'vendor.build.prop': common.PartitionBuildProps.FromDictionary( + 'vendor', { + 'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'} + ), + 'property1': 'value1', + 'property2': 4096, + 'oem_fingerprint_properties': 'ro.product.device ro.product.brand', + } + TEST_OEM_DICTS = [ + { + 'ro.product.brand': 'brand1', + 'ro.product.device': 'device1', + }, + { + 'ro.product.brand': 'brand2', + 'ro.product.device': 'device2', + }, + { + 'ro.product.brand': 'brand3', + 'ro.product.device': 'device3', + }, + ] + def test_WriteFingerprintAssertion_without_oem_props(self): + target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None) + source_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT) + source_info_dict['build.prop'].build_props['ro.build.fingerprint'] = ( + 'source-build-fingerprint') + source_info = common.BuildInfo(source_info_dict, None) + + script_writer = test_utils.MockScriptWriter() + WriteFingerprintAssertion(script_writer, target_info, source_info) + self.assertEqual( + [('AssertSomeFingerprint', 'source-build-fingerprint', + 'build-fingerprint-target')], + script_writer.lines) + + def test_WriteFingerprintAssertion_with_source_oem_props(self): + target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None) + source_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, + self.TEST_OEM_DICTS) + + script_writer = test_utils.MockScriptWriter() + WriteFingerprintAssertion(script_writer, target_info, source_info) + self.assertEqual( + [('AssertFingerprintOrThumbprint', 'build-fingerprint-target', + 'build-thumbprint')], + script_writer.lines) + + def test_WriteFingerprintAssertion_with_target_oem_props(self): + target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, + self.TEST_OEM_DICTS) + source_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None) + + script_writer = test_utils.MockScriptWriter() + WriteFingerprintAssertion(script_writer, target_info, source_info) + self.assertEqual( + [('AssertFingerprintOrThumbprint', 'build-fingerprint-target', + 'build-thumbprint')], + script_writer.lines) + + def test_WriteFingerprintAssertion_with_both_oem_props(self): + target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, + self.TEST_OEM_DICTS) + source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS) + source_info_dict['build.prop'].build_props['ro.build.thumbprint'] = ( + 'source-build-thumbprint') + source_info = 
common.BuildInfo(source_info_dict, self.TEST_OEM_DICTS) + + script_writer = test_utils.MockScriptWriter() + WriteFingerprintAssertion(script_writer, target_info, source_info) + self.assertEqual( + [('AssertSomeThumbprint', 'build-thumbprint', + 'source-build-thumbprint')], + script_writer.lines) diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py index 07b2e05e4c..52aa487031 100644 --- a/tools/releasetools/test_ota_from_target_files.py +++ b/tools/releasetools/test_ota_from_target_files.py @@ -21,14 +21,15 @@ import zipfile import common import test_utils +from ota_utils import CalculateRuntimeDevicesAndFingerprints from ota_from_target_files import ( _LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata, GetPackageMetadata, GetTargetFilesZipForSecondaryImages, - GetTargetFilesZipWithoutPostinstallConfig, NonAbOtaPropertyFiles, + GetTargetFilesZipWithoutPostinstallConfig, Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles, - StreamingPropertyFiles, WriteFingerprintAssertion, - CalculateRuntimeDevicesAndFingerprints) - + StreamingPropertyFiles) +from non_ab_ota import NonAbOtaPropertyFiles +from test_utils import PropertyFilesTestCase def construct_target_files(secondary=False): """Returns a target-files.zip file for generating OTA packages.""" @@ -149,20 +150,6 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): 'oem_fingerprint_properties': 'ro.product.device ro.product.brand', } - TEST_OEM_DICTS = [ - { - 'ro.product.brand': 'brand1', - 'ro.product.device': 'device1', - }, - { - 'ro.product.brand': 'brand2', - 'ro.product.device': 'device2', - }, - { - 'ro.product.brand': 'brand3', - 'ro.product.device': 'device3', - }, - ] def setUp(self): self.testdata_dir = test_utils.get_testdata_dir() @@ -529,59 +516,6 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): FinalizeMetadata(metadata, zip_file, output_file, needed_property_files) self.assertIn('ota-test-property-files', metadata) - def test_WriteFingerprintAssertion_without_oem_props(self): - target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None) - source_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT) - source_info_dict['build.prop'].build_props['ro.build.fingerprint'] = ( - 'source-build-fingerprint') - source_info = common.BuildInfo(source_info_dict, None) - - script_writer = test_utils.MockScriptWriter() - WriteFingerprintAssertion(script_writer, target_info, source_info) - self.assertEqual( - [('AssertSomeFingerprint', 'source-build-fingerprint', - 'build-fingerprint-target')], - script_writer.lines) - - def test_WriteFingerprintAssertion_with_source_oem_props(self): - target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None) - source_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, - self.TEST_OEM_DICTS) - - script_writer = test_utils.MockScriptWriter() - WriteFingerprintAssertion(script_writer, target_info, source_info) - self.assertEqual( - [('AssertFingerprintOrThumbprint', 'build-fingerprint-target', - 'build-thumbprint')], - script_writer.lines) - - def test_WriteFingerprintAssertion_with_target_oem_props(self): - target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, - self.TEST_OEM_DICTS) - source_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None) - - script_writer = test_utils.MockScriptWriter() - WriteFingerprintAssertion(script_writer, target_info, source_info) - self.assertEqual( - [('AssertFingerprintOrThumbprint', 'build-fingerprint-target', - 'build-thumbprint')], - 
script_writer.lines) - - def test_WriteFingerprintAssertion_with_both_oem_props(self): - target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS, - self.TEST_OEM_DICTS) - source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS) - source_info_dict['build.prop'].build_props['ro.build.thumbprint'] = ( - 'source-build-thumbprint') - source_info = common.BuildInfo(source_info_dict, self.TEST_OEM_DICTS) - - script_writer = test_utils.MockScriptWriter() - WriteFingerprintAssertion(script_writer, target_info, source_info) - self.assertEqual( - [('AssertSomeThumbprint', 'build-thumbprint', - 'source-build-thumbprint')], - script_writer.lines) - class TestPropertyFiles(PropertyFiles): """A class that extends PropertyFiles for testing purpose.""" @@ -598,41 +532,8 @@ class TestPropertyFiles(PropertyFiles): 'optional-entry2', ) +class PropertyFilesTest(PropertyFilesTestCase): -class PropertyFilesTest(test_utils.ReleaseToolsTestCase): - - def setUp(self): - common.OPTIONS.no_signing = False - - @staticmethod - def construct_zip_package(entries): - zip_file = common.MakeTempFile(suffix='.zip') - with zipfile.ZipFile(zip_file, 'w') as zip_fp: - for entry in entries: - zip_fp.writestr( - entry, - entry.replace('.', '-').upper(), - zipfile.ZIP_STORED) - return zip_file - - @staticmethod - def _parse_property_files_string(data): - result = {} - for token in data.split(','): - name, info = token.split(':', 1) - result[name] = info - return result - - def _verify_entries(self, input_file, tokens, entries): - for entry in entries: - offset, size = map(int, tokens[entry].split(':')) - with open(input_file, 'rb') as input_fp: - input_fp.seek(offset) - if entry == 'metadata': - expected = b'META-INF/COM/ANDROID/METADATA' - else: - expected = entry.replace('.', '-').upper().encode() - self.assertEqual(expected, input_fp.read(size)) @test_utils.SkipIfExternalToolsUnavailable() def test_Compute(self): @@ -753,7 +654,7 @@ class PropertyFilesTest(test_utils.ReleaseToolsTestCase): AssertionError, property_files.Verify, zip_fp, raw_metadata + 'x') -class StreamingPropertyFilesTest(PropertyFilesTest): +class StreamingPropertyFilesTest(PropertyFilesTestCase): """Additional validity checks specialized for StreamingPropertyFiles.""" def test_init(self): @@ -834,7 +735,7 @@ class StreamingPropertyFilesTest(PropertyFilesTest): AssertionError, property_files.Verify, zip_fp, raw_metadata + 'x') -class AbOtaPropertyFilesTest(PropertyFilesTest): +class AbOtaPropertyFilesTest(PropertyFilesTestCase): """Additional validity checks specialized for AbOtaPropertyFiles.""" # The size for payload and metadata signature size. 
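With the helpers hoisted onto PropertyFilesTestCase (added to test_utils.py below), a specialized suite only needs the base class. A sketch of the pattern, assuming it sits in test_ota_from_target_files.py next to TestPropertyFiles; the class and test names are illustrative:

class ExamplePropertyFilesTest(PropertyFilesTestCase):

  def test_Compute_roundtrip(self):
    entries = ['required-entry1', 'required-entry2']
    zip_file = self.construct_zip_package(entries)
    property_files = TestPropertyFiles()
    with zipfile.ZipFile(zip_file) as zip_fp:
      # Tokens map entry names to 'offset:size' strings.
      tokens = self._parse_property_files_string(
          property_files.Compute(zip_fp))
    # Each listed entry's bytes must sit at the advertised offset.
    self._verify_entries(zip_file, tokens, entries)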
@@ -1002,56 +903,6 @@ class AbOtaPropertyFilesTest(PropertyFilesTest): property_files.Verify(zip_fp, raw_metadata) -class NonAbOtaPropertyFilesTest(PropertyFilesTest): - """Additional validity checks specialized for NonAbOtaPropertyFiles.""" - - def test_init(self): - property_files = NonAbOtaPropertyFiles() - self.assertEqual('ota-property-files', property_files.name) - self.assertEqual((), property_files.required) - self.assertEqual((), property_files.optional) - - def test_Compute(self): - entries = () - zip_file = self.construct_zip_package(entries) - property_files = NonAbOtaPropertyFiles() - with zipfile.ZipFile(zip_file) as zip_fp: - property_files_string = property_files.Compute(zip_fp) - - tokens = self._parse_property_files_string(property_files_string) - self.assertEqual(1, len(tokens)) - self._verify_entries(zip_file, tokens, entries) - - def test_Finalize(self): - entries = [ - 'META-INF/com/android/metadata', - ] - zip_file = self.construct_zip_package(entries) - property_files = NonAbOtaPropertyFiles() - with zipfile.ZipFile(zip_file) as zip_fp: - raw_metadata = property_files.GetPropertyFilesString( - zip_fp, reserve_space=False) - property_files_string = property_files.Finalize(zip_fp, len(raw_metadata)) - tokens = self._parse_property_files_string(property_files_string) - - self.assertEqual(1, len(tokens)) - # 'META-INF/com/android/metadata' will be key'd as 'metadata'. - entries[0] = 'metadata' - self._verify_entries(zip_file, tokens, entries) - - def test_Verify(self): - entries = ( - 'META-INF/com/android/metadata', - ) - zip_file = self.construct_zip_package(entries) - property_files = NonAbOtaPropertyFiles() - with zipfile.ZipFile(zip_file) as zip_fp: - raw_metadata = property_files.GetPropertyFilesString( - zip_fp, reserve_space=False) - - property_files.Verify(zip_fp, raw_metadata) - - class PayloadSignerTest(test_utils.ReleaseToolsTestCase): SIGFILE = 'sigfile.bin' diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py index e99975765b..65092d84de 100755 --- a/tools/releasetools/test_utils.py +++ b/tools/releasetools/test_utils.py @@ -25,6 +25,7 @@ import os.path import struct import sys import unittest +import zipfile import common @@ -192,6 +193,41 @@ class ReleaseToolsTestCase(unittest.TestCase): def tearDown(self): common.Cleanup() +class PropertyFilesTestCase(ReleaseToolsTestCase): + + @staticmethod + def construct_zip_package(entries): + zip_file = common.MakeTempFile(suffix='.zip') + with zipfile.ZipFile(zip_file, 'w') as zip_fp: + for entry in entries: + zip_fp.writestr( + entry, + entry.replace('.', '-').upper(), + zipfile.ZIP_STORED) + return zip_file + + @staticmethod + def _parse_property_files_string(data): + result = {} + for token in data.split(','): + name, info = token.split(':', 1) + result[name] = info + return result + + def setUp(self): + common.OPTIONS.no_signing = False + + def _verify_entries(self, input_file, tokens, entries): + for entry in entries: + offset, size = map(int, tokens[entry].split(':')) + with open(input_file, 'rb') as input_fp: + input_fp.seek(offset) + if entry == 'metadata': + expected = b'META-INF/COM/ANDROID/METADATA' + else: + expected = entry.replace('.', '-').upper().encode() + self.assertEqual(expected, input_fp.read(size)) + if __name__ == '__main__': testsuite = unittest.TestLoader().discover( |
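The _verify_entries() helper added above checks tokens by reading the raw archive at the advertised offsets. The offset arithmetic mirrors ComputeEntryOffsetSize() in ota_utils.py; a standalone sketch using only the stdlib (the file path is illustrative):

import zipfile

def entry_offset_size(zip_path, name):
  """Returns (offset, size) of a ZIP entry's stored file data."""
  with zipfile.ZipFile(zip_path) as zf:
    info = zf.getinfo(name)
    # The fixed-size local file header (zipfile.sizeFileHeader, 30 bytes)
    # precedes the entry name and the extra field, then the data itself.
    offset = (info.header_offset + zipfile.sizeFileHeader +
              len(info.filename) + len(info.extra))
    return offset, info.file_size

# Seeking to offset in the raw file and reading size bytes then yields the
# payload of a ZIP_STORED member, which is exactly what the test asserts.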