27 files changed, 592 insertions, 122 deletions
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 1c02da69f6..757c513a4c 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -617,6 +617,9 @@ $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/security/avb/)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/super.img)
 $(call add-clean-step, find $(PRODUCT_OUT) -type f -name "generated_*_image_info.txt" -print0 | xargs -0 rm -f)
+
+# Clean up libicuuc.so and libicui18n.so
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib*/libicu*)
 
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/core/board_config.mk b/core/board_config.mk
index 9ea3509cbb..d32e30134d 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -44,6 +44,7 @@ _board_strip_readonly_list := \
   TARGET_BOARD_PLATFORM \
   TARGET_BOARD_PLATFORM_GPU \
   TARGET_BOOTLOADER_BOARD_NAME \
+  TARGET_FS_CONFIG_GEN \
   TARGET_NO_BOOTLOADER \
   TARGET_NO_KERNEL \
   TARGET_NO_RECOVERY \
diff --git a/core/main.mk b/core/main.mk
index 3ff2fcdfcc..590bfcc7ff 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -218,8 +218,7 @@ include build/make/core/pdk_config.mk
 #
 # -----------------------------------------------------------------
-# Enable dynamic linker and hidden API developer warnings for
-# userdebug, eng and non-REL builds
+# Enable dynamic linker warnings for userdebug, eng and non-REL builds
 ifneq ($(TARGET_BUILD_VARIANT),user)
 ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
 else
@@ -257,6 +256,11 @@ ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER))
 ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)
 endif
 
+# Enable core platform API violation warnings on userdebug and eng builds.
+ifneq ($(TARGET_BUILD_VARIANT),user)
+ADDITIONAL_BUILD_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
+endif
+
 # Sets the default value of ro.postinstall.fstab.prefix to /system.
 # Device board config should override the value to /product when needed by:
 #
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 0275de6ec9..c01fc988dd 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -165,6 +165,8 @@ $(call add_json_list, ProductHiddenAPIStubs, $(PRODUCT_HIDDENAPI_STU
 $(call add_json_list, ProductHiddenAPIStubsSystem, $(PRODUCT_HIDDENAPI_STUBS_SYSTEM))
 $(call add_json_list, ProductHiddenAPIStubsTest, $(PRODUCT_HIDDENAPI_STUBS_TEST))
 
+$(call add_json_str, TargetFSConfigGen, $(TARGET_FS_CONFIG_GEN))
+
 $(call add_json_map, VendorVars)
 $(foreach namespace,$(SOONG_CONFIG_NAMESPACES),\
   $(call add_json_map, $(namespace))\
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
index 617b3c58f0..c871d7eab3 100644
--- a/target/board/BoardConfigEmuCommon.mk
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -20,6 +20,9 @@ BUILD_QEMU_IMAGES := true
 # the GLES renderer disables itself if host GL acceleration isn't available.
 USE_OPENGL_RENDERER := true
 
+# Emulator doesn't support sparse image format.
+TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+
 # ~140 MB vendor image. Please adjust system image / vendor image sizes
 # when finalizing them. The partition size needs to be a multiple of image
 # block size: 4096.
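The $(call add_json_str, TargetFSConfigGen, $(TARGET_FS_CONFIG_GEN)) line above is what hands the board's fs_config path to Soong: the value lands in Soong's generated configuration JSON (conventionally out/soong/soong.variables), where the new target_fs_config_gen_filegroup module added later in this change reads it back. A minimal sketch, assuming that file name and location, for checking what was plumbed through after a build:

# check_fs_config_gen.py -- illustrative only; assumes the conventional
# out/soong/soong.variables location for the Soong configuration JSON.
import json
import os

def target_fs_config_gen(out_dir="out"):
    variables = os.path.join(out_dir, "soong", "soong.variables")
    with open(variables) as fp:
        config = json.load(fp)
    # add_json_str writes an empty string when TARGET_FS_CONFIG_GEN is unset,
    # in which case the Go module falls back to generating an empty file.
    return config.get("TargetFSConfigGen", "")

if __name__ == "__main__":
    value = target_fs_config_gen()
    print(value if value else "TARGET_FS_CONFIG_GEN is not set for this product")

The same variable is also added to _board_strip_readonly_list in core/board_config.mk, so it can no longer be modified after the board configuration is loaded.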
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index eee65f42dd..d4025c3fcd 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -11,11 +11,6 @@ TARGET_NO_KERNEL := true
 # This flag is set by mainline but isn't desired for GSI.
 BOARD_USES_SYSTEM_OTHER_ODEX :=
 
-# GSIs are historically released in sparse format.
-# Some vendors' bootloaders don't work properly with raw format images. So
-# we explicit specify this need below (even though it's the current default).
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := false
-
 # system.img is always ext4 with sparse option
 # GSI also includes make_f2fs to support userdata parition in f2fs
 # for some devices
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 87076a94bf..46bf350f8a 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -356,6 +356,7 @@ PRODUCT_PACKAGES_DEBUG := \
     showmap \
     sqlite3 \
     ss \
+    start_with_lockagent \
     strace \
     su \
     sanitizer-status \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index ecd5691d7a..d75809c44e 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -76,6 +76,7 @@ PRODUCT_PACKAGES += \
     android.hardware.secure_element@1.0 \
     android.hardware.wifi@1.0 \
     libaudio-resampler \
+    libdrm \
     liblogwrap \
     liblz4 \
     libminui \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index dbb4b8289e..e08b56be53 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -33,11 +33,6 @@ PRODUCT_PACKAGES += \
 PRODUCT_PACKAGES += \
     ext \
 
-# Libcore ICU. TODO(b/124218500): Remove them explicitly when the bug is resolved.
-PRODUCT_PACKAGES += \
-    libicui18n \
-    libicuuc \
-
 # Android Runtime APEX module.
 PRODUCT_PACKAGES += com.android.runtime
 PRODUCT_HOST_PACKAGES += com.android.runtime
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 19a4624d92..d6fae2deaf 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -12,6 +12,19 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+bootstrap_go_package {
+    name: "soong-fs_config",
+    pkgPath: "android/soong/fs_config",
+    deps: [
+        "soong-android",
+        "soong-genrule",
+    ],
+    srcs: [
+        "fs_config.go"
+    ],
+    pluginFor: ["soong_build"],
+}
+
 cc_binary_host {
     name: "fs_config",
     srcs: ["fs_config.c"],
@@ -21,3 +34,64 @@ cc_binary_host {
     ],
     cflags: ["-Werror"],
 }
+
+target_fs_config_gen_filegroup {
+    name: "target_fs_config_gen",
+}
+
+genrule {
+    name: "oemaids_header_gen",
+    tool_files: ["fs_config_generator.py"],
+    cmd: "$(location fs_config_generator.py) oemaid --aid-header=$(location :android_filesystem_config_header) $(location :target_fs_config_gen) >$(out)",
+    srcs: [
+        ":target_fs_config_gen",
+        ":android_filesystem_config_header",
+    ],
+    out: ["generated_oem_aid.h"],
+}
+
+cc_library_headers {
+    name: "oemaids_headers",
+    generated_headers: ["oemaids_header_gen"],
+    export_generated_headers: ["oemaids_header_gen"],
+}
+
+// Generate the vendor/etc/passwd text file for the target
+// This file may be empty if no AIDs are defined in
+// TARGET_FS_CONFIG_GEN files.
+genrule { + name: "passwd_gen", + tool_files: ["fs_config_generator.py"], + cmd: "$(location fs_config_generator.py) passwd --required-prefix=vendor_ --aid-header=$(location :android_filesystem_config_header) $(location :target_fs_config_gen) >$(out)", + srcs: [ + ":target_fs_config_gen", + ":android_filesystem_config_header", + ], + out: ["passwd"], +} + +prebuilt_etc { + name: "passwd", + vendor: true, + src: ":passwd_gen", +} + +// Generate the vendor/etc/group text file for the target +// This file may be empty if no AIDs are defined in +// TARGET_FS_CONFIG_GEN files. +genrule { + name: "group_gen", + tool_files: ["fs_config_generator.py"], + cmd: "$(location fs_config_generator.py) group --required-prefix=vendor_ --aid-header=$(location :android_filesystem_config_header) $(location :target_fs_config_gen) >$(out)", + srcs: [ + ":target_fs_config_gen", + ":android_filesystem_config_header", + ], + out: ["group"], +} + +prebuilt_etc { + name: "group", + vendor: true, + src: ":group_gen", +} diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk index 0e0b1dafba..96db0f39fb 100644 --- a/tools/fs_config/Android.mk +++ b/tools/fs_config/Android.mk @@ -382,67 +382,6 @@ $(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_G $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) endif -################################## -# Build the oemaid header library when fs config files are present. -# Intentionally break build if you require generated AIDs -# header file, but are not using any fs config files. -ifneq ($(TARGET_FS_CONFIG_GEN),) -include $(CLEAR_VARS) -LOCAL_MODULE := oemaids_headers - -LOCAL_MODULE_CLASS := ETC - -# Generate the "generated_oem_aid.h" file -oem := $(local-generated-sources-dir)/generated_oem_aid.h -$(oem): PRIVATE_LOCAL_PATH := $(LOCAL_PATH) -$(oem): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(oem): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config) -$(oem): PRIVATE_CUSTOM_TOOL = $(PRIVATE_LOCAL_PATH)/fs_config_generator.py oemaid --aid-header=$(PRIVATE_ANDROID_FS_HDR) $(PRIVATE_TARGET_FS_CONFIG_GEN) > $@ -$(oem): $(TARGET_FS_CONFIG_GEN) $(LOCAL_PATH)/fs_config_generator.py - $(transform-generated-source) - -LOCAL_EXPORT_C_INCLUDE_DIRS := $(dir $(oem)) -LOCAL_EXPORT_C_INCLUDE_DEPS := $(oem) - -include $(BUILD_HEADER_LIBRARY) -endif - -################################## -# Generate the vendor/etc/passwd text file for the target -# This file may be empty if no AIDs are defined in -# TARGET_FS_CONFIG_GEN files. -include $(CLEAR_VARS) - -LOCAL_MODULE := passwd -LOCAL_MODULE_CLASS := ETC -LOCAL_VENDOR_MODULE := true - -include $(BUILD_SYSTEM)/base_rules.mk - -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) - @mkdir -p $(dir $@) - $(hide) $< passwd --required-prefix=vendor_ --aid-header=$(PRIVATE_ANDROID_FS_HDR) $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) > $@ - -################################## -# Generate the vendor/etc/group text file for the target -# This file may be empty if no AIDs are defined in -# TARGET_FS_CONFIG_GEN files. 
-include $(CLEAR_VARS) - -LOCAL_MODULE := group -LOCAL_MODULE_CLASS := ETC -LOCAL_VENDOR_MODULE := true - -include $(BUILD_SYSTEM)/base_rules.mk - -$(LOCAL_BUILT_MODULE): PRIVATE_TARGET_FS_CONFIG_GEN := $(TARGET_FS_CONFIG_GEN) -$(LOCAL_BUILT_MODULE): PRIVATE_ANDROID_FS_HDR := $(system_android_filesystem_config) -$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/fs_config_generator.py $(TARGET_FS_CONFIG_GEN) $(system_android_filesystem_config) - @mkdir -p $(dir $@) - $(hide) $< group --required-prefix=vendor_ --aid-header=$(PRIVATE_ANDROID_FS_HDR) $(or $(PRIVATE_TARGET_FS_CONFIG_GEN),/dev/null) > $@ - system_android_filesystem_config := system_capability_header := fs_config_generate_extra_partition_list := diff --git a/tools/fs_config/fs_config.go b/tools/fs_config/fs_config.go new file mode 100644 index 0000000000..869cb3de99 --- /dev/null +++ b/tools/fs_config/fs_config.go @@ -0,0 +1,60 @@ +// Copyright (C) 2019 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package fs_config + +import ( + "android/soong/android" +) + +var pctx = android.NewPackageContext("android/soong/fs_config") + +func init() { + android.RegisterModuleType("target_fs_config_gen_filegroup", targetFSConfigGenFactory) +} + +// target_fs_config_gen_filegroup is used to expose the file pointed to by TARGET_FS_CONFIG_GEN to +// genrules in Soong. If TARGET_FS_CONFIG_GEN is empty, it will export an empty file instead. +func targetFSConfigGenFactory() android.Module { + module := &targetFSConfigGen{} + android.InitAndroidModule(module) + return module +} + +var _ android.SourceFileProducer = (*targetFSConfigGen)(nil) + +type targetFSConfigGen struct { + android.ModuleBase + path android.Path +} + +func (targetFSConfigGen) DepsMutator(ctx android.BottomUpMutatorContext) {} + +func (t *targetFSConfigGen) GenerateAndroidBuildActions(ctx android.ModuleContext) { + if ret := ctx.DeviceConfig().TargetFSConfigGen(); ret != nil && *ret != "" { + t.path = android.PathForSource(ctx, *ret) + } else { + path := android.PathForModuleGen(ctx, "empty") + t.path = path + + rule := android.NewRuleBuilder() + rule.Command().Text("rm -rf").Output(path) + rule.Command().Text("touch").Output(path) + rule.Build(pctx, ctx, "fs_config_empty", "create empty file") + } +} + +func (t *targetFSConfigGen) Srcs() android.Paths { + return android.Paths{t.path} +} diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp new file mode 100644 index 0000000000..b5ae009511 --- /dev/null +++ b/tools/releasetools/Android.bp @@ -0,0 +1,72 @@ +// Copyright (C) 2019 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +python_defaults { + name: "releasetools_test_defaults", + version: { + py2: { + enabled: true, + embedded_launcher: false, + }, + py3: { + enabled: false, + }, + }, +} + +python_library_host { + name: "releasetools_lib", + defaults: ["releasetools_test_defaults"], + srcs: [ + "add_img_to_target_files.py", + "apex_utils.py", + "blockimgdiff.py", + "build_image.py", + "build_super_image.py", + "check_ota_package_signature.py", + "check_target_files_signatures.py", + "common.py", + "edify_generator.py", + "img_from_target_files.py", + "make_recovery_patch.py", + "merge_target_files.py", + "ota_from_target_files.py", + "ota_package_parser.py", + "rangelib.py", + "sign_target_files_apks.py", + "sparse_img.py", + "target_files_diff.py", + "validate_target_files.py", + "verity_utils.py", + ], +} + +python_test_host { + name: "releasetools_test", + defaults: ["releasetools_test_defaults"], + main: "test_utils.py", + srcs: [ + "test_*.py", + ], + libs: [ + "releasetools_lib", + ], + data: [ + "testdata/*", + ], + required: [ + "otatools", + ], + test_suites: ["general-tests"], +} diff --git a/tools/releasetools/TEST_MAPPING b/tools/releasetools/TEST_MAPPING new file mode 100644 index 0000000000..77cef07d84 --- /dev/null +++ b/tools/releasetools/TEST_MAPPING @@ -0,0 +1,8 @@ +{ + "presubmit": [ + { + "name": "releasetools_test", + "host": true + } + ] +} diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py index b7c33f54be..b23eef112c 100644 --- a/tools/releasetools/blockimgdiff.py +++ b/tools/releasetools/blockimgdiff.py @@ -187,6 +187,83 @@ class DataImage(Image): fd.write(data) +class FileImage(Image): + """An image wrapped around a raw image file.""" + + def __init__(self, path, hashtree_info_generator=None): + self.path = path + self.blocksize = 4096 + self._file_size = os.path.getsize(self.path) + self._file = open(self.path, 'r') + + if self._file_size % self.blocksize != 0: + raise ValueError("Size of file %s must be multiple of %d bytes, but is %d" + % self.path, self.blocksize, self._file_size) + + self.total_blocks = self._file_size / self.blocksize + self.care_map = RangeSet(data=(0, self.total_blocks)) + self.clobbered_blocks = RangeSet() + self.extended = RangeSet() + + self.generator_lock = threading.Lock() + + self.hashtree_info = None + if hashtree_info_generator: + self.hashtree_info = hashtree_info_generator.Generate(self) + + zero_blocks = [] + nonzero_blocks = [] + reference = '\0' * self.blocksize + + for i in range(self.total_blocks): + d = self._file.read(self.blocksize) + if d == reference: + zero_blocks.append(i) + zero_blocks.append(i+1) + else: + nonzero_blocks.append(i) + nonzero_blocks.append(i+1) + + assert zero_blocks or nonzero_blocks + + self.file_map = {} + if zero_blocks: + self.file_map["__ZERO"] = RangeSet(data=zero_blocks) + if nonzero_blocks: + self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks) + if self.hashtree_info: + self.file_map["__HASHTREE"] = self.hashtree_info.hashtree_range + + def __del__(self): + self._file.close() + + def _GetRangeData(self, ranges): + # Use a lock to protect the generator 
so that we will not run two + # instances of this generator on the same object simultaneously. + with self.generator_lock: + for s, e in ranges: + self._file.seek(s * self.blocksize) + for _ in range(s, e): + yield self._file.read(self.blocksize) + + def RangeSha1(self, ranges): + h = sha1() + for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable + h.update(data) + return h.hexdigest() + + def ReadRangeSet(self, ranges): + return list(self._GetRangeData(ranges)) + + def TotalSha1(self, include_clobbered_blocks=False): + assert not self.clobbered_blocks + return self.RangeSha1(self.care_map) + + def WriteRangeDataToFd(self, ranges, fd): + for data in self._GetRangeData(ranges): # pylint: disable=not-an-iterable + fd.write(data) + + class Transfer(object): def __init__(self, tgt_name, src_name, tgt_ranges, src_ranges, tgt_sha1, src_sha1, style, by_id): diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py index 171c794566..3e2a113e06 100644 --- a/tools/releasetools/common.py +++ b/tools/releasetools/common.py @@ -824,6 +824,77 @@ def UnzipTemp(filename, pattern=None): return tmp +def GetUserImage(which, tmpdir, input_zip, + info_dict=None, + allow_shared_blocks=None, + hashtree_info_generator=None, + reset_file_map=False): + """Returns an Image object suitable for passing to BlockImageDiff. + + This function loads the specified image from the given path. If the specified + image is sparse, it also performs additional processing for OTA purpose. For + example, it always adds block 0 to clobbered blocks list. It also detects + files that cannot be reconstructed from the block list, for whom we should + avoid applying imgdiff. + + Args: + which: The partition name. + tmpdir: The directory that contains the prebuilt image and block map file. + input_zip: The target-files ZIP archive. + info_dict: The dict to be looked up for relevant info. + allow_shared_blocks: If image is sparse, whether having shared blocks is + allowed. If none, it is looked up from info_dict. + hashtree_info_generator: If present and image is sparse, generates the + hashtree_info for this sparse image. + reset_file_map: If true and image is sparse, reset file map before returning + the image. + Returns: + A Image object. If it is a sparse image and reset_file_map is False, the + image will have file_map info loaded. + """ + if info_dict == None: + info_dict = LoadInfoDict(input_zip) + + is_sparse = info_dict.get("extfs_sparse_flag") + + # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain + # shared blocks (i.e. some blocks will show up in multiple files' block + # list). We can only allocate such shared blocks to the first "owner", and + # disable imgdiff for all later occurrences. + if allow_shared_blocks is None: + allow_shared_blocks = info_dict.get("ext4_share_dup_blocks") == "true" + + if is_sparse: + img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks, + hashtree_info_generator) + if reset_file_map: + img.ResetFileMap() + return img + else: + return GetNonSparseImage(which, tmpdir, hashtree_info_generator) + + +def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None): + """Returns a Image object suitable for passing to BlockImageDiff. + + This function loads the specified non-sparse image from the given path. + + Args: + which: The partition name. + tmpdir: The directory that contains the prebuilt image and block map file. + Returns: + A Image object. 
+ """ + path = os.path.join(tmpdir, "IMAGES", which + ".img") + mappath = os.path.join(tmpdir, "IMAGES", which + ".map") + + # The image and map files must have been created prior to calling + # ota_from_target_files.py (since LMP). + assert os.path.exists(path) and os.path.exists(mappath) + + return blockimgdiff.FileImage(path, hashtree_info_generator= + hashtree_info_generator) + def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks, hashtree_info_generator=None): """Returns a SparseImage object suitable for passing to BlockImageDiff. @@ -2068,7 +2139,7 @@ class BlockDifference(object): DataImage = blockimgdiff.DataImage - +EmptyImage = blockimgdiff.EmptyImage # map recovery.fstab's fs_types to mount/format "partition types" PARTITION_TYPES = { diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py index ad40bd4ac4..dd3e19030b 100755 --- a/tools/releasetools/ota_from_target_files.py +++ b/tools/releasetools/ota_from_target_files.py @@ -917,17 +917,14 @@ else if get_stage("%(bcb_dev)s") == "3/3" then script.ShowProgress(system_progress, 0) - # See the notes in WriteBlockIncrementalOTAPackage(). - allow_shared_blocks = target_info.get('ext4_share_dup_blocks') == "true" - def GetBlockDifference(partition): # Full OTA is done as an "incremental" against an empty source image. This # has the effect of writing new data from the package to the entire # partition, but lets us reuse the updater code that writes incrementals to # do it. - tgt = common.GetSparseImage(partition, OPTIONS.input_tmp, input_zip, - allow_shared_blocks) - tgt.ResetFileMap() + tgt = common.GetUserImage(partition, OPTIONS.input_tmp, input_zip, + info_dict=target_info, + reset_file_map=True) diff = common.BlockDifference(partition, tgt, src=None) return diff @@ -1512,8 +1509,10 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file): device_specific = common.DeviceSpecificParams( source_zip=source_zip, source_version=source_api_version, + source_tmp=OPTIONS.source_tmp, target_zip=target_zip, target_version=target_api_version, + target_tmp=OPTIONS.target_tmp, output_zip=output_zip, script=script, metadata=metadata, @@ -1529,20 +1528,20 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file): target_recovery = common.GetBootableImage( "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY") - # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain - # shared blocks (i.e. some blocks will show up in multiple files' block - # list). We can only allocate such shared blocks to the first "owner", and - # disable imgdiff for all later occurrences. 
+ # See notes in common.GetUserImage() allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or target_info.get('ext4_share_dup_blocks') == "true") - system_src = common.GetSparseImage("system", OPTIONS.source_tmp, source_zip, - allow_shared_blocks) + system_src = common.GetUserImage("system", OPTIONS.source_tmp, source_zip, + info_dict=source_info, + allow_shared_blocks=allow_shared_blocks) hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator( "system", 4096, target_info) - system_tgt = common.GetSparseImage("system", OPTIONS.target_tmp, target_zip, - allow_shared_blocks, - hashtree_info_generator) + system_tgt = common.GetUserImage("system", OPTIONS.target_tmp, target_zip, + info_dict=target_info, + allow_shared_blocks=allow_shared_blocks, + hashtree_info_generator= + hashtree_info_generator) blockimgdiff_version = max( int(i) for i in target_info.get("blockimgdiff_versions", "1").split(",")) @@ -1567,13 +1566,16 @@ def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file): if HasVendorPartition(target_zip): if not HasVendorPartition(source_zip): raise RuntimeError("can't generate incremental that adds /vendor") - vendor_src = common.GetSparseImage("vendor", OPTIONS.source_tmp, source_zip, - allow_shared_blocks) + vendor_src = common.GetUserImage("vendor", OPTIONS.source_tmp, source_zip, + info_dict=source_info, + allow_shared_blocks=allow_shared_blocks) hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator( "vendor", 4096, target_info) - vendor_tgt = common.GetSparseImage( - "vendor", OPTIONS.target_tmp, target_zip, allow_shared_blocks, - hashtree_info_generator) + vendor_tgt = common.GetUserImage( + "vendor", OPTIONS.target_tmp, target_zip, + info_dict=target_info, + allow_shared_blocks=allow_shared_blocks, + hashtree_info_generator=hashtree_info_generator) # Check first block of vendor partition for remount R/W only if # disk type is ext4 diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py index 482f86c7cd..013ade65b6 100644 --- a/tools/releasetools/test_add_img_to_target_files.py +++ b/tools/releasetools/test_add_img_to_target_files.py @@ -34,18 +34,6 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): def setUp(self): OPTIONS.input_tmp = common.MakeTempDir() - def _verifyCareMap(self, expected, file_name): - """Parses the care_map.pb; and checks the content in plain text.""" - text_file = common.MakeTempFile(prefix="caremap-", suffix=".txt") - - # Calls an external binary to convert the proto message. - cmd = ["care_map_generator", "--parse_proto", file_name, text_file] - common.RunAndCheckOutput(cmd) - - with open(text_file, 'r') as verify_fp: - plain_text = verify_fp.read() - self.assertEqual('\n'.join(expected), plain_text) - @staticmethod def _create_images(images, prefix): """Creates images under OPTIONS.input_tmp/prefix.""" @@ -164,6 +152,19 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): } return image_paths + def _verifyCareMap(self, expected, file_name): + """Parses the care_map.pb; and checks the content in plain text.""" + text_file = common.MakeTempFile(prefix="caremap-", suffix=".txt") + + # Calls an external binary to convert the proto message. 
+ cmd = ["care_map_generator", "--parse_proto", file_name, text_file] + common.RunAndCheckOutput(cmd) + + with open(text_file) as verify_fp: + plain_text = verify_fp.read() + self.assertEqual('\n'.join(expected), plain_text) + + @test_utils.SkipIfExternalToolsUnavailable() def test_AddCareMapForAbOta(self): image_paths = self._test_AddCareMapForAbOta() @@ -179,6 +180,7 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): self._verifyCareMap(expected, care_map_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_AddCareMapForAbOta_withNonCareMapPartitions(self): """Partitions without care_map should be ignored.""" image_paths = self._test_AddCareMapForAbOta() @@ -196,6 +198,7 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): self._verifyCareMap(expected, care_map_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_AddCareMapForAbOta_withAvb(self): """Tests the case for device using AVB.""" image_paths = self._test_AddCareMapForAbOta() @@ -223,6 +226,7 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): self._verifyCareMap(expected, care_map_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_AddCareMapForAbOta_noFingerprint(self): """Tests the case for partitions without fingerprint.""" image_paths = self._test_AddCareMapForAbOta() @@ -240,6 +244,7 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): self._verifyCareMap(expected, care_map_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_AddCareMapForAbOta_withThumbprint(self): """Tests the case for partitions with thumbprint.""" image_paths = self._test_AddCareMapForAbOta() @@ -282,6 +287,7 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): self.assertRaises(AssertionError, AddCareMapForAbOta, None, ['system', 'vendor'], image_paths) + @test_utils.SkipIfExternalToolsUnavailable() def test_AddCareMapForAbOta_zipOutput(self): """Tests the case with ZIP output.""" image_paths = self._test_AddCareMapForAbOta() @@ -304,6 +310,7 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): "google/sailfish/678:user/dev-keys"] self._verifyCareMap(expected, os.path.join(temp_dir, care_map_name)) + @test_utils.SkipIfExternalToolsUnavailable() def test_AddCareMapForAbOta_zipOutput_careMapEntryExists(self): """Tests the case with ZIP output which already has care_map entry.""" image_paths = self._test_AddCareMapForAbOta() @@ -338,6 +345,7 @@ class AddImagesToTargetFilesTest(test_utils.ReleaseToolsTestCase): self.assertEqual( ['--include_descriptors_from_image', '/path/to/system.img'], cmd) + @test_utils.SkipIfExternalToolsUnavailable() def test_AppendVBMetaArgsForPartition_vendorAsChainedPartition(self): testdata_dir = test_utils.get_testdata_dir() pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem') diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py index 2f8ee49823..c7d5807302 100644 --- a/tools/releasetools/test_apex_utils.py +++ b/tools/releasetools/test_apex_utils.py @@ -39,6 +39,7 @@ class ApexUtilsTest(test_utils.ReleaseToolsTestCase): payload_fp.write(os.urandom(8192)) return payload_file + @test_utils.SkipIfExternalToolsUnavailable() def test_ParseApexPayloadInfo(self): payload_file = self._GetTestPayload() apex_utils.SignApexPayload( @@ -48,16 +49,20 @@ class ApexUtilsTest(test_utils.ReleaseToolsTestCase): self.assertEqual(self.SALT, payload_info['Salt']) self.assertEqual('testkey', payload_info['apex.key']) + @test_utils.SkipIfExternalToolsUnavailable() def 
test_SignApexPayload(self): payload_file = self._GetTestPayload() apex_utils.SignApexPayload( payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048', self.SALT) apex_utils.VerifyApexPayload(payload_file, self.payload_key) + @test_utils.SkipIfExternalToolsUnavailable() def test_SignApexPayload_withSignerHelper(self): payload_file = self._GetTestPayload() + signing_helper = os.path.join(self.testdata_dir, 'signing_helper.sh') + os.chmod(signing_helper, 0o700) payload_signer_args = '--signing_helper_with_files {}'.format( - os.path.join(self.testdata_dir, 'signing_helper.sh')) + signing_helper) apex_utils.SignApexPayload( payload_file, self.payload_key, @@ -65,6 +70,7 @@ class ApexUtilsTest(test_utils.ReleaseToolsTestCase): payload_signer_args) apex_utils.VerifyApexPayload(payload_file, self.payload_key) + @test_utils.SkipIfExternalToolsUnavailable() def test_SignApexPayload_invalidKey(self): self.assertRaises( apex_utils.ApexSigningError, @@ -75,6 +81,7 @@ class ApexUtilsTest(test_utils.ReleaseToolsTestCase): 'SHA256_RSA2048', self.SALT) + @test_utils.SkipIfExternalToolsUnavailable() def test_VerifyApexPayload_wrongKey(self): payload_file = self._GetTestPayload() apex_utils.SignApexPayload( diff --git a/tools/releasetools/test_blockimgdiff.py b/tools/releasetools/test_blockimgdiff.py index 1aabaa2162..b6d47d4a0b 100644 --- a/tools/releasetools/test_blockimgdiff.py +++ b/tools/releasetools/test_blockimgdiff.py @@ -14,9 +14,13 @@ # limitations under the License. # +import os +from hashlib import sha1 + import common from blockimgdiff import ( - BlockImageDiff, DataImage, EmptyImage, HeapItem, ImgdiffStats, Transfer) + BlockImageDiff, DataImage, EmptyImage, FileImage, HeapItem, ImgdiffStats, + Transfer) from rangelib import RangeSet from test_utils import ReleaseToolsTestCase @@ -264,7 +268,42 @@ class ImgdiffStatsTest(ReleaseToolsTestCase): class DataImageTest(ReleaseToolsTestCase): - def test_read_range_set(self): - data = "file" + ('\0' * 4092) - image = DataImage(data) - self.assertEqual(data, "".join(image.ReadRangeSet(image.care_map))) + def test_read_range_set(self): + data = "file" + ('\0' * 4092) + image = DataImage(data) + self.assertEqual(data, "".join(image.ReadRangeSet(image.care_map))) + + +class FileImageTest(ReleaseToolsTestCase): + def setUp(self): + self.file_path = common.MakeTempFile() + self.data = os.urandom(4096 * 4) + with open(self.file_path, 'w') as f: + f.write(self.data) + self.file = FileImage(self.file_path) + + def test_totalsha1(self): + self.assertEqual(sha1(self.data).hexdigest(), self.file.TotalSha1()) + + def test_ranges(self): + blocksize = self.file.blocksize + for s in range(4): + for e in range(s, 4): + expected_data = self.data[s * blocksize : e * blocksize] + + rs = RangeSet([s, e]) + data = "".join(self.file.ReadRangeSet(rs)) + self.assertEqual(expected_data, data) + + sha1sum = self.file.RangeSha1(rs) + self.assertEqual(sha1(expected_data).hexdigest(), sha1sum) + + tmpfile = common.MakeTempFile() + with open(tmpfile, 'w') as f: + self.file.WriteRangeDataToFd(rs, f) + with open(tmpfile, 'r') as f: + self.assertEqual(expected_data, f.read()) + + def test_read_all(self): + data = "".join(self.file.ReadRangeSet(self.file.care_map)) + self.assertEqual(self.data, data) diff --git a/tools/releasetools/test_build_image.py b/tools/releasetools/test_build_image.py index 1cebd0c095..b24805f319 100644 --- a/tools/releasetools/test_build_image.py +++ b/tools/releasetools/test_build_image.py @@ -18,12 +18,13 @@ import filecmp import os.path import common 
+import test_utils from build_image import ( - BuildImageError, CheckHeadroom, GetFilesystemCharacteristics, SetUpInDirAndFsConfig) -from test_utils import ReleaseToolsTestCase + BuildImageError, CheckHeadroom, GetFilesystemCharacteristics, + SetUpInDirAndFsConfig) -class BuildImageTest(ReleaseToolsTestCase): +class BuildImageTest(test_utils.ReleaseToolsTestCase): # Available: 1000 blocks. EXT4FS_OUTPUT = ( @@ -48,6 +49,7 @@ class BuildImageTest(ReleaseToolsTestCase): self.assertRaises( BuildImageError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict) + @test_utils.SkipIfExternalToolsUnavailable() def test_CheckHeadroom_WrongFsType(self): prop_dict = { 'fs_type' : 'f2fs', @@ -72,6 +74,7 @@ class BuildImageTest(ReleaseToolsTestCase): self.assertRaises( AssertionError, CheckHeadroom, self.EXT4FS_OUTPUT, prop_dict) + @test_utils.SkipIfExternalToolsUnavailable() def test_CheckHeadroom_WithMke2fsOutput(self): """Tests the result parsing from actual call to mke2fs.""" input_dir = common.MakeTempDir() @@ -177,13 +180,14 @@ class BuildImageTest(ReleaseToolsTestCase): self.assertIn('fs-config-root\n', fs_config_data) self.assertEqual('/', prop_dict['mount_point']) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetFilesystemCharacteristics(self): input_dir = common.MakeTempDir() output_image = common.MakeTempFile(suffix='.img') command = ['mkuserimg_mke2fs', input_dir, output_image, 'ext4', '/system', '409600', '-j', '0'] proc = common.Run(command) - ext4fs_output, _ = proc.communicate() + proc.communicate() self.assertEqual(0, proc.returncode) output_file = common.MakeTempFile(suffix='.img') diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py index 89add4093c..9b76734fab 100644 --- a/tools/releasetools/test_common.py +++ b/tools/releasetools/test_common.py @@ -315,6 +315,7 @@ class CommonZipTest(test_utils.ReleaseToolsTestCase): finally: os.remove(zip_file_name) + @test_utils.SkipIfExternalToolsUnavailable() def test_ZipDelete(self): zip_file = tempfile.NamedTemporaryFile(delete=False, suffix='.zip') output_zip = zipfile.ZipFile(zip_file.name, 'w', @@ -376,6 +377,7 @@ class CommonZipTest(test_utils.ReleaseToolsTestCase): common.ZipClose(output_zip) return zip_file + @test_utils.SkipIfExternalToolsUnavailable() def test_UnzipTemp(self): zip_file = self._test_UnzipTemp_createZipFile() unzipped_dir = common.UnzipTemp(zip_file) @@ -385,6 +387,7 @@ class CommonZipTest(test_utils.ReleaseToolsTestCase): self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Bar4'))) self.assertTrue(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5'))) + @test_utils.SkipIfExternalToolsUnavailable() def test_UnzipTemp_withPatterns(self): zip_file = self._test_UnzipTemp_createZipFile() @@ -425,6 +428,7 @@ class CommonZipTest(test_utils.ReleaseToolsTestCase): self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Bar4'))) self.assertFalse(os.path.exists(os.path.join(unzipped_dir, 'Dir5/Baz5'))) + @test_utils.SkipIfExternalToolsUnavailable() def test_UnzipTemp_withPartiallyMatchingPatterns(self): zip_file = self._test_UnzipTemp_createZipFile() unzipped_dir = common.UnzipTemp(zip_file, ['Test*', 'Nonexistent*']) @@ -575,6 +579,7 @@ class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase): wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8') self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input) + @test_utils.SkipIfExternalToolsUnavailable() def test_ExtractAvbPublicKey(self): privkey = os.path.join(self.testdata_dir, 'testkey.key') pubkey = 
os.path.join(self.testdata_dir, 'testkey.pubkey.pem') @@ -594,18 +599,22 @@ class CommonApkUtilsTest(test_utils.ReleaseToolsTestCase): actual = common.ParseCertificate(cert_fp.read()) self.assertEqual(expected, actual) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetMinSdkVersion(self): test_app = os.path.join(self.testdata_dir, 'TestApp.apk') self.assertEqual('24', common.GetMinSdkVersion(test_app)) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetMinSdkVersion_invalidInput(self): self.assertRaises( common.ExternalError, common.GetMinSdkVersion, 'does-not-exist.apk') + @test_utils.SkipIfExternalToolsUnavailable() def test_GetMinSdkVersionInt(self): test_app = os.path.join(self.testdata_dir, 'TestApp.apk') self.assertEqual(24, common.GetMinSdkVersionInt(test_app, {})) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetMinSdkVersionInt_invalidInput(self): self.assertRaises( common.ExternalError, common.GetMinSdkVersionInt, 'does-not-exist.apk', @@ -617,6 +626,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): def setUp(self): self.testdata_dir = test_utils.get_testdata_dir() + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_emptyBlockMapFile(self): target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') with zipfile.ZipFile(target_files, 'w') as target_files_zip: @@ -649,6 +659,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): AssertionError, common.GetSparseImage, 'unknown', self.testdata_dir, None, False) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_missingBlockMapFile(self): target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') with zipfile.ZipFile(target_files, 'w') as target_files_zip: @@ -667,6 +678,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): AssertionError, common.GetSparseImage, 'system', tempdir, input_zip, False) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_sharedBlocks_notAllowed(self): """Tests the case of having overlapping blocks but disallowed.""" target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') @@ -689,6 +701,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): AssertionError, common.GetSparseImage, 'system', tempdir, input_zip, False) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_sharedBlocks_allowed(self): """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true.""" target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') @@ -731,6 +744,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra) self.assertFalse(sparse_image.file_map['/system/file1'].extra) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_incompleteRanges(self): """Tests the case of ext4 images with holes.""" target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') @@ -754,6 +768,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertFalse(sparse_image.file_map['/system/file1'].extra) self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete']) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_systemRootImage_filenameWithExtraLeadingSlash(self): target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') with zipfile.ZipFile(target_files, 'w') as target_files_zip: @@ -781,6 +796,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertTrue( 
sparse_image.file_map['/system/app/file3'].extra['incomplete']) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_systemRootImage_nonSystemFiles(self): target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') with zipfile.ZipFile(target_files, 'w') as target_files_zip: @@ -803,6 +819,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertFalse(sparse_image.file_map['//system/file1'].extra) self.assertTrue(sparse_image.file_map['//init.rc'].extra['incomplete']) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetSparseImage_fileNotFound(self): target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip') with zipfile.ZipFile(target_files, 'w') as target_files_zip: @@ -822,6 +839,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): AssertionError, common.GetSparseImage, 'system', tempdir, input_zip, False) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetAvbChainedPartitionArg(self): pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem') info_dict = { @@ -835,6 +853,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertEqual('2', args[1]) self.assertTrue(os.path.exists(args[2])) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetAvbChainedPartitionArg_withPrivateKey(self): key = os.path.join(self.testdata_dir, 'testkey.key') info_dict = { @@ -848,6 +867,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertEqual('2', args[1]) self.assertTrue(os.path.exists(args[2])) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetAvbChainedPartitionArg_withSpecifiedKey(self): info_dict = { 'avb_avbtool': 'avbtool', @@ -862,6 +882,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertEqual('2', args[1]) self.assertTrue(os.path.exists(args[2])) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetAvbChainedPartitionArg_invalidKey(self): pubkey = os.path.join(self.testdata_dir, 'testkey_with_passwd.x509.pem') info_dict = { @@ -922,6 +943,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertIn('/', loaded_dict['fstab']) self.assertIn('/system', loaded_dict['fstab']) + @test_utils.SkipIfExternalToolsUnavailable() def test_LoadInfoDict_dirInput(self): target_files = self._test_LoadInfoDict_createTargetFiles( self.INFO_DICT_DEFAULT, @@ -933,6 +955,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertIn('/', loaded_dict['fstab']) self.assertIn('/system', loaded_dict['fstab']) + @test_utils.SkipIfExternalToolsUnavailable() def test_LoadInfoDict_dirInput_legacyRecoveryFstabPath(self): target_files = self._test_LoadInfoDict_createTargetFiles( self.INFO_DICT_DEFAULT, @@ -990,6 +1013,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): self.assertEqual(2, loaded_dict['fstab_version']) self.assertIsNone(loaded_dict['fstab']) + @test_utils.SkipIfExternalToolsUnavailable() def test_LoadInfoDict_missingMetaMiscInfoTxt(self): target_files = self._test_LoadInfoDict_createTargetFiles( self.INFO_DICT_DEFAULT, @@ -998,6 +1022,7 @@ class CommonUtilsTest(test_utils.ReleaseToolsTestCase): with zipfile.ZipFile(target_files, 'r') as target_files_zip: self.assertRaises(ValueError, common.LoadInfoDict, target_files_zip) + @test_utils.SkipIfExternalToolsUnavailable() def test_LoadInfoDict_repacking(self): target_files = self._test_LoadInfoDict_createTargetFiles( self.INFO_DICT_DEFAULT, @@ -1066,6 +1091,7 @@ class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase): 
validate_target_files.ValidateInstallRecoveryScript(self._tempdir, self._info) + @test_utils.SkipIfExternalToolsUnavailable() def test_recovery_from_boot(self): recovery_image = common.File("recovery.img", self.recovery_data) self._out_tmp_sink("recovery.img", recovery_image.data, "IMAGES") diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py index bb9ce8e276..ec452a8cc6 100644 --- a/tools/releasetools/test_merge_target_files.py +++ b/tools/releasetools/test_merge_target_files.py @@ -16,7 +16,6 @@ import os.path -import common import test_utils from merge_target_files import ( read_config_list, validate_config_lists, default_system_item_list, diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py index 466fde1af3..ee831e3ae2 100644 --- a/tools/releasetools/test_ota_from_target_files.py +++ b/tools/releasetools/test_ota_from_target_files.py @@ -425,7 +425,6 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): } common.OPTIONS.search_path = test_utils.get_search_path() - self.assertIsNotNone(common.OPTIONS.search_path) def test_GetPackageMetadata_abOta_full(self): target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT) @@ -582,6 +581,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): }, metadata) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetTargetFilesZipForSecondaryImages(self): input_file = construct_target_files(secondary=True) target_file = GetTargetFilesZipForSecondaryImages(input_file) @@ -600,6 +600,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): self.assertNotIn('IMAGES/system_other.img', namelist) self.assertNotIn('IMAGES/system.map', namelist) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetTargetFilesZipForSecondaryImages_skipPostinstall(self): input_file = construct_target_files(secondary=True) target_file = GetTargetFilesZipForSecondaryImages( @@ -619,6 +620,7 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): self.assertNotIn('IMAGES/system.map', namelist) self.assertNotIn(POSTINSTALL_CONFIG, namelist) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetTargetFilesZipForSecondaryImages_withoutRadioImages(self): input_file = construct_target_files(secondary=True) common.ZipDelete(input_file, 'RADIO/bootloader.img') @@ -639,12 +641,14 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): self.assertNotIn('RADIO/bootloader.img', namelist) self.assertNotIn('RADIO/modem.img', namelist) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetTargetFilesZipWithoutPostinstallConfig(self): input_file = construct_target_files() target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file) with zipfile.ZipFile(target_file) as verify_zip: self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist()) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetTargetFilesZipWithoutPostinstallConfig_missingEntry(self): input_file = construct_target_files() common.ZipDelete(input_file, POSTINSTALL_CONFIG) @@ -675,20 +679,25 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase): FinalizeMetadata(metadata, zip_file, output_file, needed_property_files) self.assertIn('ota-test-property-files', metadata) + @test_utils.SkipIfExternalToolsUnavailable() def test_FinalizeMetadata(self): self._test_FinalizeMetadata() + @test_utils.SkipIfExternalToolsUnavailable() def test_FinalizeMetadata_withNoSigning(self): common.OPTIONS.no_signing = True 
self._test_FinalizeMetadata() + @test_utils.SkipIfExternalToolsUnavailable() def test_FinalizeMetadata_largeEntry(self): self._test_FinalizeMetadata(large_entry=True) + @test_utils.SkipIfExternalToolsUnavailable() def test_FinalizeMetadata_largeEntry_withNoSigning(self): common.OPTIONS.no_signing = True self._test_FinalizeMetadata(large_entry=True) + @test_utils.SkipIfExternalToolsUnavailable() def test_FinalizeMetadata_insufficientSpace(self): entries = [ 'required-entry1', @@ -766,6 +775,7 @@ class PropertyFilesTest(test_utils.ReleaseToolsTestCase): expected = entry.replace('.', '-').upper().encode() self.assertEqual(expected, input_fp.read(size)) + @test_utils.SkipIfExternalToolsUnavailable() def test_Compute(self): entries = ( 'required-entry1', @@ -805,6 +815,7 @@ class PropertyFilesTest(test_utils.ReleaseToolsTestCase): with zipfile.ZipFile(zip_file, 'r') as zip_fp: self.assertRaises(KeyError, property_files.Compute, zip_fp) + @test_utils.SkipIfExternalToolsUnavailable() def test_Finalize(self): entries = [ 'required-entry1', @@ -825,6 +836,7 @@ class PropertyFilesTest(test_utils.ReleaseToolsTestCase): entries[2] = 'metadata' self._verify_entries(zip_file, tokens, entries) + @test_utils.SkipIfExternalToolsUnavailable() def test_Finalize_assertReservedLength(self): entries = ( 'required-entry1', @@ -998,6 +1010,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTest): ), property_files.optional) + @test_utils.SkipIfExternalToolsUnavailable() def test_GetPayloadMetadataOffsetAndSize(self): target_file = construct_target_files() payload = Payload() @@ -1071,6 +1084,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTest): return zip_file + @test_utils.SkipIfExternalToolsUnavailable() def test_Compute(self): zip_file = self.construct_zip_package_withValidPayload() property_files = AbOtaPropertyFiles() @@ -1084,6 +1098,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTest): self._verify_entries( zip_file, tokens, ('care_map.txt', 'compatibility.zip')) + @test_utils.SkipIfExternalToolsUnavailable() def test_Finalize(self): zip_file = self.construct_zip_package_withValidPayload(with_metadata=True) property_files = AbOtaPropertyFiles() @@ -1099,6 +1114,7 @@ class AbOtaPropertyFilesTest(PropertyFilesTest): self._verify_entries( zip_file, tokens, ('care_map.txt', 'compatibility.zip')) + @test_utils.SkipIfExternalToolsUnavailable() def test_Verify(self): zip_file = self.construct_zip_package_withValidPayload(with_metadata=True) property_files = AbOtaPropertyFiles() @@ -1204,6 +1220,7 @@ class PayloadSignerTest(test_utils.ReleaseToolsTestCase): def test_GetKeySizeInBytes_512Bytes(self): signing_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key') + # pylint: disable=protected-access key_size = PayloadSigner._GetKeySizeInBytes(signing_key) self.assertEqual(512, key_size) @@ -1233,6 +1250,7 @@ class PayloadSignerTest(test_utils.ReleaseToolsTestCase): """Uses testdata/payload_signer.sh as the external payload signer.""" common.OPTIONS.payload_signer = os.path.join( self.testdata_dir, 'payload_signer.sh') + os.chmod(common.OPTIONS.payload_signer, 0o700) common.OPTIONS.payload_signer_args = [ os.path.join(self.testdata_dir, 'testkey.pk8')] payload_signer = PayloadSigner() @@ -1272,14 +1290,17 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): payload.Generate(target_file, source_file) return payload + @test_utils.SkipIfExternalToolsUnavailable() def test_Generate_full(self): payload = self._create_payload_full() self.assertTrue(os.path.exists(payload.payload_file)) + 
@test_utils.SkipIfExternalToolsUnavailable() def test_Generate_incremental(self): payload = self._create_payload_incremental() self.assertTrue(os.path.exists(payload.payload_file)) + @test_utils.SkipIfExternalToolsUnavailable() def test_Generate_additionalArgs(self): target_file = construct_target_files() source_file = construct_target_files() @@ -1290,12 +1311,14 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): target_file, additional_args=["--source_image", source_file]) self.assertTrue(os.path.exists(payload.payload_file)) + @test_utils.SkipIfExternalToolsUnavailable() def test_Generate_invalidInput(self): target_file = construct_target_files() common.ZipDelete(target_file, 'IMAGES/vendor.img') payload = Payload() self.assertRaises(common.ExternalError, payload.Generate, target_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_Sign_full(self): payload = self._create_payload_full() payload.Sign(PayloadSigner()) @@ -1309,6 +1332,7 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): os.path.join(self.testdata_dir, 'testkey.x509.pem'), output_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_Sign_incremental(self): payload = self._create_payload_incremental() payload.Sign(PayloadSigner()) @@ -1322,6 +1346,7 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): os.path.join(self.testdata_dir, 'testkey.x509.pem'), output_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_Sign_withDataWipe(self): common.OPTIONS.wipe_user_data = True payload = self._create_payload_full() @@ -1330,6 +1355,7 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): with open(payload.payload_properties) as properties_fp: self.assertIn("POWERWASH=1", properties_fp.read()) + @test_utils.SkipIfExternalToolsUnavailable() def test_Sign_secondary(self): payload = self._create_payload_full(secondary=True) payload.Sign(PayloadSigner()) @@ -1337,6 +1363,7 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): with open(payload.payload_properties) as properties_fp: self.assertIn("SWITCH_SLOT_ON_REBOOT=0", properties_fp.read()) + @test_utils.SkipIfExternalToolsUnavailable() def test_Sign_badSigner(self): """Tests that signing failure can be captured.""" payload = self._create_payload_full() @@ -1344,6 +1371,7 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): payload_signer.signer_args.append('bad-option') self.assertRaises(common.ExternalError, payload.Sign, payload_signer) + @test_utils.SkipIfExternalToolsUnavailable() def test_WriteToZip(self): payload = self._create_payload_full() payload.Sign(PayloadSigner()) @@ -1365,6 +1393,7 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): continue self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type) + @test_utils.SkipIfExternalToolsUnavailable() def test_WriteToZip_unsignedPayload(self): """Unsigned payloads should not be allowed to be written to zip.""" payload = self._create_payload_full() @@ -1380,6 +1409,7 @@ class PayloadTest(test_utils.ReleaseToolsTestCase): with zipfile.ZipFile(output_file, 'w') as output_zip: self.assertRaises(AssertionError, payload.WriteToZip, output_zip) + @test_utils.SkipIfExternalToolsUnavailable() def test_WriteToZip_secondary(self): payload = self._create_payload_full(secondary=True) payload.Sign(PayloadSigner()) diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py index edb3d41d29..1e919f7bb5 100644..100755 --- a/tools/releasetools/test_utils.py +++ b/tools/releasetools/test_utils.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python # # Copyright (C) 2018 The Android Open 
Source Project # @@ -30,6 +31,18 @@ import common # Some test runner doesn't like outputs from stderr. logging.basicConfig(stream=sys.stdout) +# Use ANDROID_BUILD_TOP as an indicator to tell if the needed tools (e.g. +# avbtool, mke2fs) are available while running the tests. Not having the var or +# having empty string means we can't run the tests that require external tools. +EXTERNAL_TOOLS_UNAVAILABLE = not os.environ.get("ANDROID_BUILD_TOP") + + +def SkipIfExternalToolsUnavailable(): + """Decorator function that allows skipping tests per tools availability.""" + if EXTERNAL_TOOLS_UNAVAILABLE: + return unittest.skip('External tools unavailable') + return lambda func: func + def get_testdata_dir(): """Returns the testdata dir, in relative to the script dir.""" @@ -40,6 +53,19 @@ def get_testdata_dir(): def get_search_path(): """Returns the search path that has 'framework/signapk.jar' under.""" + + def signapk_exists(path): + signapk_path = os.path.realpath( + os.path.join(path, 'framework', 'signapk.jar')) + return os.path.exists(signapk_path) + + # Try with ANDROID_BUILD_TOP first. + full_path = os.path.realpath(os.path.join( + os.environ.get('ANDROID_BUILD_TOP', ''), 'out', 'host', 'linux-x86')) + if signapk_exists(full_path): + return full_path + + # Otherwise try going with relative pathes. current_dir = os.path.dirname(os.path.realpath(__file__)) for path in ( # In relative to 'build/make/tools/releasetools' in the Android source. @@ -47,9 +73,7 @@ def get_search_path(): # Or running the script unpacked from otatools.zip. ['..']): full_path = os.path.realpath(os.path.join(current_dir, *path)) - signapk_path = os.path.realpath( - os.path.join(full_path, 'framework', 'signapk.jar')) - if os.path.exists(signapk_path): + if signapk_exists(full_path): return full_path return None @@ -123,3 +147,10 @@ class ReleaseToolsTestCase(unittest.TestCase): def tearDown(self): common.Cleanup() + + +if __name__ == '__main__': + testsuite = unittest.TestLoader().discover( + os.path.dirname(os.path.realpath(__file__))) + # atest needs a verbosity level of >= 2 to correctly parse the result. 
+ unittest.TextTestRunner(verbosity=2).run(testsuite) diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py index 5f619ec79a..70e3b497af 100644 --- a/tools/releasetools/test_validate_target_files.py +++ b/tools/releasetools/test_validate_target_files.py @@ -55,6 +55,7 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase): 0, proc.returncode, "Failed to sign boot image with boot_signer: {}".format(stdoutdata)) + @test_utils.SkipIfExternalToolsUnavailable() def test_ValidateVerifiedBootImages_bootImage(self): input_tmp = common.MakeTempDir() os.mkdir(os.path.join(input_tmp, 'IMAGES')) @@ -69,6 +70,7 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase): } ValidateVerifiedBootImages(input_tmp, info_dict, options) + @test_utils.SkipIfExternalToolsUnavailable() def test_ValidateVerifiedBootImages_bootImage_wrongKey(self): input_tmp = common.MakeTempDir() os.mkdir(os.path.join(input_tmp, 'IMAGES')) @@ -85,6 +87,7 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase): AssertionError, ValidateVerifiedBootImages, input_tmp, info_dict, options) + @test_utils.SkipIfExternalToolsUnavailable() def test_ValidateVerifiedBootImages_bootImage_corrupted(self): input_tmp = common.MakeTempDir() os.mkdir(os.path.join(input_tmp, 'IMAGES')) @@ -139,6 +142,7 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase): # Append the verity metadata. verity_image_builder.Build(output_file) + @test_utils.SkipIfExternalToolsUnavailable() def test_ValidateVerifiedBootImages_systemImage(self): input_tmp = common.MakeTempDir() os.mkdir(os.path.join(input_tmp, 'IMAGES')) @@ -162,6 +166,7 @@ class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase): } ValidateVerifiedBootImages(input_tmp, info_dict, options) + @test_utils.SkipIfExternalToolsUnavailable() def test_ValidateFileConsistency_incompleteRange(self): input_tmp = common.MakeTempDir() os.mkdir(os.path.join(input_tmp, 'IMAGES')) diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py index e0607c8831..1cc539f201 100644 --- a/tools/releasetools/test_verity_utils.py +++ b/tools/releasetools/test_verity_utils.py @@ -24,7 +24,8 @@ import random import common import sparse_img from rangelib import RangeSet -from test_utils import get_testdata_dir, ReleaseToolsTestCase +from test_utils import ( + get_testdata_dir, ReleaseToolsTestCase, SkipIfExternalToolsUnavailable) from verity_utils import ( CreateHashtreeInfoGenerator, CreateVerityImageBuilder, HashtreeInfo, VerifiedBootVersion1HashtreeInfoGenerator) @@ -89,6 +90,7 @@ class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase): return output_file + @SkipIfExternalToolsUnavailable() def test_CreateHashtreeInfoGenerator(self): image_file = sparse_img.SparseImage(self._generate_image()) @@ -99,6 +101,7 @@ class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase): self.assertEqual(self.partition_size, generator.partition_size) self.assertTrue(generator.fec_supported) + @SkipIfExternalToolsUnavailable() def test_DecomposeSparseImage(self): image_file = sparse_img.SparseImage(self._generate_image()) @@ -109,6 +112,7 @@ class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase): self.assertEqual(12288, generator.hashtree_size) self.assertEqual(32768, generator.metadata_size) + @SkipIfExternalToolsUnavailable() def test_ParseHashtreeMetadata(self): image_file = sparse_img.SparseImage(self._generate_image()) generator = 
VerifiedBootVersion1HashtreeInfoGenerator( @@ -123,6 +127,7 @@ class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase): self.assertEqual(self.fixed_salt, generator.hashtree_info.salt) self.assertEqual(self.expected_root_hash, generator.hashtree_info.root_hash) + @SkipIfExternalToolsUnavailable() def test_ValidateHashtree_smoke(self): generator = VerifiedBootVersion1HashtreeInfoGenerator( self.partition_size, 4096, True) @@ -138,6 +143,7 @@ class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase): self.assertTrue(generator.ValidateHashtree()) + @SkipIfExternalToolsUnavailable() def test_ValidateHashtree_failure(self): generator = VerifiedBootVersion1HashtreeInfoGenerator( self.partition_size, 4096, True) @@ -153,6 +159,7 @@ class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase): self.assertFalse(generator.ValidateHashtree()) + @SkipIfExternalToolsUnavailable() def test_Generate(self): image_file = sparse_img.SparseImage(self._generate_image()) generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict) @@ -193,6 +200,7 @@ class VerifiedBootVersion1VerityImageBuilderTest(ReleaseToolsTestCase): del prop_dict['verity_block_device'] self.assertIsNone(CreateVerityImageBuilder(prop_dict)) + @SkipIfExternalToolsUnavailable() def test_CalculateMaxImageSize(self): verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT) size = verity_image_builder.CalculateMaxImageSize() @@ -221,11 +229,13 @@ class VerifiedBootVersion1VerityImageBuilderTest(ReleaseToolsTestCase): cmd = ['verity_verifier', image, '-mincrypt', verify_key] common.RunAndCheckOutput(cmd) + @SkipIfExternalToolsUnavailable() def test_Build(self): self._BuildAndVerify( self.DEFAULT_PROP_DICT, os.path.join(get_testdata_dir(), 'testkey_mincrypt')) + @SkipIfExternalToolsUnavailable() def test_Build_SanityCheck(self): # A sanity check for the test itself: the image shouldn't be verifiable # with wrong key. @@ -235,6 +245,7 @@ class VerifiedBootVersion1VerityImageBuilderTest(ReleaseToolsTestCase): self.DEFAULT_PROP_DICT, os.path.join(get_testdata_dir(), 'verity_mincrypt')) + @SkipIfExternalToolsUnavailable() def test_Build_FecDisabled(self): prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT) del prop_dict['verity_fec'] @@ -242,6 +253,7 @@ class VerifiedBootVersion1VerityImageBuilderTest(ReleaseToolsTestCase): prop_dict, os.path.join(get_testdata_dir(), 'testkey_mincrypt')) + @SkipIfExternalToolsUnavailable() def test_Build_SquashFs(self): verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT) verity_image_builder.CalculateMaxImageSize() @@ -282,6 +294,7 @@ class VerifiedBootVersion2VerityImageBuilderTest(ReleaseToolsTestCase): verity_image_builder = CreateVerityImageBuilder(prop_dict) self.assertIsNone(verity_image_builder) + @SkipIfExternalToolsUnavailable() def test_Build(self): prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT) verity_image_builder = CreateVerityImageBuilder(prop_dict) |
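The FileImage class introduced in blockimgdiff.py deliberately mirrors the interface of SparseImage and DataImage (care_map, RangeSha1, ReadRangeSet, TotalSha1, WriteRangeDataToFd), which is what lets the new common.GetUserImage hand either flavor to BlockImageDiff. A minimal sketch of driving it directly under the Python 2 releasetools environment; the image path is a placeholder, and the file must be a raw (non-sparse) image whose size is a multiple of the 4096-byte block size:

# fileimage_demo.py -- illustrative only; run from tools/releasetools so the
# local modules are importable. The image path below is a placeholder.
import common
from blockimgdiff import FileImage
from rangelib import RangeSet

image_path = "IMAGES/system.img"  # placeholder: any 4096-aligned raw image

img = FileImage(image_path)
print("total blocks: %d" % img.total_blocks)
print("whole-image SHA-1: %s" % img.TotalSha1())

# Hash an arbitrary two-block range, the same way BlockImageDiff checksums
# each transfer while building an update package.
first_two = RangeSet(data=(0, 2))
print("blocks [0, 2) SHA-1: %s" % img.RangeSha1(first_two))

# Stream the same range into a scratch file, as the full-OTA path does when
# writing new data into the package.
scratch = common.MakeTempFile()
with open(scratch, "w") as fd:
    img.WriteRangeDataToFd(first_two, fd)

For packaging code the higher-level entry point is common.GetUserImage, which selects FileImage or SparseImage based on the extfs_sparse_flag entry in the target-files info dict.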