Diffstat (limited to 'tools')
| -rw-r--r-- | tools/releasetools/apex_utils.py                 | 22 |
| -rwxr-xr-x | tools/releasetools/merge_target_files.py         | 10 |
| -rwxr-xr-x | tools/releasetools/ota_from_target_files.py      | 24 |
| -rwxr-xr-x | tools/releasetools/sign_apex.py                  |  4 |
| -rwxr-xr-x | tools/releasetools/sign_target_files_apks.py     |  3 |
| -rw-r--r-- | tools/releasetools/test_apex_utils.py            | 42 |
| -rw-r--r-- | tools/releasetools/test_ota_from_target_files.py | 14 |
| -rwxr-xr-x | tools/warn.py                                    | 47 |
8 files changed, 123 insertions, 43 deletions
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 18ad8cec0d..ee3c463650 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -42,7 +42,7 @@ class ApexSigningError(Exception):
 
 
 def SignApexPayload(avbtool, payload_file, payload_key_path, payload_key_name,
-                    algorithm, salt, signing_args=None):
+                    algorithm, salt, no_hashtree, signing_args=None):
   """Signs a given payload_file with the payload key."""
   # Add the new footer. Old footer, if any, will be replaced by avbtool.
   cmd = [avbtool, 'add_hashtree_footer',
@@ -52,6 +52,8 @@ def SignApexPayload(avbtool, payload_file, payload_key_path, payload_key_name,
          '--prop', 'apex.key:{}'.format(payload_key_name),
          '--image', payload_file,
          '--salt', salt]
+  if no_hashtree:
+    cmd.append('--no_hashtree')
   if signing_args:
     cmd.extend(shlex.split(signing_args))
 
@@ -64,13 +66,15 @@ def SignApexPayload(avbtool, payload_file, payload_key_path, payload_key_name,
 
   # Verify the signed payload image with specified public key.
   logger.info('Verifying %s', payload_file)
-  VerifyApexPayload(avbtool, payload_file, payload_key_path)
+  VerifyApexPayload(avbtool, payload_file, payload_key_path, no_hashtree)
 
 
-def VerifyApexPayload(avbtool, payload_file, payload_key):
+def VerifyApexPayload(avbtool, payload_file, payload_key, no_hashtree=False):
   """Verifies the APEX payload signature with the given key."""
   cmd = [avbtool, 'verify_image', '--image', payload_file,
          '--key', payload_key]
+  if no_hashtree:
+    cmd.append('--accept_zeroed_hashtree')
   try:
     common.RunAndCheckOutput(cmd)
   except common.ExternalError as e:
@@ -91,7 +95,7 @@ def ParseApexPayloadInfo(avbtool, payload_path):
 
   Returns:
     A dict that contains payload property-value pairs. The dict should at least
-    contain Algorithm, Salt and apex.key.
+    contain Algorithm, Salt, Tree Size and apex.key.
   """
   if not os.path.exists(payload_path):
     raise ApexInfoError('Failed to find image: {}'.format(payload_path))
@@ -104,11 +108,11 @@ def ParseApexPayloadInfo(avbtool, payload_path):
         'Failed to get APEX payload info for {}:\n{}'.format(
             payload_path, e))
 
-  # Extract the Algorithm / Salt / Prop info from payload (i.e. an image signed
-  # with avbtool). For example,
+  # Extract the Algorithm / Salt / Prop info / Tree size from payload (i.e. an
+  # image signed with avbtool). For example,
   #   Algorithm: SHA256_RSA4096
   PAYLOAD_INFO_PATTERN = (
-      r'^\s*(?P<key>Algorithm|Salt|Prop)\:\s*(?P<value>.*?)$')
+      r'^\s*(?P<key>Algorithm|Salt|Prop|Tree Size)\:\s*(?P<value>.*?)$')
   payload_info_matcher = re.compile(PAYLOAD_INFO_PATTERN)
 
   payload_info = {}
@@ -151,7 +155,7 @@ def ParseApexPayloadInfo(avbtool, payload_path):
 
 
 def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
-             codename_to_api_level_map, signing_args=None):
+             codename_to_api_level_map, no_hashtree, signing_args=None):
   """Signs the current APEX with the given payload/container keys.
 
   Args:
@@ -160,6 +164,7 @@ def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
     container_key: The path to container signing key (w/o extension).
     container_pw: The matching password of the container_key, or None.
     codename_to_api_level_map: A dict that maps from codename to API level.
+    no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
 
   Returns:
@@ -187,6 +192,7 @@ def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
       payload_info['apex.key'],
       payload_info['Algorithm'],
       payload_info['Salt'],
+      no_hashtree,
       signing_args)
 
   # 1b. Update the embedded payload public key.
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 61c4f4ecc9..ba70986459 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -914,11 +914,6 @@ def merge_target_files(temp_dir, framework_target_files, framework_item_list,
     generate_super_empty_image(output_target_files_temp_dir,
                                output_super_empty)
 
-  if output_img:
-    # Create the IMG package from the merged target files (before zipping, in
-    # order to avoid an unnecessary unzip and copy).
-    img_from_target_files.main([output_target_files_temp_dir, output_img])
-
   # Finally, create the output target files zip archive and/or copy the
   # output items to the output target files directory.
 
@@ -932,6 +927,11 @@ def merge_target_files(temp_dir, framework_target_files, framework_item_list,
       output_target_files_temp_dir,
       temp_dir)
 
+  # Create the IMG package from the merged target files package.
+
+  if output_img:
+    img_from_target_files.main([output_zip, output_img])
+
   # Create the OTA package from the merged target files package.
 
   if output_ota:
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 82ea53970f..de947f329a 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -250,7 +250,12 @@ UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
 TARGET_DIFFING_UNZIP_PATTERN = ['BOOT', 'RECOVERY', 'SYSTEM/*', 'VENDOR/*',
                                 'PRODUCT/*', 'SYSTEM_EXT/*', 'ODM/*']
 RETROFIT_DAP_UNZIP_PATTERN = ['OTA/super_*.img', AB_PARTITIONS]
-SECONDARY_IMAGES_SKIP_PARTITIONS = ['odm', 'product', 'system_ext', 'vendor']
+
+# Images to be excluded from secondary payload. We essentially only keep
+# 'system_other' and bootloader partitions.
+SECONDARY_PAYLOAD_SKIPPED_IMAGES = [
+    'boot', 'dtbo', 'modem', 'odm', 'product', 'radio', 'recovery',
+    'system_ext', 'vbmeta', 'vbmeta_system', 'vbmeta_vendor', 'vendor']
 
 
 class BuildInfo(object):
@@ -1834,7 +1839,7 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
     if key == 'dynamic_partition_list' or key.endswith(LIST_SUFFIX):
       partitions = value.split()
       partitions = [partition for partition in partitions if partition
-                    not in SECONDARY_IMAGES_SKIP_PARTITIONS]
+                    not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
       output_list.append('{}={}'.format(key, ' '.join(partitions)))
     else:
       output_list.append(line)
@@ -1856,10 +1861,13 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
     elif info.filename in ('IMAGES/system.img',
                            'IMAGES/system.map'):
       pass
-    # Images like vendor and product are not needed in the secondary payload.
-    elif info.filename in ['IMAGES/{}.img'.format(partition) for partition in
-                           SECONDARY_IMAGES_SKIP_PARTITIONS]:
-      pass
+
+    # Copy images that are not in SECONDARY_PAYLOAD_SKIPPED_IMAGES.
+    elif info.filename.startswith(('IMAGES/', 'RADIO/')):
+      image_name = os.path.basename(info.filename)
+      if image_name not in ['{}.img'.format(partition) for partition in
+                            SECONDARY_PAYLOAD_SKIPPED_IMAGES]:
+        common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
 
     # Skip copying the postinstall config if requested.
     elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
@@ -1872,7 +1880,7 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
       with open(unzipped_file) as f:
         partition_list = f.read().splitlines()
       partition_list = [partition for partition in partition_list if partition
-                        and partition not in SECONDARY_IMAGES_SKIP_PARTITIONS]
+                        and partition not in SECONDARY_PAYLOAD_SKIPPED_IMAGES]
       common.ZipWriteStr(target_zip, info.filename, '\n'.join(partition_list))
     # Remove the unnecessary partitions from the dynamic partitions list.
     elif (info.filename == 'META/misc_info.txt' or
@@ -1881,8 +1889,6 @@ def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
       common.ZipWriteStr(target_zip, info.filename, modified_info)
-    elif info.filename.startswith(('IMAGES/', 'RADIO/')):
-      common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
     else:
       common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
 
   common.ZipClose(target_zip)
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index 2516e15ca4..f2daa46e92 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -56,6 +56,7 @@ def SignApexFile(avbtool, apex_file, payload_key, container_key,
       container_key=container_key,
       container_pw=None,
       codename_to_api_level_map=None,
+      no_hashtree=False,
       signing_args=signing_args)
 
 
@@ -103,7 +104,8 @@ def main(argv):
       args[0],
       options['payload_key'],
       options['container_key'],
-      options.get('payload_extra_args'))
+      no_hashtree=False,
+      signing_args=options.get('payload_extra_args'))
   shutil.copyfile(signed_apex, args[1])
   logger.info("done.")
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 1f41431b78..710147bb23 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -479,7 +479,8 @@ def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
           container_key,
           key_passwords[container_key],
           codename_to_api_level_map,
-          OPTIONS.avb_extra_args.get('apex'))
+          no_hashtree=True,
+          signing_args=OPTIONS.avb_extra_args.get('apex'))
       common.ZipWrite(output_tf_zip, signed_apex, filename)
 
     else:
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
index e9c26f0d9e..5d4cc77494 100644
--- a/tools/releasetools/test_apex_utils.py
+++ b/tools/releasetools/test_apex_utils.py
@@ -44,19 +44,42 @@ class ApexUtilsTest(test_utils.ReleaseToolsTestCase):
     payload_file = self._GetTestPayload()
     apex_utils.SignApexPayload(
         'avbtool', payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048',
-        self.SALT)
+        self.SALT, no_hashtree=True)
     payload_info = apex_utils.ParseApexPayloadInfo('avbtool', payload_file)
     self.assertEqual('SHA256_RSA2048', payload_info['Algorithm'])
     self.assertEqual(self.SALT, payload_info['Salt'])
     self.assertEqual('testkey', payload_info['apex.key'])
+    self.assertEqual('0 bytes', payload_info['Tree Size'])
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_SignApexPayload(self):
     payload_file = self._GetTestPayload()
     apex_utils.SignApexPayload(
         'avbtool', payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048',
-        self.SALT)
+        self.SALT, no_hashtree=True)
+    apex_utils.VerifyApexPayload(
+        'avbtool', payload_file, self.payload_key, True)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_SignApexPayload_withHashtree(self):
+    payload_file = self._GetTestPayload()
+    apex_utils.SignApexPayload(
+        'avbtool', payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048',
+        self.SALT, no_hashtree=False)
     apex_utils.VerifyApexPayload('avbtool', payload_file, self.payload_key)
+    payload_info = apex_utils.ParseApexPayloadInfo('avbtool', payload_file)
+    self.assertEqual('4096 bytes', payload_info['Tree Size'])
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_SignApexPayload_noHashtree(self):
+    payload_file = self._GetTestPayload()
+    apex_utils.SignApexPayload(
+        'avbtool', payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048',
+        self.SALT, no_hashtree=True)
+    apex_utils.VerifyApexPayload('avbtool', payload_file, self.payload_key,
+                                 no_hashtree=True)
+    payload_info = apex_utils.ParseApexPayloadInfo('avbtool', payload_file)
+    self.assertEqual('0 bytes', payload_info['Tree Size'])
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_SignApexPayload_withSignerHelper(self):
@@ -70,8 +93,10 @@ class ApexUtilsTest(test_utils.ReleaseToolsTestCase):
         payload_file,
         self.payload_key,
         'testkey',
         'SHA256_RSA2048',
         self.SALT,
+        True,
         payload_signer_args)
-    apex_utils.VerifyApexPayload('avbtool', payload_file, self.payload_key)
+    apex_utils.VerifyApexPayload(
+        'avbtool', payload_file, self.payload_key, True)
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_SignApexPayload_invalidKey(self):
@@ -83,18 +108,21 @@ class ApexUtilsTest(test_utils.ReleaseToolsTestCase):
         os.path.join(self.testdata_dir, 'testkey.x509.pem'),
         'testkey',
         'SHA256_RSA2048',
-        self.SALT)
+        self.SALT,
+        no_hashtree=True)
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_VerifyApexPayload_wrongKey(self):
     payload_file = self._GetTestPayload()
     apex_utils.SignApexPayload(
         'avbtool', payload_file, self.payload_key, 'testkey', 'SHA256_RSA2048',
-        self.SALT)
-    apex_utils.VerifyApexPayload('avbtool', payload_file, self.payload_key)
+        self.SALT, True)
+    apex_utils.VerifyApexPayload(
+        'avbtool', payload_file, self.payload_key, True)
     self.assertRaises(
         apex_utils.ApexSigningError,
         apex_utils.VerifyApexPayload,
         'avbtool',
         payload_file,
-        os.path.join(self.testdata_dir, 'testkey_with_passwd.key'))
+        os.path.join(self.testdata_dir, 'testkey_with_passwd.key'),
+        no_hashtree=True)
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 9b2fbb6771..9825a5ea13 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -599,16 +599,16 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
       ab_partitions = verify_zip.read('META/ab_partitions.txt')
 
     self.assertIn('META/ab_partitions.txt', namelist)
-    self.assertIn('IMAGES/boot.img', namelist)
     self.assertIn('IMAGES/system.img', namelist)
     self.assertIn('RADIO/bootloader.img', namelist)
-    self.assertIn('RADIO/modem.img', namelist)
     self.assertIn(POSTINSTALL_CONFIG, namelist)
 
+    self.assertNotIn('IMAGES/boot.img', namelist)
     self.assertNotIn('IMAGES/system_other.img', namelist)
     self.assertNotIn('IMAGES/system.map', namelist)
+    self.assertNotIn('RADIO/modem.img', namelist)
 
-    expected_ab_partitions = ['boot', 'system', 'bootloader', 'modem']
+    expected_ab_partitions = ['system', 'bootloader']
     self.assertEqual('\n'.join(expected_ab_partitions), ab_partitions)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -621,13 +621,13 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
       namelist = verify_zip.namelist()
 
     self.assertIn('META/ab_partitions.txt', namelist)
-    self.assertIn('IMAGES/boot.img', namelist)
     self.assertIn('IMAGES/system.img', namelist)
     self.assertIn('RADIO/bootloader.img', namelist)
-    self.assertIn('RADIO/modem.img', namelist)
 
+    self.assertNotIn('IMAGES/boot.img', namelist)
     self.assertNotIn('IMAGES/system_other.img', namelist)
     self.assertNotIn('IMAGES/system.map', namelist)
+    self.assertNotIn('RADIO/modem.img', namelist)
     self.assertNotIn(POSTINSTALL_CONFIG, namelist)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -641,10 +641,10 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
      namelist = verify_zip.namelist()
 
     self.assertIn('META/ab_partitions.txt', namelist)
-    self.assertIn('IMAGES/boot.img', namelist)
     self.assertIn('IMAGES/system.img', namelist)
     self.assertIn(POSTINSTALL_CONFIG, namelist)
 
+    self.assertNotIn('IMAGES/boot.img', namelist)
     self.assertNotIn('IMAGES/system_other.img', namelist)
     self.assertNotIn('IMAGES/system.map', namelist)
     self.assertNotIn('RADIO/bootloader.img', namelist)
@@ -681,12 +681,12 @@ class OtaFromTargetFilesTest(test_utils.ReleaseToolsTestCase):
           'META/dynamic_partitions_info.txt')
 
     self.assertIn('META/ab_partitions.txt', namelist)
-    self.assertIn('IMAGES/boot.img', namelist)
     self.assertIn('IMAGES/system.img', namelist)
     self.assertIn(POSTINSTALL_CONFIG, namelist)
     self.assertIn('META/misc_info.txt', namelist)
     self.assertIn('META/dynamic_partitions_info.txt', namelist)
 
+    self.assertNotIn('IMAGES/boot.img', namelist)
     self.assertNotIn('IMAGES/system_other.img', namelist)
     self.assertNotIn('IMAGES/system.map', namelist)
diff --git a/tools/warn.py b/tools/warn.py
index 48feb49ac0..86fa8c0239 100755
--- a/tools/warn.py
+++ b/tools/warn.py
@@ -2684,6 +2684,17 @@ warn_patterns = [
      'patterns': [r".*: .+\[clang-analyzer-.+\]$",
                   r".*: Call Path : .+$"]},
+    # rustc warnings
+    {'category': 'rust', 'severity': Severity.HIGH,
+     'description': 'Does not derive Copy',
+     'patterns': [r".*: warning: .+ does not derive Copy"]},
+    {'category': 'rust', 'severity': Severity.MEDIUM,
+     'description': 'Deprecated range pattern',
+     'patterns': [r".*: warning: .+ range patterns are deprecated"]},
+    {'category': 'rust', 'severity': Severity.MEDIUM,
+     'description': 'Deprecated missing explicit \'dyn\'',
+     'patterns': [r".*: warning: .+ without an explicit `dyn` are deprecated"]},
+
     # catch-all for warnings this script doesn't know about yet
     {'category': 'C/C++', 'severity': Severity.UNKNOWN,
      'description': 'Unclassified/unrecognized warnings',
@@ -3208,16 +3219,42 @@ def parse_input_file(infile):
   global target_variant
   line_counter = 0
 
-  # handle only warning messages with a file path
-  warning_pattern = re.compile('^[^ ]*/[^ ]*: warning: .*')
+  # rustc warning messages have two lines that should be combined:
+  #     warning: description
+  #        --> file_path:line_number:column_number
+  # Some warning messages have no file name:
+  #     warning: macro replacement list ... [bugprone-macro-parentheses]
+  # Some makefile warning messages have no line number:
+  #     some/path/file.mk: warning: description
+  # C/C++ compiler warning messages have line and column numbers:
+  #     some/path/file.c:line_number:column_number: warning: description
+  warning_pattern = re.compile('(^[^ ]*/[^ ]*: warning: .*)|(^warning: .*)')
+  warning_without_file = re.compile('^warning: .*')
+  rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
 
   # Collect all warnings into the warning_lines set.
   warning_lines = set()
+  prev_warning = ''
   for line in infile:
+    if prev_warning:
+      if rustc_file_position.match(line):
+        # must be a rustc warning, combine 2 lines into one warning
+        line = line.strip().replace('--> ', '') + ': ' + prev_warning
+        warning_lines.add(normalize_warning_line(line))
+        prev_warning = ''
+        continue
+      # add prev_warning, and then process the current line
+      prev_warning = 'unknown_source_file: ' + prev_warning
+      warning_lines.add(normalize_warning_line(prev_warning))
+      prev_warning = ''
     if warning_pattern.match(line):
-      line = normalize_warning_line(line)
-      warning_lines.add(line)
-    elif line_counter < 100:
+      if warning_without_file.match(line):
+        # save this line and combine it with the next line
+        prev_warning = line
+      else:
+        warning_lines.add(normalize_warning_line(line))
+      continue
+    if line_counter < 100:
       # save a little bit of time by only doing this for the first few lines
       line_counter += 1
       m = re.search('(?<=^PLATFORM_VERSION=).*', line)
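
A note on the warn.py change above: rustc splits one diagnostic across two lines (the "warning: ..." text, then an indented "--> path:line:col" locator), so the parser now buffers a bare warning line and folds the following locator into it. Below is a minimal, self-contained sketch of that two-line merge, using the regexes from the patch. collect_warnings is an illustrative stand-in rather than a function in warn.py, and it omits warn.py's normalize_warning_line step.

import re

# Regexes taken from the warn.py hunk above.
WARNING_PATTERN = re.compile(r'(^[^ ]*/[^ ]*: warning: .*)|(^warning: .*)')
WARNING_WITHOUT_FILE = re.compile(r'^warning: .*')
RUSTC_FILE_POSITION = re.compile(r'^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')


def collect_warnings(lines):
  """Returns one-line warnings, merging rustc's two-line warning format."""
  warnings = []
  prev_warning = ''  # buffered "warning: ..." line that had no file path
  for line in lines:
    if prev_warning:
      if RUSTC_FILE_POSITION.match(line):
        # Second rustc line: "   --> path:LINE:COL"; fold both into one line.
        warnings.append(line.strip().replace('--> ', '') + ': ' + prev_warning)
        prev_warning = ''
        continue
      # No position line followed; keep the warning with a placeholder path.
      warnings.append('unknown_source_file: ' + prev_warning)
      prev_warning = ''
    if WARNING_PATTERN.match(line):
      if WARNING_WITHOUT_FILE.match(line):
        # Bare "warning: ..." line; wait for the "-->" line.
        prev_warning = line.strip()
      else:
        warnings.append(line.strip())
  return warnings


print(collect_warnings([
    'warning: unused variable: `x`',
    '   --> foo/src/main.rs:10:9',
    'external/foo/bar.c:42:7: warning: unused variable [-Wunused]',
]))
# ['foo/src/main.rs:10:9: warning: unused variable: `x`',
#  'external/foo/bar.c:42:7: warning: unused variable [-Wunused]']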
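Similarly, for the apex_utils.py change at the top of this diff: the new no_hashtree argument only toggles extra avbtool flags, namely --no_hashtree on add_hashtree_footer when signing and --accept_zeroed_hashtree on verify_image when verifying. Below is a rough sketch of how that signing command line is assembled under the flag. build_sign_payload_cmd is a hypothetical helper (the real logic lives in SignApexPayload and runs through common.RunAndCheckOutput), and the key, key name, and salt values are placeholders.

import shlex


def build_sign_payload_cmd(avbtool, payload_file, payload_key, payload_key_name,
                           algorithm, salt, no_hashtree, signing_args=None):
  # Mirrors the command assembled in SignApexPayload().
  cmd = [avbtool, 'add_hashtree_footer',
         '--algorithm', algorithm,
         '--key', payload_key,
         '--prop', 'apex.key:{}'.format(payload_key_name),
         '--image', payload_file,
         '--salt', salt]
  if no_hashtree:
    # Sign with a zeroed hashtree; verification must then pass
    # --accept_zeroed_hashtree to avbtool verify_image.
    cmd.append('--no_hashtree')
  if signing_args:
    cmd.extend(shlex.split(signing_args))
  return cmd


print(' '.join(build_sign_payload_cmd(
    'avbtool', 'apex_payload.img', 'testkey.pem', 'com.android.example.apex',
    'SHA256_RSA2048', '1b3f', no_hashtree=True)))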