path: root/tools/releasetools/verity_utils.py
Diffstat (limited to 'tools/releasetools/verity_utils.py')
-rw-r--r--  tools/releasetools/verity_utils.py  255
1 file changed, 0 insertions(+), 255 deletions(-)
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index efb30080b1..ee197e03eb 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -49,107 +49,6 @@ class BuildVerityImageError(Exception):
Exception.__init__(self, message)
-def GetVerityFECSize(image_size):
- cmd = ["fec", "-s", str(image_size)]
- output = common.RunAndCheckOutput(cmd, verbose=False)
- return int(output)
-
-
-def GetVerityTreeSize(image_size):
- cmd = ["build_verity_tree", "-s", str(image_size)]
- output = common.RunAndCheckOutput(cmd, verbose=False)
- return int(output)
-
-
-def GetVerityMetadataSize(image_size):
- cmd = ["build_verity_metadata", "size", str(image_size)]
- output = common.RunAndCheckOutput(cmd, verbose=False)
- return int(output)
-
-
-def GetVeritySize(image_size, fec_supported):
- verity_tree_size = GetVerityTreeSize(image_size)
- verity_metadata_size = GetVerityMetadataSize(image_size)
- verity_size = verity_tree_size + verity_metadata_size
- if fec_supported:
- fec_size = GetVerityFECSize(image_size + verity_size)
- return verity_size + fec_size
- return verity_size
-
-
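# Illustrative sketch, not part of this change: one way the size helpers above
# compose. Given a fixed partition size, shrink the filesystem size until the
# filesystem plus its verity overhead fits. BLOCK_SIZE comes from the
# surrounding module; partition_size and the function name are hypothetical.
def _max_filesystem_size(partition_size, fec_supported):
  image_size = partition_size
  # Linear search for clarity; each step invokes the size helpers again.
  while image_size + GetVeritySize(image_size, fec_supported) > partition_size:
    image_size -= BLOCK_SIZE
  return image_size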
-def GetSimgSize(image_file):
- simg = sparse_img.SparseImage(image_file, build_map=False)
- return simg.blocksize * simg.total_blocks
-
-
-def ZeroPadSimg(image_file, pad_size):
- blocks = pad_size // BLOCK_SIZE
- logger.info("Padding %d blocks (%d bytes)", blocks, pad_size)
- simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
- simg.AppendFillChunk(0, blocks)
-
-
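# Illustrative sketch, not part of this change: the typical pad-to-size flow
# built from the two helpers above. 'target_size' is a hypothetical parameter
# (e.g. the maximum filesystem size computed for the partition).
def _pad_sparse_image(image_file, target_size):
  current_size = GetSimgSize(image_file)
  if current_size < target_size:
    ZeroPadSimg(image_file, target_size - current_size)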
-def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
- padding_size):
- cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
- verity_path, verity_fec_path]
- common.RunAndCheckOutput(cmd)
-
-
-def BuildVerityTree(sparse_image_path, verity_image_path):
- cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
- verity_image_path]
- output = common.RunAndCheckOutput(cmd)
- root, salt = output.split()
- return root, salt
-
-
-def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
- block_device, signer_path, key, signer_args,
- verity_disable):
- cmd = ["build_verity_metadata", "build", str(image_size),
- verity_metadata_path, root_hash, salt, block_device, signer_path, key]
- if signer_args:
- cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
- if verity_disable:
- cmd.append("--verity_disable")
- common.RunAndCheckOutput(cmd)
-
-
-def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
- """Appends the unsparse image to the given sparse image.
-
- Args:
- sparse_image_path: the path to the (sparse) image
- unsparse_image_path: the path to the (unsparse) image
-
- Raises:
- BuildVerityImageError: On error.
- """
- cmd = ["append2simg", sparse_image_path, unsparse_image_path]
- try:
- common.RunAndCheckOutput(cmd)
- except:
- logger.exception(error_message)
- raise BuildVerityImageError(error_message)
-
-
-def Append(target, file_to_append, error_message):
- """Appends file_to_append to target.
-
- Raises:
- BuildVerityImageError: On error.
- """
- try:
- with open(target, 'ab') as out_file, \
- open(file_to_append, 'rb') as input_file:
- for line in input_file:
- out_file.write(line)
- except IOError:
- logger.exception(error_message)
- raise BuildVerityImageError(error_message)
-
-
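# Illustrative sketch, not part of this change: one plausible way the helpers
# above chain together to attach verity data to a filesystem image. 'out_file'
# is assumed to be a sparse image with room already reserved for the verity
# footer, and padding_size=0 is an assumption made only for this sketch.
def _append_verity(out_file, image_size, block_dev, signer_path, key,
                   signer_args, fec_supported):
  verity_image = common.MakeTempFile(prefix="verity", suffix=".img")
  verity_metadata = common.MakeTempFile(prefix="verity_metadata", suffix=".img")
  root_hash, salt = BuildVerityTree(out_file, verity_image)
  BuildVerityMetadata(image_size, verity_metadata, root_hash, salt, block_dev,
                      signer_path, key, signer_args, verity_disable=False)
  # Layout after the filesystem: hash tree, then metadata, then optional FEC.
  Append(verity_image, verity_metadata, "Failed to append verity metadata")
  if fec_supported:
    fec_image = common.MakeTempFile(prefix="verity_fec", suffix=".img")
    BuildVerityFEC(out_file, verity_image, fec_image, padding_size=0)
    Append(verity_image, fec_image, "Failed to append FEC data")
  Append2Simg(out_file, verity_image, "Failed to append verity data")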
def CreateVerityImageBuilder(prop_dict):
"""Returns a verity image builder based on the given build properties.
@@ -414,160 +313,6 @@ class HashtreeInfoGenerator(object):
raise NotImplementedError
-class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
- """A class that parses the metadata of hashtree for a given partition."""
-
- def __init__(self, partition_size, block_size, fec_supported):
- """Initialize VerityTreeInfo with the sparse image and input property.
-
- Arguments:
- partition_size: The whole size in bytes of a partition, including the
- filesystem size, padding size, and verity size.
- block_size: Expected size in bytes of each block for the sparse image.
- fec_supported: True if the verity section contains fec data.
- """
-
- self.block_size = block_size
- self.partition_size = partition_size
- self.fec_supported = fec_supported
-
- self.image = None
- self.filesystem_size = None
- self.hashtree_size = None
- self.metadata_size = None
-
- prop_dict = {
- 'partition_size': str(partition_size),
- 'verity': 'true',
- 'verity_fec': 'true' if fec_supported else None,
- # 'verity_block_device' needs to be present to indicate a verity-enabled
- # partition.
- 'verity_block_device': '',
- # We don't need the following properties that are needed for signing the
- # verity metadata.
- 'verity_key': '',
- 'verity_signer_cmd': None,
- }
- self.verity_image_builder = CreateVerityImageBuilder(prop_dict)
-
- self.hashtree_info = HashtreeInfo()
-
- def DecomposeSparseImage(self, image):
- """Calculate the verity size based on the size of the input image.
-
- Since we already know the structure of a verity enabled image to be:
- [filesystem, verity_hashtree, verity_metadata, fec_data]. We can then
- calculate the size and offset of each section.
- """
-
- self.image = image
- assert self.block_size == image.blocksize
- assert self.partition_size == image.total_blocks * self.block_size, \
- "partition size {} doesn't match with the calculated image size." \
- " total_blocks: {}".format(self.partition_size, image.total_blocks)
-
- adjusted_size = self.verity_image_builder.CalculateMaxImageSize()
- assert adjusted_size % self.block_size == 0
-
- verity_tree_size = GetVerityTreeSize(adjusted_size)
- assert verity_tree_size % self.block_size == 0
-
- metadata_size = GetVerityMetadataSize(adjusted_size)
- assert metadata_size % self.block_size == 0
-
- self.filesystem_size = adjusted_size
- self.hashtree_size = verity_tree_size
- self.metadata_size = metadata_size
-
- self.hashtree_info.filesystem_range = RangeSet(
- data=[0, adjusted_size // self.block_size])
- self.hashtree_info.hashtree_range = RangeSet(
- data=[adjusted_size // self.block_size,
- (adjusted_size + verity_tree_size) // self.block_size])
-
- def _ParseHashtreeMetadata(self):
- """Parses the hash_algorithm, root_hash, salt from the metadata block."""
-
- metadata_start = self.filesystem_size + self.hashtree_size
- metadata_range = RangeSet(
- data=[metadata_start // self.block_size,
- (metadata_start + self.metadata_size) // self.block_size])
- meta_data = b''.join(self.image.ReadRangeSet(metadata_range))
-
- # More info about the metadata structure available in:
- # system/extras/verity/build_verity_metadata.py
- META_HEADER_SIZE = 268
- header_bin = meta_data[0:META_HEADER_SIZE]
- header = struct.unpack("II256sI", header_bin)
-
- # header: magic_number, version, signature, table_len
- assert header[0] == 0xb001b001, header[0]
- table_len = header[3]
- verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
- table_entries = verity_table.rstrip().split()
-
- # Expected verity table format: "1 block_device block_device block_size
- # block_size data_blocks data_blocks hash_algorithm root_hash salt"
- assert len(table_entries) == 10, "Unexpected verity table size {}".format(
- len(table_entries))
- assert (int(table_entries[3]) == self.block_size and
- int(table_entries[4]) == self.block_size)
- assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
- int(table_entries[6]) * self.block_size == self.filesystem_size)
-
- self.hashtree_info.hash_algorithm = table_entries[7].decode()
- self.hashtree_info.root_hash = table_entries[8].decode()
- self.hashtree_info.salt = table_entries[9].decode()
-
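# Illustrative sketch, not part of this change: reading and parsing the
# metadata block described above from a raw (unsparsed) image, given the
# byte sizes of the preceding sections. The function name and its parameters
# are hypothetical; the field widths follow the "II256sI" header format above.
def _read_hashtree_metadata(image_path, filesystem_size, hashtree_size,
                            metadata_size):
  # The metadata block sits right after the filesystem and the hash tree.
  with open(image_path, "rb") as f:
    f.seek(filesystem_size + hashtree_size)
    meta_data = f.read(metadata_size)
  magic, _version, _signature, table_len = struct.unpack(
      "II256sI", meta_data[:268])
  assert magic == 0xb001b001, hex(magic)
  entries = meta_data[268:268 + table_len].rstrip().split()
  # entries: version, dev, dev, block_size, block_size, data_blocks,
  #          data_blocks, hash_algorithm, root_hash, salt
  return entries[7].decode(), entries[8].decode(), entries[9].decode()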
- def ValidateHashtree(self):
- """Checks that we can reconstruct the verity hash tree."""
-
- # Writes the filesystem section to a temp file; and calls the executable
- # build_verity_tree to construct the hash tree.
- adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
- with open(adjusted_partition, "wb") as fd:
- self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)
-
- generated_verity_tree = common.MakeTempFile(prefix="verity")
- root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)
-
- # The salt should be always identical, as we use fixed value.
- assert salt == self.hashtree_info.salt, \
- "Calculated salt {} doesn't match the one in metadata {}".format(
- salt, self.hashtree_info.salt)
-
- if root_hash != self.hashtree_info.root_hash:
- logger.warning(
- "Calculated root hash %s doesn't match the one in metadata %s",
- root_hash, self.hashtree_info.root_hash)
- return False
-
- # Reads the generated hash tree and checks if it has the exact same bytes
- # as the one in the sparse image.
- with open(generated_verity_tree, 'rb') as fd:
- return fd.read() == b''.join(self.image.ReadRangeSet(
- self.hashtree_info.hashtree_range))
-
- def Generate(self, image):
- """Parses and validates the hashtree info in a sparse image.
-
- Returns:
- hashtree_info: The information needed to reconstruct the hashtree.
-
- Raises:
- HashtreeInfoGenerationError: If we fail to generate the exact bytes of
- the hashtree.
- """
-
- self.DecomposeSparseImage(image)
- self._ParseHashtreeMetadata()
-
- if not self.ValidateHashtree():
- raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")
-
- return self.hashtree_info
-
-
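# Illustrative usage sketch, not part of this change: driving the generator
# class above against a sparse image file. The function name, default
# block_size, and call pattern are assumptions for illustration.
def _extract_hashtree_info(sparse_image_path, partition_size, block_size=4096,
                           fec_supported=True):
  generator = VerifiedBootVersion1HashtreeInfoGenerator(
      partition_size, block_size, fec_supported)
  image = sparse_img.SparseImage(sparse_image_path)
  info = generator.Generate(image)
  return info.root_hash, info.salt, info.hash_algorithm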
def CreateCustomImageBuilder(info_dict, partition_name, partition_size,
key_path, algorithm, signing_args):
builder = None