diff options
202 files changed, 8142 insertions, 2625 deletions
diff --git a/Changes.md b/Changes.md index fc15e601b6..9f2449c2c3 100644 --- a/Changes.md +++ b/Changes.md @@ -43,14 +43,9 @@ within a product configuration .mk file, board config .mk file, or buildspec.mk. The path set when running builds now makes the `python` executable point to python 3, whereas on previous versions it pointed to python 2. If you still have python 2 scripts, you can change the shebang line to use `python2` explicitly. This only applies for -scripts run directly from makefiles, or from soong genrules. This behavior can be -temporarily overridden by setting the `BUILD_BROKEN_PYTHON_IS_PYTHON2` environment -variable to `true`. It's only an environment variable and not a product config variable -because product config sometimes calls python code. - -In addition, `python_*` soong modules no longer allow python 2. This can be temporarily -overridden by setting the `BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES` product configuration -variable to `true`. +scripts run directly from makefiles, or from soong genrules. + +In addition, `python_*` soong modules no longer allow python 2. Python 2 is slated for complete removal in V. 
diff --git a/ci/Android.bp b/ci/Android.bp index 104f517ccd..6d4ac35517 100644 --- a/ci/Android.bp +++ b/ci/Android.bp @@ -71,11 +71,37 @@ python_test_host { }, } +python_test_host { + name: "optimized_targets_test", + main: "optimized_targets_test.py", + pkg_path: "testdata", + srcs: [ + "optimized_targets_test.py", + ], + libs: [ + "build_test_suites", + "pyfakefs", + ], + test_options: { + unit_test: true, + }, + data: [ + ":py3-cmd", + ], + version: { + py3: { + embedded_launcher: true, + }, + }, +} + python_library_host { name: "build_test_suites", srcs: [ "build_test_suites.py", "optimized_targets.py", + "test_mapping_module_retriever.py", + "build_context.py", ], } diff --git a/ci/build_context.py b/ci/build_context.py new file mode 100644 index 0000000000..cc48d53992 --- /dev/null +++ b/ci/build_context.py @@ -0,0 +1,64 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Container class for build context with utility functions.""" + +import re + + +class BuildContext: + + def __init__(self, build_context_dict: dict[str, any]): + self.enabled_build_features = set() + for opt in build_context_dict.get('enabledBuildFeatures', []): + self.enabled_build_features.add(opt.get('name')) + self.test_infos = set() + for test_info_dict in build_context_dict.get('testContext', dict()).get( + 'testInfos', [] + ): + self.test_infos.add(self.TestInfo(test_info_dict)) + + def build_target_used(self, target: str) -> bool: + return any(test.build_target_used(target) for test in self.test_infos) + + class TestInfo: + + _DOWNLOAD_OPTS = { + 'test-config-only-zip', + 'test-zip-file-filter', + 'extra-host-shared-lib-zip', + 'sandbox-tests-zips', + 'additional-files-filter', + 'cts-package-name', + } + + def __init__(self, test_info_dict: dict[str, any]): + self.is_test_mapping = False + self.test_mapping_test_groups = set() + self.file_download_options = set() + for opt in test_info_dict.get('extraOptions', []): + key = opt.get('key') + if key == 'test-mapping-test-group': + self.is_test_mapping = True + self.test_mapping_test_groups.update(opt.get('values', set())) + + if key in self._DOWNLOAD_OPTS: + self.file_download_options.update(opt.get('values', set())) + + def build_target_used(self, target: str) -> bool: + # For all of a targets' outputs, check if any of the regexes used by tests + # to download artifacts would match it. If any of them do then this target + # is necessary. + regex = r'\b(%s)\b' % re.escape(target) + return any(re.search(regex, opt) for opt in self.file_download_options) diff --git a/ci/build_metadata b/ci/build_metadata new file mode 100755 index 0000000000..cd011c8679 --- /dev/null +++ b/ci/build_metadata @@ -0,0 +1,28 @@ +#/bin/bash + +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ex + +export TARGET_PRODUCT=aosp_arm64 +export TARGET_RELEASE=trunk_staging +export TARGET_BUILD_VARIANT=eng + +TARGETS=( + all_teams + release_config_metadata +) + +build/soong/bin/m dist ${TARGETS[@]} diff --git a/ci/build_test_suites.py b/ci/build_test_suites.py index 6e1f88c36c..b8c4a385e0 100644 --- a/ci/build_test_suites.py +++ b/ci/build_test_suites.py @@ -23,11 +23,13 @@ import pathlib import subprocess import sys from typing import Callable +from build_context import BuildContext import optimized_targets REQUIRED_ENV_VARS = frozenset(['TARGET_PRODUCT', 'TARGET_RELEASE', 'TOP']) SOONG_UI_EXE_REL_PATH = 'build/soong/soong_ui.bash' +LOG_PATH = 'logs/build_test_suites.log' class Error(Exception): @@ -53,7 +55,7 @@ class BuildPlanner: def __init__( self, - build_context: dict[str, any], + build_context: BuildContext, args: argparse.Namespace, target_optimizations: dict[str, optimized_targets.OptimizedBuildTarget], ): @@ -63,12 +65,17 @@ class BuildPlanner: def create_build_plan(self): - if 'optimized_build' not in self.build_context['enabled_build_features']: + if 'optimized_build' not in self.build_context.enabled_build_features: return BuildPlan(set(self.args.extra_targets), set()) build_targets = set() - packaging_functions = set() + packaging_commands_getters = [] for target in self.args.extra_targets: + if self._unused_target_exclusion_enabled( + target + ) and not self.build_context.build_target_used(target): + continue + target_optimizer_getter = self.target_optimizations.get(target, None) if not target_optimizer_getter: 
build_targets.add(target) @@ -78,15 +85,23 @@ class BuildPlanner: target, self.build_context, self.args ) build_targets.update(target_optimizer.get_build_targets()) - packaging_functions.add(target_optimizer.package_outputs) + packaging_commands_getters.append( + target_optimizer.get_package_outputs_commands + ) - return BuildPlan(build_targets, packaging_functions) + return BuildPlan(build_targets, packaging_commands_getters) + + def _unused_target_exclusion_enabled(self, target: str) -> bool: + return ( + f'{target}_unused_exclusion' + in self.build_context.enabled_build_features + ) @dataclass(frozen=True) class BuildPlan: build_targets: set[str] - packaging_functions: set[Callable[..., None]] + packaging_commands_getters: list[Callable[[], list[list[str]]]] def build_test_suites(argv: list[str]) -> int: @@ -100,7 +115,7 @@ def build_test_suites(argv: list[str]) -> int: """ args = parse_args(argv) check_required_env() - build_context = load_build_context() + build_context = BuildContext(load_build_context()) build_planner = BuildPlanner( build_context, args, optimized_targets.OPTIMIZED_BUILD_TARGETS ) @@ -154,7 +169,7 @@ def load_build_context(): def empty_build_context(): - return {'enabled_build_features': []} + return {'enabledBuildFeatures': []} def execute_build_plan(build_plan: BuildPlan): @@ -168,8 +183,12 @@ def execute_build_plan(build_plan: BuildPlan): except subprocess.CalledProcessError as e: raise BuildFailureError(e.returncode) from e - for packaging_function in build_plan.packaging_functions: - packaging_function() + for packaging_commands_getter in build_plan.packaging_commands_getters: + try: + for packaging_command in packaging_commands_getter(): + run_command(packaging_command) + except subprocess.CalledProcessError as e: + raise BuildFailureError(e.returncode) from e def get_top() -> pathlib.Path: @@ -181,4 +200,12 @@ def run_command(args: list[str], stdout=None): def main(argv): + dist_dir = os.environ.get('DIST_DIR') + if dist_dir: + 
log_file = pathlib.Path(dist_dir) / LOG_PATH + logging.basicConfig( + level=logging.DEBUG, + format='%(asctime)s %(levelname)s %(message)s', + filename=log_file, + ) sys.exit(build_test_suites(argv)) diff --git a/ci/build_test_suites_test.py b/ci/build_test_suites_test.py index a9ff3fbf4d..2afaab7711 100644 --- a/ci/build_test_suites_test.py +++ b/ci/build_test_suites_test.py @@ -15,6 +15,7 @@ """Tests for build_test_suites.py""" import argparse +import functools from importlib import resources import json import multiprocessing @@ -31,6 +32,7 @@ import time from typing import Callable import unittest from unittest import mock +from build_context import BuildContext import build_test_suites import ci_test_lib import optimized_targets @@ -238,14 +240,21 @@ class BuildPlannerTest(unittest.TestCase): class TestOptimizedBuildTarget(optimized_targets.OptimizedBuildTarget): - def __init__(self, output_targets): + def __init__( + self, target, build_context, args, output_targets, packaging_commands + ): + super().__init__(target, build_context, args) self.output_targets = output_targets + self.packaging_commands = packaging_commands - def get_build_targets(self): + def get_build_targets_impl(self): return self.output_targets - def package_outputs(self): - return f'packaging {" ".join(self.output_targets)}' + def get_package_outputs_commands_impl(self): + return self.packaging_commands + + def get_enabled_flag(self): + return f'{self.target}_enabled' def test_build_optimization_off_builds_everything(self): build_targets = {'target_1', 'target_2'} @@ -267,14 +276,15 @@ class BuildPlannerTest(unittest.TestCase): build_plan = build_planner.create_build_plan() - self.assertEqual(len(build_plan.packaging_functions), 0) + for packaging_command in self.run_packaging_commands(build_plan): + self.assertEqual(len(packaging_command), 0) def test_build_optimization_on_optimizes_target(self): build_targets = {'target_1', 'target_2'} build_planner = self.create_build_planner( 
build_targets=build_targets, build_context=self.create_build_context( - enabled_build_features={self.get_target_flag('target_1')} + enabled_build_features=[{'name': self.get_target_flag('target_1')}] ), ) @@ -285,20 +295,19 @@ class BuildPlannerTest(unittest.TestCase): def test_build_optimization_on_packages_target(self): build_targets = {'target_1', 'target_2'} + optimized_target_name = self.get_optimized_target_name('target_1') + packaging_commands = [[f'packaging {optimized_target_name}']] build_planner = self.create_build_planner( build_targets=build_targets, build_context=self.create_build_context( - enabled_build_features={self.get_target_flag('target_1')} + enabled_build_features=[{'name': self.get_target_flag('target_1')}] ), + packaging_commands=packaging_commands, ) build_plan = build_planner.create_build_plan() - optimized_target_name = self.get_optimized_target_name('target_1') - self.assertIn( - f'packaging {optimized_target_name}', - self.run_packaging_functions(build_plan), - ) + self.assertIn(packaging_commands, self.run_packaging_commands(build_plan)) def test_individual_build_optimization_off_doesnt_optimize(self): build_targets = {'target_1', 'target_2'} @@ -312,26 +321,94 @@ class BuildPlannerTest(unittest.TestCase): def test_individual_build_optimization_off_doesnt_package(self): build_targets = {'target_1', 'target_2'} + packaging_commands = [['packaging command']] build_planner = self.create_build_planner( build_targets=build_targets, + packaging_commands=packaging_commands, + ) + + build_plan = build_planner.create_build_plan() + + for packaging_command in self.run_packaging_commands(build_plan): + self.assertEqual(len(packaging_command), 0) + + def test_target_output_used_target_built(self): + build_target = 'test_target' + build_planner = self.create_build_planner( + build_targets={build_target}, + build_context=self.create_build_context( + test_context=self.get_test_context(build_target), + enabled_build_features=[{'name': 
'test_target_unused_exclusion'}], + ), ) build_plan = build_planner.create_build_plan() - expected_packaging_function_outputs = {None, None} - self.assertSetEqual( - expected_packaging_function_outputs, - self.run_packaging_functions(build_plan), + self.assertSetEqual(build_plan.build_targets, {build_target}) + + def test_target_regex_used_target_built(self): + build_target = 'test_target' + test_context = self.get_test_context(build_target) + test_context['testInfos'][0]['extraOptions'] = [{ + 'key': 'additional-files-filter', + 'values': [f'.*{build_target}.*\.zip'], + }] + build_planner = self.create_build_planner( + build_targets={build_target}, + build_context=self.create_build_context( + test_context=test_context, + enabled_build_features=[{'name': 'test_target_unused_exclusion'}], + ), ) + build_plan = build_planner.create_build_plan() + + self.assertSetEqual(build_plan.build_targets, {build_target}) + + def test_target_output_not_used_target_not_built(self): + build_target = 'test_target' + test_context = self.get_test_context(build_target) + test_context['testInfos'][0]['extraOptions'] = [] + build_planner = self.create_build_planner( + build_targets={build_target}, + build_context=self.create_build_context( + test_context=test_context, + enabled_build_features=[{'name': 'test_target_unused_exclusion'}], + ), + ) + + build_plan = build_planner.create_build_plan() + + self.assertSetEqual(build_plan.build_targets, set()) + + def test_target_regex_matching_not_too_broad(self): + build_target = 'test_target' + test_context = self.get_test_context(build_target) + test_context['testInfos'][0]['extraOptions'] = [{ + 'key': 'additional-files-filter', + 'values': [f'.*a{build_target}.*\.zip'], + }] + build_planner = self.create_build_planner( + build_targets={build_target}, + build_context=self.create_build_context( + test_context=test_context, + enabled_build_features=[{'name': 'test_target_unused_exclusion'}], + ), + ) + + build_plan = 
build_planner.create_build_plan() + + self.assertSetEqual(build_plan.build_targets, set()) + def create_build_planner( self, build_targets: set[str], - build_context: dict[str, any] = None, + build_context: BuildContext = None, args: argparse.Namespace = None, target_optimizations: dict[ str, optimized_targets.OptimizedBuildTarget ] = None, + packaging_commands: list[list[str]] = [], ) -> build_test_suites.BuildPlanner: if not build_context: build_context = self.create_build_context() @@ -339,7 +416,9 @@ class BuildPlannerTest(unittest.TestCase): args = self.create_args(extra_build_targets=build_targets) if not target_optimizations: target_optimizations = self.create_target_optimizations( - build_context, build_targets + build_context, + build_targets, + packaging_commands, ) return build_test_suites.BuildPlanner( build_context, args, target_optimizations @@ -348,15 +427,17 @@ class BuildPlannerTest(unittest.TestCase): def create_build_context( self, optimized_build_enabled: bool = True, - enabled_build_features: set[str] = set(), + enabled_build_features: list[dict[str, str]] = [], test_context: dict[str, any] = {}, - ) -> dict[str, any]: - build_context = {} - build_context['enabled_build_features'] = enabled_build_features + ) -> BuildContext: + build_context_dict = {} + build_context_dict['enabledBuildFeatures'] = enabled_build_features if optimized_build_enabled: - build_context['enabled_build_features'].add('optimized_build') - build_context['test_context'] = test_context - return build_context + build_context_dict['enabledBuildFeatures'].append( + {'name': 'optimized_build'} + ) + build_context_dict['testContext'] = test_context + return BuildContext(build_context_dict) def create_args( self, extra_build_targets: set[str] = set() @@ -366,19 +447,17 @@ class BuildPlannerTest(unittest.TestCase): return parser.parse_args(extra_build_targets) def create_target_optimizations( - self, build_context: dict[str, any], build_targets: set[str] + self, + build_context: 
BuildContext, + build_targets: set[str], + packaging_commands: list[list[str]] = [], ): target_optimizations = dict() for target in build_targets: - target_optimizations[target] = ( - lambda target, build_context, args: optimized_targets.get_target_optimizer( - target, - self.get_target_flag(target), - build_context, - self.TestOptimizedBuildTarget( - {self.get_optimized_target_name(target)} - ), - ) + target_optimizations[target] = functools.partial( + self.TestOptimizedBuildTarget, + output_targets={self.get_optimized_target_name(target)}, + packaging_commands=packaging_commands, ) return target_optimizations @@ -389,14 +468,30 @@ class BuildPlannerTest(unittest.TestCase): def get_optimized_target_name(self, target: str): return f'{target}_optimized' - def run_packaging_functions( - self, build_plan: build_test_suites.BuildPlan - ) -> set[str]: - output = set() - for packaging_function in build_plan.packaging_functions: - output.add(packaging_function()) - - return output + def get_test_context(self, target: str): + return { + 'testInfos': [ + { + 'name': 'atp_test', + 'target': 'test_target', + 'branch': 'branch', + 'extraOptions': [{ + 'key': 'additional-files-filter', + 'values': [f'{target}.zip'], + }], + 'command': '/tf/command', + 'extraBuildTargets': [ + 'extra_build_target', + ], + }, + ], + } + + def run_packaging_commands(self, build_plan: build_test_suites.BuildPlan): + return [ + packaging_command_getter() + for packaging_command_getter in build_plan.packaging_commands_getters + ] def wait_until( diff --git a/ci/buildbot.py b/ci/buildbot.py new file mode 100644 index 0000000000..97097be598 --- /dev/null +++ b/ci/buildbot.py @@ -0,0 +1,43 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for interacting with buildbot, with a simulation in a local environment""" + +import os +import sys + +# Check that the script is running from the root of the tree. Prevents subtle +# errors later, and CI always runs from the root of the tree. +if not os.path.exists("build/make/ci/buildbot.py"): + raise Exception("CI script must be run from the root of the tree instead of: " + + os.getcwd()) + +# Check that we are using the hermetic interpreter +if "prebuilts/build-tools/" not in sys.executable: + raise Exception("CI script must be run using the hermetic interpreter from " + + "prebuilts/build-tools instead of: " + sys.executable) + + +def OutDir(): + "Get the out directory. Will create it if needed." + result = os.environ.get("OUT_DIR", "out") + os.makedirs(result, exist_ok=True) + return result + +def DistDir(): + "Get the dist directory. Will create it if needed." + result = os.environ.get("DIST_DIR", os.path.join(OutDir(), "dist")) + os.makedirs(result, exist_ok=True) + return result + diff --git a/ci/dump_product_config b/ci/dump_product_config new file mode 100755 index 0000000000..77b51dd281 --- /dev/null +++ b/ci/dump_product_config @@ -0,0 +1,353 @@ +#!prebuilts/build-tools/linux-x86/bin/py3-cmd -B + +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Script to collect all of the make variables from all product config combos. + +This script must be run from the root of the source tree. + +See GetArgs() below or run dump_product_config for more information. +""" + +import argparse +import asyncio +import contextlib +import csv +import dataclasses +import json +import multiprocessing +import os +import subprocess +import sys +import time +from typing import List, Dict, Tuple, Optional + +import buildbot + +# We have some BIG variables +csv.field_size_limit(sys.maxsize) + + +class DataclassJSONEncoder(json.JSONEncoder): + """JSONEncoder for our custom types.""" + def default(self, o): + if dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + return super().default(o) + + +def GetProducts(): + """Get the all of the available TARGET_PRODUCT values.""" + try: + stdout = subprocess.check_output(["build/soong/bin/list_products"], text=True) + except subprocess.CalledProcessError: + sys.exit(1) + return [s.strip() for s in stdout.splitlines() if s.strip()] + + +def GetReleases(product): + """For a given product, get the release configs available to it.""" + if True: + # Hard code the list + mainline_products = [ + "module_arm", + "module_x86", + "module_arm64", + "module_riscv64", + "module_x86_64", + "module_arm64only", + "module_x86_64only", + ] + if product in mainline_products: + return ["trunk_staging", "trunk", "mainline"] + else: + return ["trunk_staging", "trunk", "next"] + else: + # Get it from the build system + try: + stdout = subprocess.check_output(["build/soong/bin/list_releases", 
product], text=True) + except subprocess.CalledProcessError: + sys.exit(1) + return [s.strip() for s in stdout.splitlines() if s.strip()] + + +def GenerateAllLunchTargets(): + """Generate the full list of lunch targets.""" + for product in GetProducts(): + for release in GetReleases(product): + for variant in ["user", "userdebug", "eng"]: + yield (product, release, variant) + + +async def ParallelExec(parallelism, tasks): + ''' + ParallelExec takes a parallelism number, and an iterator of tasks to run. + Then it will run all the tasks, but a maximum of parallelism will be run at + any given time. The tasks must be async functions that accept one argument, + which will be an integer id of the worker that they're running on. + ''' + tasks = iter(tasks) + + overall_start = time.monotonic() + # lists so they can be modified from the inner function + total_duration = [0] + count = [0] + async def dispatch(worker): + while True: + try: + task = next(tasks) + item_start = time.monotonic() + await task(worker) + now = time.monotonic() + item_duration = now - item_start + count[0] += 1 + total_duration[0] += item_duration + sys.stderr.write(f"Timing: Items processed: {count[0]}, Wall time: {now-overall_start:0.1f} sec, Throughput: {(now-overall_start)/count[0]:0.3f} sec per item, Average duration: {total_duration[0]/count[0]:0.1f} sec\n") + except StopIteration: + return + + await asyncio.gather(*[dispatch(worker) for worker in range(parallelism)]) + + +async def DumpProductConfigs(out, generator, out_dir): + """Collects all of the product config data and store it in file.""" + # Write the outer json list by hand so we can stream it + out.write("[") + try: + first_result = [True] # a list so it can be modified from the inner function + def run(lunch): + async def curried(worker): + sys.stderr.write(f"running: {'-'.join(lunch)}\n") + result = await DumpOneProductConfig(lunch, os.path.join(out_dir, f"lunchable_{worker}")) + if first_result[0]: + out.write("\n") + 
first_result[0] = False + else: + out.write(",\n") + result.dumpToFile(out) + sys.stderr.write(f"finished: {'-'.join(lunch)}\n") + return curried + + await ParallelExec(multiprocessing.cpu_count(), (run(lunch) for lunch in generator)) + finally: + # Close the json regardless of how we exit + out.write("\n]\n") + + +@dataclasses.dataclass(frozen=True) +class Variable: + """A variable name, value and where it was set.""" + name: str + value: str + location: str + + +@dataclasses.dataclass(frozen=True) +class ProductResult: + product: str + release: str + variant: str + board_includes: List[str] + product_includes: Dict[str, List[str]] + product_graph: List[Tuple[str, str]] + board_vars: List[Variable] + product_vars: List[Variable] + + def dumpToFile(self, f): + json.dump(self, f, sort_keys=True, indent=2, cls=DataclassJSONEncoder) + + +@dataclasses.dataclass(frozen=True) +class ProductError: + product: str + release: str + variant: str + error: str + + def dumpToFile(self, f): + json.dump(self, f, sort_keys=True, indent=2, cls=DataclassJSONEncoder) + + +def NormalizeInheritGraph(lists): + """Flatten the inheritance graph to a simple list for easier querying.""" + result = set() + for item in lists: + for i in range(len(item)): + result.add((item[i+1] if i < len(item)-1 else "", item[i])) + return sorted(list(result)) + + +def ParseDump(lunch, filename) -> ProductResult: + """Parses the csv and returns a tuple of the data.""" + def diff(initial, final): + return [after for after in final.values() if + initial.get(after.name, Variable(after.name, "", "<unset>")).value != after.value] + product_initial = {} + product_final = {} + board_initial = {} + board_final = {} + inherit_product = [] # The stack of inherit-product calls + product_includes = {} # Other files included by each of the properly imported files + board_includes = [] # Files included by boardconfig + with open(filename) as f: + phase = "" + for line in csv.reader(f): + if line[0] == "phase": + phase = 
line[1] + elif line[0] == "val": + # TOOD: We should skip these somewhere else. + if line[3].startswith("_ALL_RELEASE_FLAGS"): + continue + if line[3].startswith("PRODUCTS."): + continue + if phase == "PRODUCTS": + if line[2] == "initial": + product_initial[line[3]] = Variable(line[3], line[4], line[5]) + if phase == "PRODUCT-EXPAND": + if line[2] == "final": + product_final[line[3]] = Variable(line[3], line[4], line[5]) + if phase == "BOARD": + if line[2] == "initial": + board_initial[line[3]] = Variable(line[3], line[4], line[5]) + if line[2] == "final": + board_final[line[3]] = Variable(line[3], line[4], line[5]) + elif line[0] == "imported": + imports = [s.strip() for s in line[1].split()] + if imports: + inherit_product.append(imports) + inc = [s.strip() for s in line[2].split()] + for f in inc: + product_includes.setdefault(imports[0], []).append(f) + elif line[0] == "board_config_files": + board_includes += [s.strip() for s in line[1].split()] + return ProductResult( + product = lunch[0], + release = lunch[1], + variant = lunch[2], + product_vars = diff(product_initial, product_final), + board_vars = diff(board_initial, board_final), + product_graph = NormalizeInheritGraph(inherit_product), + product_includes = product_includes, + board_includes = board_includes + ) + + +async def DumpOneProductConfig(lunch, out_dir) -> ProductResult | ProductError: + """Print a single config's lunch info to stdout.""" + product, release, variant = lunch + + dumpconfig_file = os.path.join(out_dir, f"{product}-{release}-{variant}.csv") + + # Run get_build_var to bootstrap soong_ui for this target + env = dict(os.environ) + env["TARGET_PRODUCT"] = product + env["TARGET_RELEASE"] = release + env["TARGET_BUILD_VARIANT"] = variant + env["OUT_DIR"] = out_dir + process = await asyncio.create_subprocess_exec( + "build/soong/bin/get_build_var", + "TARGET_PRODUCT", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env + ) + stdout, _ = await process.communicate() + stdout = 
stdout.decode() + + if process.returncode != 0: + return ProductError( + product = product, + release = release, + variant = variant, + error = stdout + ) + else: + # Run kati to extract the data + process = await asyncio.create_subprocess_exec( + "prebuilts/build-tools/linux-x86/bin/ckati", + "-f", + "build/make/core/dumpconfig.mk", + f"TARGET_PRODUCT={product}", + f"TARGET_RELEASE={release}", + f"TARGET_BUILD_VARIANT={variant}", + f"DUMPCONFIG_FILE={dumpconfig_file}", + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + env=env + ) + stdout, _ = await process.communicate() + if process.returncode != 0: + stdout = stdout.decode() + return ProductError( + product = product, + release = release, + variant = variant, + error = stdout + ) + else: + # Parse and record the output + return ParseDump(lunch, dumpconfig_file) + + +def GetArgs(): + """Parse command line arguments.""" + parser = argparse.ArgumentParser( + description="Collect all of the make variables from product config.", + epilog="NOTE: This script must be run from the root of the source tree.") + parser.add_argument("--lunch", nargs="*") + parser.add_argument("--dist", action="store_true") + + return parser.parse_args() + + +async def main(): + args = GetArgs() + + out_dir = buildbot.OutDir() + + if args.dist: + cm = open(os.path.join(buildbot.DistDir(), "all_product_config.json"), "w") + else: + cm = contextlib.nullcontext(sys.stdout) + + + with cm as out: + if args.lunch: + lunches = [lunch.split("-") for lunch in args.lunch] + fail = False + for i in range(len(lunches)): + if len(lunches[i]) != 3: + sys.stderr.write(f"Malformed lunch targets: {args.lunch[i]}\n") + fail = True + if fail: + sys.exit(1) + if len(lunches) == 1: + result = await DumpOneProductConfig(lunches[0], out_dir) + result.dumpToFile(out) + out.write("\n") + else: + await DumpProductConfigs(out, lunches, out_dir) + else: + # All configs mode. This will exec single config mode in parallel + # for each lunch combo. 
Write output to $DIST_DIR. + await DumpProductConfigs(out, GenerateAllLunchTargets(), out_dir) + + +if __name__ == "__main__": + asyncio.run(main()) + + +# vim: set syntax=python ts=4 sw=4 sts=4: + diff --git a/ci/optimized_targets.py b/ci/optimized_targets.py index 224c8c0221..688bdd8370 100644 --- a/ci/optimized_targets.py +++ b/ci/optimized_targets.py @@ -14,6 +14,16 @@ # limitations under the License. from abc import ABC +import argparse +import functools +import json +import logging +import os +import pathlib +import subprocess + +from build_context import BuildContext +import test_mapping_module_retriever class OptimizedBuildTarget(ABC): @@ -24,15 +34,132 @@ class OptimizedBuildTarget(ABC): build. """ - def __init__(self, build_context, args): + _SOONG_UI_BASH_PATH = 'build/soong/soong_ui.bash' + _PREBUILT_SOONG_ZIP_PATH = 'prebuilts/build-tools/linux-x86/bin/soong_zip' + + def __init__( + self, + target: str, + build_context: BuildContext, + args: argparse.Namespace, + ): + self.target = target self.build_context = build_context self.args = args - def get_build_targets(self): - pass + def get_build_targets(self) -> set[str]: + features = self.build_context.enabled_build_features + if self.get_enabled_flag() in features: + self.modules_to_build = self.get_build_targets_impl() + return self.modules_to_build + + self.modules_to_build = {self.target} + return {self.target} + + def get_package_outputs_commands(self) -> list[list[str]]: + features = self.build_context.enabled_build_features + if self.get_enabled_flag() in features: + return self.get_package_outputs_commands_impl() + + return [] + + def get_package_outputs_commands_impl(self) -> list[list[str]]: + raise NotImplementedError( + 'get_package_outputs_commands_impl not implemented in' + f' {type(self).__name__}' + ) - def package_outputs(self): - pass + def get_enabled_flag(self): + raise NotImplementedError( + f'get_enabled_flag not implemented in {type(self).__name__}' + ) + + def 
get_build_targets_impl(self) -> set[str]: + raise NotImplementedError( + f'get_build_targets_impl not implemented in {type(self).__name__}' + ) + + def _generate_zip_options_for_items( + self, + prefix: str = '', + relative_root: str = '', + list_files: list[str] | None = None, + files: list[str] | None = None, + directories: list[str] | None = None, + ) -> list[str]: + if not list_files and not files and not directories: + raise RuntimeError( + f'No items specified to be added to zip! Prefix: {prefix}, Relative' + f' root: {relative_root}' + ) + command_segment = [] + # These are all soong_zip options so consult soong_zip --help for specifics. + if prefix: + command_segment.append('-P') + command_segment.append(prefix) + if relative_root: + command_segment.append('-C') + command_segment.append(relative_root) + if list_files: + for list_file in list_files: + command_segment.append('-l') + command_segment.append(list_file) + if files: + for file in files: + command_segment.append('-f') + command_segment.append(file) + if directories: + for directory in directories: + command_segment.append('-D') + command_segment.append(directory) + + return command_segment + + def _query_soong_vars( + self, src_top: pathlib.Path, soong_vars: list[str] + ) -> dict[str, str]: + process_result = subprocess.run( + args=[ + f'{src_top / self._SOONG_UI_BASH_PATH}', + '--dumpvars-mode', + f'--abs-vars={" ".join(soong_vars)}', + ], + env=os.environ, + check=False, + capture_output=True, + text=True, + ) + if not process_result.returncode == 0: + logging.error('soong dumpvars command failed! stderr:') + logging.error(process_result.stderr) + raise RuntimeError('Soong dumpvars failed! See log for stderr.') + + if not process_result.stdout: + raise RuntimeError( + 'Necessary soong variables ' + soong_vars + ' not found.' 
+ ) + + try: + return { + line.split('=')[0]: line.split('=')[1].strip("'") + for line in process_result.stdout.strip().split('\n') + } + except IndexError as e: + raise RuntimeError( + 'Error parsing soong dumpvars output! See output here:' + f' {process_result.stdout}', + e, + ) + + def _base_zip_command( + self, src_top: pathlib.Path, dist_dir: pathlib.Path, name: str + ) -> list[str]: + return [ + f'{src_top / self._PREBUILT_SOONG_ZIP_PATH }', + '-d', + '-o', + f'{dist_dir / name}', + ] class NullOptimizer(OptimizedBuildTarget): @@ -48,22 +175,308 @@ class NullOptimizer(OptimizedBuildTarget): def get_build_targets(self): return {self.target} - def package_outputs(self): - pass + def get_package_outputs_commands(self): + return [] + + +class ChangeInfo: + + def __init__(self, change_info_file_path): + try: + with open(change_info_file_path) as change_info_file: + change_info_contents = json.load(change_info_file) + except json.decoder.JSONDecodeError: + logging.error(f'Failed to load CHANGE_INFO: {change_info_file_path}') + raise + + self._change_info_contents = change_info_contents + + def find_changed_files(self) -> set[str]: + changed_files = set() + + for change in self._change_info_contents['changes']: + project_path = change.get('projectPath') + '/' + + for revision in change.get('revisions'): + for file_info in revision.get('fileInfos'): + changed_files.add(project_path + file_info.get('path')) + + return changed_files + + +class GeneralTestsOptimizer(OptimizedBuildTarget): + """general-tests optimizer + + This optimizer reads in the list of changed files from the file located in + env[CHANGE_INFO] and uses this list alongside the normal TEST MAPPING logic to + determine what test mapping modules will run for the given changes. It then + builds those modules and packages them in the same way general-tests.zip is + normally built. + """ + + # List of modules that are built alongside general-tests as dependencies. 
+ _REQUIRED_MODULES = frozenset([ + 'cts-tradefed', + 'vts-tradefed', + 'compatibility-host-util', + 'general-tests-shared-libs', + ]) + + def get_build_targets_impl(self) -> set[str]: + change_info_file_path = os.environ.get('CHANGE_INFO') + if not change_info_file_path: + logging.info( + 'No CHANGE_INFO env var found, general-tests optimization disabled.' + ) + return {'general-tests'} + + test_infos = self.build_context.test_infos + test_mapping_test_groups = set() + for test_info in test_infos: + is_test_mapping = test_info.is_test_mapping + current_test_mapping_test_groups = test_info.test_mapping_test_groups + uses_general_tests = test_info.build_target_used('general-tests') + + if uses_general_tests and not is_test_mapping: + logging.info( + 'Test uses general-tests.zip but is not test-mapping, general-tests' + ' optimization disabled.' + ) + return {'general-tests'} + + if is_test_mapping: + test_mapping_test_groups.update(current_test_mapping_test_groups) + + change_info = ChangeInfo(change_info_file_path) + changed_files = change_info.find_changed_files() + + test_mappings = test_mapping_module_retriever.GetTestMappings( + changed_files, set() + ) + + modules_to_build = set(self._REQUIRED_MODULES) + + modules_to_build.update( + test_mapping_module_retriever.FindAffectedModules( + test_mappings, changed_files, test_mapping_test_groups + ) + ) + + return modules_to_build + + def get_package_outputs_commands_impl(self): + src_top = pathlib.Path(os.environ.get('TOP', os.getcwd())) + dist_dir = pathlib.Path(os.environ.get('DIST_DIR')) + + soong_vars = self._query_soong_vars( + src_top, + [ + 'HOST_OUT_TESTCASES', + 'TARGET_OUT_TESTCASES', + 'PRODUCT_OUT', + 'SOONG_HOST_OUT', + 'HOST_OUT', + ], + ) + host_out_testcases = pathlib.Path(soong_vars.get('HOST_OUT_TESTCASES')) + target_out_testcases = pathlib.Path(soong_vars.get('TARGET_OUT_TESTCASES')) + product_out = pathlib.Path(soong_vars.get('PRODUCT_OUT')) + soong_host_out = 
pathlib.Path(soong_vars.get('SOONG_HOST_OUT')) + host_out = pathlib.Path(soong_vars.get('HOST_OUT')) + + host_paths = [] + target_paths = [] + host_config_files = [] + target_config_files = [] + for module in self.modules_to_build: + # The required modules are handled separately, no need to package. + if module in self._REQUIRED_MODULES: + continue + + host_path = host_out_testcases / module + if os.path.exists(host_path): + host_paths.append(host_path) + self._collect_config_files(src_top, host_path, host_config_files) + + target_path = target_out_testcases / module + if os.path.exists(target_path): + target_paths.append(target_path) + self._collect_config_files(src_top, target_path, target_config_files) + + if not os.path.exists(host_path) and not os.path.exists(target_path): + logging.info(f'No host or target build outputs found for {module}.') + + zip_commands = [] + + zip_commands.extend( + self._get_zip_test_configs_zips_commands( + src_top, + dist_dir, + host_out, + product_out, + host_config_files, + target_config_files, + ) + ) + + zip_command = self._base_zip_command(src_top, dist_dir, 'general-tests.zip') + + # Add host testcases. + if host_paths: + zip_command.extend( + self._generate_zip_options_for_items( + prefix='host', + relative_root=f'{src_top / soong_host_out}', + directories=host_paths, + ) + ) + + # Add target testcases. + if target_paths: + zip_command.extend( + self._generate_zip_options_for_items( + prefix='target', + relative_root=f'{src_top / product_out}', + directories=target_paths, + ) + ) + + # TODO(lucafarsi): Push this logic into a general-tests-minimal build command + # Add necessary tools. These are also hardcoded in general-tests.mk. 
+ framework_path = soong_host_out / 'framework' + + zip_command.extend( + self._generate_zip_options_for_items( + prefix='host/tools', + relative_root=str(framework_path), + files=[ + f"{framework_path / 'cts-tradefed.jar'}", + f"{framework_path / 'compatibility-host-util.jar'}", + f"{framework_path / 'vts-tradefed.jar'}", + ], + ) + ) + + zip_commands.append(zip_command) + return zip_commands + + def _collect_config_files( + self, + src_top: pathlib.Path, + root_dir: pathlib.Path, + config_files: list[str], + ): + for root, dirs, files in os.walk(src_top / root_dir): + for file in files: + if file.endswith('.config'): + config_files.append(root_dir / file) + + def _get_zip_test_configs_zips_commands( + self, + src_top: pathlib.Path, + dist_dir: pathlib.Path, + host_out: pathlib.Path, + product_out: pathlib.Path, + host_config_files: list[str], + target_config_files: list[str], + ) -> tuple[list[str], list[str]]: + """Generate general-tests_configs.zip and general-tests_list.zip. + + general-tests_configs.zip contains all of the .config files that were + built and general-tests_list.zip contains a text file which lists + all of the .config files that are in general-tests_configs.zip. + + general-tests_configs.zip is organized as follows: + / + host/ + testcases/ + test_1.config + test_2.config + ... + target/ + testcases/ + test_1.config + test_2.config + ... + + So the process is we write out the paths to all the host config files into + one + file and all the paths to the target config files in another. We also write + the paths to all the config files into a third file to use for + general-tests_list.zip. + + Args: + dist_dir: dist directory. + host_out: host out directory. + product_out: product out directory. + host_config_files: list of all host config files. + target_config_files: list of all target config files. 
+ + Returns: + The commands to generate general-tests_configs.zip and + general-tests_list.zip + """ + with open( + f"{host_out / 'host_general-tests_list'}", 'w' + ) as host_list_file, open( + f"{product_out / 'target_general-tests_list'}", 'w' + ) as target_list_file, open( + f"{host_out / 'general-tests_list'}", 'w' + ) as list_file: + + for config_file in host_config_files: + host_list_file.write(f'{config_file}' + '\n') + list_file.write('host/' + os.path.relpath(config_file, host_out) + '\n') + + for config_file in target_config_files: + target_list_file.write(f'{config_file}' + '\n') + list_file.write( + 'target/' + os.path.relpath(config_file, product_out) + '\n' + ) + + zip_commands = [] + + tests_config_zip_command = self._base_zip_command( + src_top, dist_dir, 'general-tests_configs.zip' + ) + tests_config_zip_command.extend( + self._generate_zip_options_for_items( + prefix='host', + relative_root=str(host_out), + list_files=[f"{host_out / 'host_general-tests_list'}"], + ) + ) + + tests_config_zip_command.extend( + self._generate_zip_options_for_items( + prefix='target', + relative_root=str(product_out), + list_files=[f"{product_out / 'target_general-tests_list'}"], + ), + ) + + zip_commands.append(tests_config_zip_command) + + tests_list_zip_command = self._base_zip_command( + src_top, dist_dir, 'general-tests_list.zip' + ) + tests_list_zip_command.extend( + self._generate_zip_options_for_items( + relative_root=str(host_out), + files=[f"{host_out / 'general-tests_list'}"], + ) + ) + zip_commands.append(tests_list_zip_command) + return zip_commands -def get_target_optimizer(target, enabled_flag, build_context, optimizer): - if enabled_flag in build_context['enabled_build_features']: - return optimizer + def get_enabled_flag(self): + return 'general_tests_optimized' - return NullOptimizer(target) + @classmethod + def get_optimized_targets(cls) -> dict[str, OptimizedBuildTarget]: + return {'general-tests': functools.partial(cls)} -# To be written as: -# 
'target': lambda target, build_context, args: get_target_optimizer( -# target, -# 'target_enabled_flag', -# build_context, -# TargetOptimizer(build_context, args), -# ) -OPTIMIZED_BUILD_TARGETS = dict() +OPTIMIZED_BUILD_TARGETS = {} +OPTIMIZED_BUILD_TARGETS.update(GeneralTestsOptimizer.get_optimized_targets()) diff --git a/ci/optimized_targets_test.py b/ci/optimized_targets_test.py new file mode 100644 index 0000000000..0b0c0ec087 --- /dev/null +++ b/ci/optimized_targets_test.py @@ -0,0 +1,350 @@ +# Copyright 2024, The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for optimized_targets.py""" + +import json +import logging +import os +import pathlib +import re +import subprocess +import textwrap +import unittest +from unittest import mock +from build_context import BuildContext +import optimized_targets +from pyfakefs import fake_filesystem_unittest + + +class GeneralTestsOptimizerTest(fake_filesystem_unittest.TestCase): + + def setUp(self): + self.setUpPyfakefs() + + os_environ_patcher = mock.patch.dict('os.environ', {}) + self.addCleanup(os_environ_patcher.stop) + self.mock_os_environ = os_environ_patcher.start() + + self._setup_working_build_env() + self._write_change_info_file() + test_mapping_dir = pathlib.Path('/project/path/file/path') + test_mapping_dir.mkdir(parents=True) + self._write_test_mapping_file() + + def _setup_working_build_env(self): + self.change_info_file = pathlib.Path('/tmp/change_info') + self._write_soong_ui_file() + self._host_out_testcases = pathlib.Path('/tmp/top/host_out_testcases') + self._host_out_testcases.mkdir(parents=True) + self._target_out_testcases = pathlib.Path('/tmp/top/target_out_testcases') + self._target_out_testcases.mkdir(parents=True) + self._product_out = pathlib.Path('/tmp/top/product_out') + self._product_out.mkdir(parents=True) + self._soong_host_out = pathlib.Path('/tmp/top/soong_host_out') + self._soong_host_out.mkdir(parents=True) + self._host_out = pathlib.Path('/tmp/top/host_out') + self._host_out.mkdir(parents=True) + + self._dist_dir = pathlib.Path('/tmp/top/out/dist') + self._dist_dir.mkdir(parents=True) + + self.mock_os_environ.update({ + 'CHANGE_INFO': str(self.change_info_file), + 'TOP': '/tmp/top', + 'DIST_DIR': '/tmp/top/out/dist', + }) + + def _write_soong_ui_file(self): + soong_path = pathlib.Path('/tmp/top/build/soong') + soong_path.mkdir(parents=True) + with open(os.path.join(soong_path, 'soong_ui.bash'), 'w') as f: + f.write(""" + #/bin/bash + echo HOST_OUT_TESTCASES='/tmp/top/host_out_testcases' + echo 
TARGET_OUT_TESTCASES='/tmp/top/target_out_testcases' + echo PRODUCT_OUT='/tmp/top/product_out' + echo SOONG_HOST_OUT='/tmp/top/soong_host_out' + echo HOST_OUT='/tmp/top/host_out' + """) + os.chmod(os.path.join(soong_path, 'soong_ui.bash'), 0o666) + + def _write_change_info_file(self): + change_info_contents = { + 'changes': [{ + 'projectPath': '/project/path', + 'revisions': [{ + 'fileInfos': [{ + 'path': 'file/path/file_name', + }], + }], + }] + } + + with open(self.change_info_file, 'w') as f: + json.dump(change_info_contents, f) + + def _write_test_mapping_file(self): + test_mapping_contents = { + 'test-mapping-group': [ + { + 'name': 'test_mapping_module', + }, + ], + } + + with open('/project/path/file/path/TEST_MAPPING', 'w') as f: + json.dump(test_mapping_contents, f) + + def test_general_tests_optimized(self): + optimizer = self._create_general_tests_optimizer() + + build_targets = optimizer.get_build_targets() + + expected_build_targets = set( + optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES + ) + expected_build_targets.add('test_mapping_module') + + self.assertSetEqual(build_targets, expected_build_targets) + + def test_no_change_info_no_optimization(self): + del os.environ['CHANGE_INFO'] + + optimizer = self._create_general_tests_optimizer() + + build_targets = optimizer.get_build_targets() + + self.assertSetEqual(build_targets, {'general-tests'}) + + def test_mapping_groups_unused_module_not_built(self): + test_context = self._create_test_context() + test_context['testInfos'][0]['extraOptions'] = [ + { + 'key': 'additional-files-filter', + 'values': ['general-tests.zip'], + }, + { + 'key': 'test-mapping-test-group', + 'values': ['unused-test-mapping-group'], + }, + ] + optimizer = self._create_general_tests_optimizer( + build_context=self._create_build_context(test_context=test_context) + ) + + build_targets = optimizer.get_build_targets() + + expected_build_targets = set( + optimized_targets.GeneralTestsOptimizer._REQUIRED_MODULES + ) + 
self.assertSetEqual(build_targets, expected_build_targets) + + def test_general_tests_used_by_non_test_mapping_test_no_optimization(self): + test_context = self._create_test_context() + test_context['testInfos'][0]['extraOptions'] = [{ + 'key': 'additional-files-filter', + 'values': ['general-tests.zip'], + }] + optimizer = self._create_general_tests_optimizer( + build_context=self._create_build_context(test_context=test_context) + ) + + build_targets = optimizer.get_build_targets() + + self.assertSetEqual(build_targets, {'general-tests'}) + + def test_malformed_change_info_raises(self): + with open(self.change_info_file, 'w') as f: + f.write('not change info') + + optimizer = self._create_general_tests_optimizer() + + with self.assertRaises(json.decoder.JSONDecodeError): + build_targets = optimizer.get_build_targets() + + def test_malformed_test_mapping_raises(self): + with open('/project/path/file/path/TEST_MAPPING', 'w') as f: + f.write('not test mapping') + + optimizer = self._create_general_tests_optimizer() + + with self.assertRaises(json.decoder.JSONDecodeError): + build_targets = optimizer.get_build_targets() + + @mock.patch('subprocess.run') + def test_packaging_outputs_success(self, subprocess_run): + subprocess_run.return_value = self._get_soong_vars_output() + optimizer = self._create_general_tests_optimizer() + self._set_up_build_outputs(['test_mapping_module']) + + targets = optimizer.get_build_targets() + package_commands = optimizer.get_package_outputs_commands() + + self._verify_soong_zip_commands(package_commands, ['test_mapping_module']) + + @mock.patch('subprocess.run') + def test_get_soong_dumpvars_fails_raises(self, subprocess_run): + subprocess_run.return_value = self._get_soong_vars_output(return_code=-1) + optimizer = self._create_general_tests_optimizer() + self._set_up_build_outputs(['test_mapping_module']) + + targets = optimizer.get_build_targets() + + with self.assertRaisesRegex(RuntimeError, 'Soong dumpvars failed!'): + 
package_commands = optimizer.get_package_outputs_commands() + + @mock.patch('subprocess.run') + def test_get_soong_dumpvars_bad_output_raises(self, subprocess_run): + subprocess_run.return_value = self._get_soong_vars_output( + stdout='This output is bad' + ) + optimizer = self._create_general_tests_optimizer() + self._set_up_build_outputs(['test_mapping_module']) + + targets = optimizer.get_build_targets() + + with self.assertRaisesRegex( + RuntimeError, 'Error parsing soong dumpvars output' + ): + package_commands = optimizer.get_package_outputs_commands() + + def _create_general_tests_optimizer(self, build_context: BuildContext = None): + if not build_context: + build_context = self._create_build_context() + return optimized_targets.GeneralTestsOptimizer( + 'general-tests', build_context, None + ) + + def _create_build_context( + self, + general_tests_optimized: bool = True, + test_context: dict[str, any] = None, + ) -> BuildContext: + if not test_context: + test_context = self._create_test_context() + build_context_dict = {} + build_context_dict['enabledBuildFeatures'] = [{'name': 'optimized_build'}] + if general_tests_optimized: + build_context_dict['enabledBuildFeatures'].append( + {'name': 'general_tests_optimized'} + ) + build_context_dict['testContext'] = test_context + return BuildContext(build_context_dict) + + def _create_test_context(self): + return { + 'testInfos': [ + { + 'name': 'atp_test', + 'target': 'test_target', + 'branch': 'branch', + 'extraOptions': [ + { + 'key': 'additional-files-filter', + 'values': ['general-tests.zip'], + }, + { + 'key': 'test-mapping-test-group', + 'values': ['test-mapping-group'], + }, + ], + 'command': '/tf/command', + 'extraBuildTargets': [ + 'extra_build_target', + ], + }, + ], + } + + def _get_soong_vars_output( + self, return_code: int = 0, stdout: str = '' + ) -> subprocess.CompletedProcess: + return_value = subprocess.CompletedProcess(args=[], returncode=return_code) + if not stdout: + stdout = 
textwrap.dedent(f"""\ + HOST_OUT_TESTCASES='{self._host_out_testcases}' + TARGET_OUT_TESTCASES='{self._target_out_testcases}' + PRODUCT_OUT='{self._product_out}' + SOONG_HOST_OUT='{self._soong_host_out}' + HOST_OUT='{self._host_out}'""") + + return_value.stdout = stdout + return return_value + + def _set_up_build_outputs(self, targets: list[str]): + for target in targets: + host_dir = self._host_out_testcases / target + host_dir.mkdir() + (host_dir / f'{target}.config').touch() + (host_dir / f'test_file').touch() + + target_dir = self._target_out_testcases / target + target_dir.mkdir() + (target_dir / f'{target}.config').touch() + (target_dir / f'test_file').touch() + + def _verify_soong_zip_commands(self, commands: list[str], targets: list[str]): + """Verify the structure of the zip commands. + + Zip commands have to start with the soong_zip binary path, then are followed + by a couple of options and the name of the file being zipped. Depending on + which zip we are creating look for a few essential items being added in + those zips. 
+ + Args: + commands: list of command lists + targets: list of targets expected to be in general-tests.zip + """ + for command in commands: + self.assertEqual( + '/tmp/top/prebuilts/build-tools/linux-x86/bin/soong_zip', + command[0], + ) + self.assertEqual('-d', command[1]) + self.assertEqual('-o', command[2]) + match (command[3]): + case '/tmp/top/out/dist/general-tests_configs.zip': + self.assertIn(f'{self._host_out}/host_general-tests_list', command) + self.assertIn( + f'{self._product_out}/target_general-tests_list', command + ) + return + case '/tmp/top/out/dist/general-tests_list.zip': + self.assertIn('-f', command) + self.assertIn(f'{self._host_out}/general-tests_list', command) + return + case '/tmp/top/out/dist/general-tests.zip': + for target in targets: + self.assertIn(f'{self._host_out_testcases}/{target}', command) + self.assertIn(f'{self._target_out_testcases}/{target}', command) + self.assertIn( + f'{self._soong_host_out}/framework/cts-tradefed.jar', command + ) + self.assertIn( + f'{self._soong_host_out}/framework/compatibility-host-util.jar', + command, + ) + self.assertIn( + f'{self._soong_host_out}/framework/vts-tradefed.jar', command + ) + return + case _: + self.fail(f'malformed command: {command}') + + +if __name__ == '__main__': + # Setup logging to be silent so unit tests can pass through TF. + logging.disable(logging.ERROR) + unittest.main() diff --git a/ci/test_mapping_module_retriever.py b/ci/test_mapping_module_retriever.py index d2c13c0e7d..c93cdd5953 100644 --- a/ci/test_mapping_module_retriever.py +++ b/ci/test_mapping_module_retriever.py @@ -17,11 +17,13 @@ Simple parsing code to scan test_mapping files and determine which modules are needed to build for the given list of changed files. 
TODO(lucafarsi): Deduplicate from artifact_helper.py """ +# TODO(lucafarsi): Share this logic with the original logic in +# test_mapping_test_retriever.py -from typing import Any, Dict, Set, Text import json import os import re +from typing import Any # Regex to extra test name from the path of test config file. TEST_NAME_REGEX = r'(?:^|.*/)([^/]+)\.config' @@ -39,7 +41,7 @@ TEST_MAPPING = 'TEST_MAPPING' _COMMENTS_RE = re.compile(r'(\"(?:[^\"\\]|\\.)*\"|(?=//))(?://.*)?') -def FilterComments(test_mapping_file: Text) -> Text: +def FilterComments(test_mapping_file: str) -> str: """Remove comments in TEST_MAPPING file to valid format. Only '//' is regarded as comments. @@ -52,8 +54,8 @@ def FilterComments(test_mapping_file: Text) -> Text: """ return re.sub(_COMMENTS_RE, r'\1', test_mapping_file) -def GetTestMappings(paths: Set[Text], - checked_paths: Set[Text]) -> Dict[Text, Dict[Text, Any]]: +def GetTestMappings(paths: set[str], + checked_paths: set[str]) -> dict[str, dict[str, Any]]: """Get the affected TEST_MAPPING files. TEST_MAPPING files in source code are packaged into a build artifact @@ -123,3 +125,68 @@ def GetTestMappings(paths: Set[Text], pass return test_mappings + + +def FindAffectedModules( + test_mappings: dict[str, Any], + changed_files: set[str], + test_mapping_test_groups: set[str], +) -> set[str]: + """Find affected test modules. + + Find the affected set of test modules that would run in a test mapping run based on the given test mappings, changed files, and test mapping test group. + + Args: + test_mappings: A set of test mappings returned by GetTestMappings in the following format: + { + 'test_mapping_file_path': { + 'group_name' : [ + 'name': 'module_name', + ], + } + } + changed_files: A set of files changed for the given run. + test_mapping_test_groups: A set of test mapping test groups that are being considered for the given run. + + Returns: + A set of test module names which would run for a test mapping test run with the given parameters. 
+ """ + + modules = set() + + for test_mapping in test_mappings.values(): + for group_name, group in test_mapping.items(): + # If a module is not in any of the test mapping groups being tested skip + # it. + if group_name not in test_mapping_test_groups: + continue + + for entry in group: + module_name = entry.get('name') + + if not module_name: + continue + + file_patterns = entry.get('file_patterns') + if not file_patterns: + modules.add(module_name) + continue + + if matches_file_patterns(file_patterns, changed_files): + modules.add(module_name) + + return modules + +def MatchesFilePatterns( + file_patterns: list[set], changed_files: set[str] +) -> bool: + """Checks if any of the changed files match any of the file patterns. + + Args: + file_patterns: A list of file patterns to match against. + changed_files: A set of files to check against the file patterns. + + Returns: + True if any of the changed files match any of the file patterns. + """ + return any(re.search(pattern, "|".join(changed_files)) for pattern in file_patterns) diff --git a/cogsetup.sh b/cogsetup.sh deleted file mode 100644 index ef1485d5f2..0000000000 --- a/cogsetup.sh +++ /dev/null @@ -1,71 +0,0 @@ -# -# Copyright (C) 2023 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# This file is executed by build/envsetup.sh, and can use anything -# defined in envsetup.sh. 
-function _create_out_symlink_for_cog() { - if [[ "${OUT_DIR}" == "" ]]; then - OUT_DIR="out" - fi - - # getoutdir ensures paths are absolute. envsetup could be called from a - # directory other than the root of the source tree - local outdir=$(getoutdir) - if [[ -L "${outdir}" ]]; then - return - fi - if [ -d "${outdir}" ]; then - echo -e "\tOutput directory ${outdir} cannot be present in a Cog workspace." - echo -e "\tDelete \"${outdir}\" or create a symlink from \"${outdir}\" to a directory outside your workspace." - return 1 - fi - - DEFAULT_OUTPUT_DIR="${HOME}/.cog/android-build-out" - mkdir -p ${DEFAULT_OUTPUT_DIR} - ln -s ${DEFAULT_OUTPUT_DIR} ${outdir} -} - -# This function sets up the build environment to be appropriate for Cog. -function _setup_cog_env() { - _create_out_symlink_for_cog - if [ "$?" -eq "1" ]; then - echo -e "\e[0;33mWARNING:\e[00m Cog environment setup failed!" - return 1 - fi - - export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog" - - # Running repo command within Cog workspaces is not supported, so override - # it with this function. If the user is running repo within a Cog workspace, - # we'll fail with an error, otherwise, we run the original repo command with - # the given args. - if ! ORIG_REPO_PATH=`which repo`; then - return 0 - fi - function repo { - if [[ "${PWD}" == /google/cog/* ]]; then - echo "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces." - return 1 - fi - ${ORIG_REPO_PATH} "$@" - } -} - -if [[ "${PWD}" != /google/cog/* ]]; then - echo -e "\e[01;31mERROR:\e[0m This script must be run from a Cog workspace." 
-fi - -_setup_cog_env diff --git a/core/Makefile b/core/Makefile index 7d7b9e7789..81ae6f784c 100644 --- a/core/Makefile +++ b/core/Makefile @@ -717,7 +717,7 @@ $(foreach kmd,$(BOARD_KERNEL_MODULE_DIRS), \ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-charger-load,$(kmd))) \ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-charger-load,$(kmd))) \ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \ - $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \ + $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT)),system,modules.load,,$(kmd))) \ $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,$(GENERIC_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd))))) @@ -1267,9 +1267,8 @@ boototapackage_16k: $(BUILT_BOOT_OTA_PACKAGE_16K) endif - +# The value of RAMDISK_NODE_LIST is defined in system/core/rootdir/Android.bp. # This file contains /dev nodes description added to the generic ramdisk -RAMDISK_NODE_LIST := $(PRODUCT_OUT)/ramdisk_node_list # We just build this directly to the install location. 
INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET) @@ -1965,7 +1964,7 @@ target_system_dlkm_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_SYST installed_system_dlkm_notice_xml_gz := $(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz ALL_INSTALLED_NOTICE_FILES := \ - $(installed_notice_html_or_xml_gz) \ + $(if $(USE_SOONG_DEFINED_SYSTEM_IMAGE),,$(installed_notice_html_or_xml_gz)) \ $(installed_vendor_notice_xml_gz) \ $(installed_product_notice_xml_gz) \ $(installed_system_ext_notice_xml_gz) \ @@ -2052,7 +2051,9 @@ endif endif # PRODUCT_NOTICE_SPLIT +ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true) ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz) +endif need_vendor_notice:=false ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true) @@ -3417,8 +3418,10 @@ endif # PRODUCT_FSVERITY_GENERATE_METADATA # system image INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES)) +ifdef BUILDING_SYSTEM_IMAGE INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \ $(ALL_DEFAULT_INSTALLED_MODULES))) +endif # Create symlink /system/vendor to /vendor if necessary. 
ifdef BOARD_USES_VENDORIMAGE @@ -3510,6 +3513,8 @@ $(SYSTEM_LINKER_CONFIG): $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SO --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)" $(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key requireLibs \ --value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)" + $(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key provideLibs \ + --value "$(foreach lib,$(PRODUCT_EXTRA_STUB_LIBRARIES), $(lib).so)" $(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),) $(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE)) @@ -3562,14 +3567,24 @@ ifneq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),) file_list_diff := $(HOST_OUT_EXECUTABLES)/file_list_diff$(HOST_EXECUTABLE_SUFFIX) system_file_diff_timestamp := $(systemimage_intermediates)/file_diff.timestamp +# The build configuration to build the REL version may have more files to allow. +# Use allowlist_next in addition to the allowlist in this case. 
+system_file_diff_allowlist_next := +ifeq (REL,$(PLATFORM_VERSION_CODENAME)) +system_file_diff_allowlist_next := $(ALL_MODULES.system_image_diff_allowlist_next.INSTALLED) +$(system_file_diff_timestamp): PRIVATE_ALLOWLIST_NEXT := $(system_file_diff_allowlist_next) +endif $(system_file_diff_timestamp): \ $(systemimage_intermediates)/file_list.txt \ $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \ $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \ + $(system_file_diff_allowlist_next) \ $(file_list_diff) $(file_list_diff) $(systemimage_intermediates)/file_list.txt \ $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST) \ - $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) + $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) \ + --allowlists $(ALL_MODULES.system_image_diff_allowlist.INSTALLED) \ + $(PRIVATE_ALLOWLIST_NEXT) touch $@ $(BUILT_SYSTEMIMAGE): $(system_file_diff_timestamp) @@ -3587,10 +3602,10 @@ ifeq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true) ifeq ($(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE),) $(error PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE must be set if USE_SOONG_DEFINED_SYSTEM_IMAGE is true) endif -soong_defined_system_image := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) -$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(soong_defined_system_image) -$(eval $(call copy-one-file, $(soong_defined_system_image), $(BUILT_SYSTEMIMAGE))) -soong_defined_system_image := +SOONG_DEFINED_SYSTEM_IMAGE_PATH := $(call intermediates-dir-for,ETC,$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE))/$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE) +SOONG_DEFINED_SYSTEM_IMAGE_BASE := $(dir $(ALL_MODULES.$(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE).FILESYSTEM_FILELIST)) +$(BUILT_SYSTEMIMAGE): $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(SOONG_DEFINED_SYSTEM_IMAGE_PATH) +$(eval $(call copy-one-file, 
$(SOONG_DEFINED_SYSTEM_IMAGE_PATH), $(BUILT_SYSTEMIMAGE))) else $(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE) $(systemimage_intermediates)/file_list.txt $(call build-systemimage-target,$@) @@ -3675,10 +3690,10 @@ platform-java: # ----------------------------------------------------------------- # data partition image INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT_DATA)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES)) +ifdef BUILDING_USERDATA_IMAGE INTERNAL_USERDATAIMAGE_FILES := \ $(filter $(TARGET_OUT_DATA)/%,$(ALL_DEFAULT_INSTALLED_MODULES)) -ifdef BUILDING_USERDATA_IMAGE userdataimage_intermediates := \ $(call intermediates-dir-for,PACKAGING,userdata) BUILT_USERDATAIMAGE_TARGET := $(PRODUCT_OUT)/userdata.img @@ -3995,6 +4010,21 @@ INTERNAL_PRODUCTIMAGE_FILES := \ $(filter $(TARGET_OUT_PRODUCT)/%,\ $(ALL_DEFAULT_INSTALLED_MODULES)) +# Install product/etc/linker.config.pb with PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS +product_linker_config_file := $(TARGET_OUT_PRODUCT)/etc/linker.config.pb +$(product_linker_config_file): private_linker_config_fragments := $(PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS) +$(product_linker_config_file): $(INTERNAL_PRODUCTIMAGE_FILES) | $(HOST_OUT_EXECUTABLES)/conv_linker_config + @echo Creating linker config: $@ + @mkdir -p $(dir $@) + @rm -f $@ + $(HOST_OUT_EXECUTABLES)/conv_linker_config proto \ + --source $(call normalize-path-list,$(private_linker_config_fragments)) \ + --output $@ +$(call define declare-1p-target,$(product_linker_config_file),) +INTERNAL_PRODUCTIMAGE_FILES += $(product_linker_config_file) +ALL_DEFAULT_INSTALLED_MODULES += $(product_linker_config_file) + + INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json) $(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT) @@ -5136,6 +5166,7 @@ apex_dirs := \ $(TARGET_OUT)/apex/% \ $(TARGET_OUT_SYSTEM_EXT)/apex/% \ 
$(TARGET_OUT_VENDOR)/apex/% \ + $(TARGET_OUT_ODM)/apex/% \ $(TARGET_OUT_PRODUCT)/apex/% \ apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES))) @@ -5188,6 +5219,7 @@ apex_dirs := \ $(TARGET_OUT_PRODUCT)/apex/% \ $(TARGET_OUT_SYSTEM_EXT)/apex/% \ $(TARGET_OUT_VENDOR)/apex/% \ + $(TARGET_OUT_ODM)/apex/% \ apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES))) @@ -5206,6 +5238,7 @@ $(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/apexd_host $(apex_files) --system_ext_path $(TARGET_OUT_SYSTEM_EXT) \ --product_path $(TARGET_OUT_PRODUCT) \ --vendor_path $(TARGET_OUT_VENDOR) \ + --odm_path $(TARGET_OUT_ODM) \ --apex_path $(APEX_OUT) apex_files := @@ -6130,6 +6163,9 @@ $(BUILT_TARGET_FILES_PACKAGE): zip_root := $(intermediates)/$(name) $(BUILT_TARGET_FILES_DIR): zip_root := $(intermediates)/$(name) $(BUILT_TARGET_FILES_DIR): intermediates := $(intermediates) +ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),) + $(BUILT_TARGET_FILES_DIR): $(SOONG_DEFINED_SYSTEM_IMAGE_PATH) +endif # $(1): Directory to copy # $(2): Location to copy it to @@ -6458,8 +6494,11 @@ $(BUILT_TARGET_FILES_DIR): \ $(INSTALLED_RAMDISK_TARGET) \ $(INSTALLED_DTBIMAGE_TARGET) \ $(INSTALLED_2NDBOOTLOADER_TARGET) \ + $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \ $(BUILT_RAMDISK_16K_TARGET) \ $(BUILT_KERNEL_16K_TARGET) \ + $(BUILT_BOOTIMAGE_16K_TARGET) \ + $(INSTALLED_DTBOIMAGE_16KB_TARGET) \ $(BOARD_PREBUILT_DTBOIMAGE) \ $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \ $(BOARD_RECOVERY_ACPIO) \ @@ -6613,8 +6652,13 @@ endif endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET ifdef BUILDING_SYSTEM_IMAGE @# Contents of the system image +ifneq ($(SOONG_DEFINED_SYSTEM_IMAGE_PATH),) + $(hide) $(call package_files-copy-root, \ + $(SOONG_DEFINED_SYSTEM_IMAGE_BASE)/root/system,$(zip_root)/SYSTEM) +else $(hide) $(call package_files-copy-root, \ $(SYSTEMIMAGE_SOURCE_DIR),$(zip_root)/SYSTEM) +endif else ifdef INSTALLED_BUILD_PROP_TARGET @# Copy the system build.prop even if not building a system image @# because 
add_img_to_target_files may need it to build other partition @@ -6808,14 +6852,22 @@ ifdef BOARD_PREBUILT_DTBOIMAGE $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/ endif # BOARD_PREBUILT_DTBOIMAGE -ifdef BUILT_KERNEL_16K_TARGET +ifdef BOARD_KERNEL_PATH_16K $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(BUILT_KERNEL_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/ -endif # BUILT_KERNEL_16K_TARGET -ifdef BUILT_RAMDISK_16K_TARGET +endif # BOARD_KERNEL_PATH_16K +ifdef BOARD_KERNEL_MODULES_16K $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(BUILT_RAMDISK_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/ -endif # BUILT_RAMDISK_16K_TARGET +endif # BOARD_KERNEL_MODULES_16K +ifdef BUILT_BOOTIMAGE_16K_TARGET + $(hide) mkdir -p $(zip_root)/IMAGES + $(hide) cp $(BUILT_BOOTIMAGE_16K_TARGET) $(zip_root)/IMAGES/ +endif # BUILT_BOOTIMAGE_16K_TARGET +ifdef INSTALLED_DTBOIMAGE_16KB_TARGET + $(hide) mkdir -p $(zip_root)/IMAGES + $(hide) cp $(INSTALLED_DTBOIMAGE_16KB_TARGET) $(zip_root)/IMAGES/ +endif # INSTALLED_DTBOIMAGE_16KB_TARGET ifeq ($(BOARD_USES_PVMFWIMAGE),true) $(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES $(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/ @@ -6883,6 +6935,33 @@ ifdef BOARD_KERNEL_PAGESIZE $(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/INIT_BOOT/pagesize endif # BOARD_KERNEL_PAGESIZE endif # BUILDING_INIT_BOOT_IMAGE +ifdef BOARD_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_EROFS_COMPRESS_HINTS) $(zip_root)/META/erofs_default_compress_hints.txt +endif +ifdef BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_SYSTEMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_erofs_compress_hints.txt +endif +ifdef BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_SYSTEM_EXTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_ext_erofs_compress_hints.txt +endif +ifdef BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp 
$(BOARD_PRODUCTIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/product_erofs_compress_hints.txt +endif +ifdef BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_VENDORIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_erofs_compress_hints.txt +endif +ifdef BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_ODMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_erofs_compress_hints.txt +endif +ifdef BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_VENDOR_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/vendor_dlkm_erofs_compress_hints.txt +endif +ifdef BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_ODM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/odm_dlkm_erofs_compress_hints.txt +endif +ifdef BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS + $(hide) cp $(BOARD_SYSTEM_DLKMIMAGE_EROFS_COMPRESS_HINTS) $(zip_root)/META/system_dlkm_erofs_compress_hints.txt +endif ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),) $(call fs_config,$(zip_root)/VENDOR_BOOT/RAMDISK,) > $(zip_root)/META/vendor_boot_filesystem_config.txt endif @@ -7859,13 +7938,11 @@ haiku-presubmit: $(SOONG_PRESUBMIT_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_PRESUBMIT_ $(call dist-for-goals,haiku-presubmit,$(SOONG_PRESUBMIT_FUZZ_PACKAGING_ARCH_MODULES)) # ----------------------------------------------------------------- -# Extract platform fonts used in Layoutlib +# Extract additional data files used in Layoutlib include $(BUILD_SYSTEM)/layoutlib_data.mk # ----------------------------------------------------------------- -# Desktop pack image hook. -ifneq (,$(strip $(PACK_DESKTOP_FILESYSTEM_IMAGES))) -PACK_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_image.bin +# Desktop pack common variables. 
PACK_IMAGE_SCRIPT := $(HOST_OUT_EXECUTABLES)/pack_image IMAGES := $(INSTALLED_BOOTIMAGE_TARGET) \ $(INSTALLED_SUPERIMAGE_TARGET) \ @@ -7874,6 +7951,11 @@ IMAGES := $(INSTALLED_BOOTIMAGE_TARGET) \ $(INSTALLED_VBMETAIMAGE_TARGET) \ $(INSTALLED_USERDATAIMAGE_TARGET) +# ----------------------------------------------------------------- +# Desktop pack image hook. +ifneq (,$(strip $(PACK_DESKTOP_FILESYSTEM_IMAGES))) +PACK_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_image.bin + $(PACK_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT) $(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive @@ -7882,11 +7964,86 @@ PACKED_IMAGE_ARCHIVE_TARGET := $(PACK_IMAGE_TARGET).gz $(PACKED_IMAGE_ARCHIVE_TARGET): $(PACK_IMAGE_TARGET) | $(GZIP) $(GZIP) -fk $(PACK_IMAGE_TARGET) -droidcore-unbundled: $(PACKED_IMAGE_ARCHIVE_TARGET) +$(call dist-for-goals,dist_files,$(PACKED_IMAGE_ARCHIVE_TARGET)) + +.PHONY: pack-image +pack-image: $(PACK_IMAGE_TARGET) endif # PACK_DESKTOP_FILESYSTEM_IMAGES # ----------------------------------------------------------------- +# Desktop pack recovery image hook. 
+ifneq (,$(strip $(PACK_DESKTOP_RECOVERY_IMAGE))) +PACK_RECOVERY_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_recovery_image.bin +PACK_RECOVERY_IMAGE_ARGS := --noarchive --recovery + +ifneq (,$(strip $(PACK_RECOVERY_IMAGE_EXPERIMENTAL))) +PACK_RECOVERY_IMAGE_ARGS += --experimental +endif # PACK_RECOVERY_IMAGE_EXPERIMENTAL + +$(PACK_RECOVERY_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT) + $(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_RECOVERY_IMAGE_ARGS) + +PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET := $(PACK_RECOVERY_IMAGE_TARGET).gz + +$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET): $(PACK_RECOVERY_IMAGE_TARGET) | $(GZIP) + $(GZIP) -fk $(PACK_RECOVERY_IMAGE_TARGET) + +$(call dist-for-goals,dist_files,$(PACKED_RECOVERY_IMAGE_ARCHIVE_TARGET)) + +.PHONY: pack-recovery-image +pack-recovery-image: $(PACK_RECOVERY_IMAGE_TARGET) + +endif # PACK_DESKTOP_RECOVERY_IMAGE + +# ----------------------------------------------------------------- +# Desktop pack update image hook. +ifneq (,$(strip $(PACK_DESKTOP_UPDATE_IMAGE))) +PACK_UPDATE_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_update_image.bin +PACK_UPDATE_IMAGE_ARGS := --noarchive --update + +ifneq (,$(strip $(PACK_UPDATE_IMAGE_EXPERIMENTAL))) +PACK_UPDATE_IMAGE_ARGS += --experimental +endif # PACK_UPDATE_IMAGE_EXPERIMENTAL + +$(PACK_UPDATE_IMAGE_TARGET): $(IMAGES) $(PACK_IMAGE_SCRIPT) + $(PACK_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) $(PACK_UPDATE_IMAGE_ARGS) + +PACKED_UPDATE_IMAGE_ARCHIVE_TARGET := $(PACK_UPDATE_IMAGE_TARGET).gz + +$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET): $(PACK_UPDATE_IMAGE_TARGET) | $(GZIP) + $(GZIP) -fk $(PACK_UPDATE_IMAGE_TARGET) + +$(call dist-for-goals,dist_files,$(PACKED_UPDATE_IMAGE_ARCHIVE_TARGET)) + +.PHONY: pack-update-image +pack-update-image: $(PACK_UPDATE_IMAGE_TARGET) + +endif # PACK_DESKTOP_UPDATE_IMAGE + +PACK_MIGRATION_IMAGE_SCRIPT := $(HOST_OUT_EXECUTABLES)/pack_migration_image + +# ----------------------------------------------------------------- +# Desktop pack migration image hook. 
+ifeq ($(ANDROID_DESKTOP_MIGRATION_IMAGE),true) +PACK_MIGRATION_IMAGE_TARGET := $(PRODUCT_OUT)/android-desktop_migration_image.bin + +$(PACK_MIGRATION_IMAGE_TARGET): $(IMAGES) $(PACK_MIGRATION_IMAGE_SCRIPT) + $(PACK_MIGRATION_IMAGE_SCRIPT) --out_dir $(PRODUCT_OUT) --noarchive + +PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET := $(PACK_MIGRATION_IMAGE_TARGET).gz + +$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET): $(PACK_MIGRATION_IMAGE_TARGET) | $(GZIP) + $(GZIP) -fk $(PACK_MIGRATION_IMAGE_TARGET) + +$(call dist-for-goals,dist_files,$(PACKED_MIGRATION_IMAGE_ARCHIVE_TARGET)) + +.PHONY: pack-migration-image +pack-migration-image: $(PACK_MIGRATION_IMAGE_TARGET) + +endif # ANDROID_DESKTOP_MIGRATION_IMAGE + +# ----------------------------------------------------------------- # OS Licensing include $(BUILD_SYSTEM)/os_licensing.mk diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk index f2ff286481..06dc54c5b1 100644 --- a/core/android_soong_config_vars.mk +++ b/core/android_soong_config_vars.mk @@ -28,6 +28,7 @@ $(call add_soong_config_namespace,ANDROID) $(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE) $(call soong_config_set_bool,ANDROID,BOARD_USES_RECOVERY_AS_BOOT,$(BOARD_USES_RECOVERY_AS_BOOT)) +$(call soong_config_set_bool,ANDROID,BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT,$(BOARD_MOVE_GSI_AVB_KEYS_TO_VENDOR_BOOT)) $(call add_soong_config_var,ANDROID,CHECK_DEV_TYPE_VIOLATIONS) $(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_VERSION) $(call add_soong_config_var,ANDROID,PLATFORM_SEPOLICY_COMPAT_VERSIONS) @@ -36,12 +37,28 @@ $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_DRMSERVER) $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64) $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER) +# For Sanitizers +$(call soong_config_set_bool,ANDROID,ASAN_ENABLED,$(if $(filter address,$(SANITIZE_TARGET)),true,false)) +$(call soong_config_set_bool,ANDROID,HWASAN_ENABLED,$(if $(filter 
hwaddress,$(SANITIZE_TARGET)),true,false)) +$(call soong_config_set_bool,ANDROID,SANITIZE_TARGET_SYSTEM_ENABLED,$(if $(filter true,$(SANITIZE_TARGET_SYSTEM)),true,false)) + +# For init.environ.rc +$(call soong_config_set_bool,ANDROID,GCOV_COVERAGE,$(NATIVE_COVERAGE)) +$(call soong_config_set_bool,ANDROID,CLANG_COVERAGE,$(CLANG_COVERAGE)) +$(call soong_config_set,ANDROID,SCUDO_ALLOCATION_RING_BUFFER_SIZE,$(PRODUCT_SCUDO_ALLOCATION_RING_BUFFER_SIZE)) + +$(call soong_config_set_bool,ANDROID,EMMA_INSTRUMENT,$(if $(filter true,$(EMMA_INSTRUMENT)),true,false)) + # PRODUCT_PRECOMPILED_SEPOLICY defaults to true. Explicitly check if it's "false" or not. $(call soong_config_set_bool,ANDROID,PRODUCT_PRECOMPILED_SEPOLICY,$(if $(filter false,$(PRODUCT_PRECOMPILED_SEPOLICY)),false,true)) +# For art modules +$(call soong_config_set_bool,art_module,host_prefer_32_bit,$(if $(filter true,$(HOST_PREFER_32_BIT)),true,false)) ifdef ART_DEBUG_OPT_FLAG $(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG)) endif +# The default value of ART_BUILD_HOST_DEBUG is true +$(call soong_config_set_bool,art_module,art_build_host_debug,$(if $(filter false,$(ART_BUILD_HOST_DEBUG)),false,true)) ifdef TARGET_BOARD_AUTO $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO)) @@ -56,11 +73,6 @@ $(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static endif endif -# TODO(b/308187800): some internal modules set `prefer` to true on the prebuilt apex module, -# and set that to false when `ANDROID.module_build_from_source` is true. -# Set this soong config variable to true for now, and cleanup `prefer` as part of b/308187800 -$(call add_soong_config_var_value,ANDROID,module_build_from_source,true) - # Enable SystemUI optimizations by default unless explicitly set. 
SYSTEMUI_OPTIMIZE_JAVA ?= true $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA) @@ -90,6 +102,7 @@ endif $(call add_soong_config_var_value,ANDROID,release_avf_allow_preinstalled_apps,$(RELEASE_AVF_ALLOW_PREINSTALLED_APPS)) $(call add_soong_config_var_value,ANDROID,release_avf_enable_device_assignment,$(RELEASE_AVF_ENABLE_DEVICE_ASSIGNMENT)) $(call add_soong_config_var_value,ANDROID,release_avf_enable_dice_changes,$(RELEASE_AVF_ENABLE_DICE_CHANGES)) +$(call add_soong_config_var_value,ANDROID,release_avf_enable_early_vm,$(RELEASE_AVF_ENABLE_EARLY_VM)) $(call add_soong_config_var_value,ANDROID,release_avf_enable_llpvm_changes,$(RELEASE_AVF_ENABLE_LLPVM_CHANGES)) $(call add_soong_config_var_value,ANDROID,release_avf_enable_multi_tenant_microdroid_vm,$(RELEASE_AVF_ENABLE_MULTI_TENANT_MICRODROID_VM)) $(call add_soong_config_var_value,ANDROID,release_avf_enable_network,$(RELEASE_AVF_ENABLE_NETWORK)) @@ -162,3 +175,36 @@ $(call soong_config_set,bootclasspath,release_crashrecovery_module,$(RELEASE_CRA # Enable Profiling module. Also used by platform_bootclasspath. $(call soong_config_set,ANDROID,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE)) $(call soong_config_set,bootclasspath,release_package_profiling_module,$(RELEASE_PACKAGE_PROFILING_MODULE)) + +# Add perf-setup build flag to soong +# Note: BOARD_PERFSETUP_SCRIPT location must be under platform_testing/scripts/perf-setup/. 
+ifdef BOARD_PERFSETUP_SCRIPT + $(call soong_config_set,perf,board_perfsetup_script,$(notdir $(BOARD_PERFSETUP_SCRIPT))) +endif + +# Add target_use_pan_display flag for hardware/libhardware:gralloc.default +$(call soong_config_set_bool,gralloc,target_use_pan_display,$(if $(filter true,$(TARGET_USE_PAN_DISPLAY)),true,false)) + +# Add use_camera_v4l2_hal flag for hardware/libhardware/modules/camera/3_4:camera.v4l2 +$(call soong_config_set_bool,camera,use_camera_v4l2_hal,$(if $(filter true,$(USE_CAMERA_V4L2_HAL)),true,false)) + +# Add audioserver_multilib flag for hardware/interfaces/soundtrigger/2.0/default:android.hardware.soundtrigger@2.0-impl +ifneq ($(strip $(AUDIOSERVER_MULTILIB)),) + $(call soong_config_set,soundtrigger,audioserver_multilib,$(AUDIOSERVER_MULTILIB)) +endif + +# Add sim_count, disable_rild_oem_hook, and use_aosp_rild flag for ril related modules +$(call soong_config_set,ril,sim_count,$(SIM_COUNT)) +ifneq ($(DISABLE_RILD_OEM_HOOK), false) + $(call soong_config_set_bool,ril,disable_rild_oem_hook,true) +endif +ifneq ($(ENABLE_VENDOR_RIL_SERVICE), true) + $(call soong_config_set_bool,ril,use_aosp_rild,true) +endif + +# Export target_board_platform to soong for hardware/google/graphics/common/libmemtrack:memtrack.$(TARGET_BOARD_PLATFORM) +$(call soong_config_set,ANDROID,target_board_platform,$(TARGET_BOARD_PLATFORM)) + +# Export board_uses_scaler_m2m1shot and board_uses_align_restriction to soong for hardware/google/graphics/common/libscaler:libexynosscaler +$(call soong_config_set_bool,google_graphics,board_uses_scaler_m2m1shot,$(if $(filter true,$(BOARD_USES_SCALER_M2M1SHOT)),true,false)) +$(call soong_config_set_bool,google_graphics,board_uses_align_restriction,$(if $(filter true,$(BOARD_USES_ALIGN_RESTRICTION)),true,false)) diff --git a/core/base_rules.mk b/core/base_rules.mk index ca553f6b05..5363e0fbf9 100644 --- a/core/base_rules.mk +++ b/core/base_rules.mk @@ -340,7 +340,7 @@ LOCAL_BUILT_MODULE := $(intermediates)/$(my_built_module_stem) ifneq 
(,$(LOCAL_SOONG_INSTALLED_MODULE)) ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK)) - $(call pretty-error, LOCAL_SOONG_INSTALLED_MODULE can only be used from $(SOONG_ANDROID_MK)) + $(call pretty-error, LOCAL_MODULE_MAKEFILE can only be used from $(SOONG_ANDROID_MK)) endif # Use the install path requested by Soong. LOCAL_INSTALLED_MODULE := $(LOCAL_SOONG_INSTALLED_MODULE) @@ -776,6 +776,8 @@ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \ $(eval my_compat_dist_$(suite) := $(patsubst %:$(LOCAL_INSTALLED_MODULE),$(LOCAL_INSTALLED_MODULE):$(LOCAL_INSTALLED_MODULE),\ $(foreach dir, $(call compatibility_suite_dirs,$(suite),$(arch_dir)), \ $(LOCAL_BUILT_MODULE):$(dir)/$(my_installed_module_stem)))) \ + $(eval my_compat_module_arch_dir_$(suite).$(my_register_name) :=) \ + $(foreach dir,$(call compatibility_suite_dirs,$(suite),$(arch_dir)),$(eval my_compat_module_arch_dir_$(suite).$(my_register_name) += $(dir))) \ $(eval my_compat_dist_config_$(suite) := )) ifneq (,$(LOCAL_SOONG_CLASSES_JAR)) diff --git a/core/binary.mk b/core/binary.mk index 0bc94692a4..34811449e9 100644 --- a/core/binary.mk +++ b/core/binary.mk @@ -205,8 +205,6 @@ ifneq ($(LOCAL_SDK_VERSION),) my_api_level := $(my_ndk_api) endif - my_ndk_source_root := \ - $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources my_built_ndk := $(SOONG_OUT_DIR)/ndk my_ndk_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_TRIPLE) my_ndk_sysroot_include := \ @@ -239,16 +237,18 @@ ifneq ($(LOCAL_SDK_VERSION),) endif ifeq (system,$(LOCAL_NDK_STL_VARIANT)) + my_ndk_source_root := \ + $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include my_system_shared_libraries += libstdc++ else ifneq (,$(filter c++_%, $(LOCAL_NDK_STL_VARIANT))) - my_ndk_stl_include_path := \ - $(my_ndk_source_root)/cxx-stl/llvm-libc++/include - my_ndk_stl_include_path += \ - $(my_ndk_source_root)/cxx-stl/llvm-libc++abi/include + my_llvm_dir := 
$(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION) + my_libcxx_arch_dir := $(my_llvm_dir)/android_libc++/ndk/$($(LOCAL_2ND_ARCH_VAR_PREFIX)PREBUILT_LIBCXX_ARCH_DIR) - my_libcxx_libdir := \ - $(my_ndk_source_root)/cxx-stl/llvm-libc++/libs/$(my_cpu_variant) + # Include the target-specific __config_site file followed by the generic libc++ headers. + my_ndk_stl_include_path := $(my_libcxx_arch_dir)/include/c++/v1 + my_ndk_stl_include_path += $(my_llvm_dir)/include/c++/v1 + my_libcxx_libdir := $(my_libcxx_arch_dir)/lib ifeq (c++_static,$(LOCAL_NDK_STL_VARIANT)) my_ndk_stl_static_lib := \ @@ -258,7 +258,7 @@ ifneq ($(LOCAL_SDK_VERSION),) my_ndk_stl_shared_lib_fullpath := $(my_libcxx_libdir)/libc++_shared.so endif - my_ndk_stl_static_lib += $(my_libcxx_libdir)/libunwind.a + my_ndk_stl_static_lib += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBUNWIND) my_ldlibs += -ldl else # LOCAL_NDK_STL_VARIANT must be none # Do nothing. @@ -330,18 +330,20 @@ ifneq ($(call module-in-vendor-or-product),) ifneq ($(LOCAL_IN_VENDOR),) # Vendor modules have LOCAL_IN_VENDOR my_cflags += -D__ANDROID_VENDOR__ - - ifeq ($(BOARD_API_LEVEL),) - # TODO(b/314036847): This is a fallback for UDC targets. - # This must be a build failure when UDC is no longer built from this source tree. - my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION) - else - my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL) - endif else ifneq ($(LOCAL_IN_PRODUCT),) # Product modules have LOCAL_IN_PRODUCT my_cflags += -D__ANDROID_PRODUCT__ endif + + # Define __ANDROID_VENDOR_API__ for both product and vendor variants because + # they both use the same LLNDK libraries. + ifeq ($(BOARD_API_LEVEL),) + # TODO(b/314036847): This is a fallback for UDC targets. + # This must be a build failure when UDC is no longer built from this source tree. 
+ my_cflags += -D__ANDROID_VENDOR_API__=$(PLATFORM_SDK_VERSION) + else + my_cflags += -D__ANDROID_VENDOR_API__=$(BOARD_API_LEVEL) + endif endif ifndef LOCAL_IS_HOST_MODULE diff --git a/core/board_config.mk b/core/board_config.mk index d3f0493a6c..5606964950 100644 --- a/core/board_config.mk +++ b/core/board_config.mk @@ -237,6 +237,7 @@ else .KATI_READONLY := TARGET_DEVICE_DIR endif +$(call dump-phase-start,BOARD,,,, build/make/core/board_config.mk) ifndef RBC_PRODUCT_CONFIG include $(board_config_mk) else @@ -261,6 +262,7 @@ else include $(OUT_DIR)/rbc/rbc_board_config_results.mk endif +$(call dump-phase-end, build/make/core/board_config.mk) ifneq (,$(and $(TARGET_ARCH),$(TARGET_ARCH_SUITE))) $(error $(board_config_mk) erroneously sets both TARGET_ARCH and TARGET_ARCH_SUITE) diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk index f18747a44b..126482f72c 100644 --- a/core/clang/TARGET_arm.mk +++ b/core/clang/TARGET_arm.mk @@ -4,7 +4,10 @@ $(clang_2nd_arch_prefix)RS_COMPAT_TRIPLE := armv7-none-linux-gnueabi $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-arm-android.a $(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-arm-android.a +$(clang_2nd_arch_prefix)TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/arm/libunwind.a # Address sanitizer clang config $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan + +$(clang_2nd_arch_prefix)PREBUILT_LIBCXX_ARCH_DIR := arm diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk index 42bed0aaed..e7ab6cb500 100644 --- a/core/clang/TARGET_arm64.mk +++ b/core/clang/TARGET_arm64.mk @@ -4,7 +4,10 @@ RS_COMPAT_TRIPLE := aarch64-linux-android TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-aarch64-android.a TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-aarch64-android.a +TARGET_LIBUNWIND := 
$(LLVM_RTLIB_PATH)/aarch64/libunwind.a # Address sanitizer clang config ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64 + +PREBUILT_LIBCXX_ARCH_DIR := aarch64 diff --git a/core/clang/TARGET_riscv64.mk b/core/clang/TARGET_riscv64.mk index cfb5c7d0ea..58c9c7bf54 100644 --- a/core/clang/TARGET_riscv64.mk +++ b/core/clang/TARGET_riscv64.mk @@ -4,7 +4,10 @@ RS_COMPAT_TRIPLE := riscv64-linux-android TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-riscv64-android.a TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-riscv64-android.a +TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/riscv64/libunwind.a # Address sanitizer clang config ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64 + +PREBUILT_LIBCXX_ARCH_DIR := riscv64 diff --git a/core/clang/TARGET_x86.mk b/core/clang/TARGET_x86.mk index 5491a05978..1a08c79518 100644 --- a/core/clang/TARGET_x86.mk +++ b/core/clang/TARGET_x86.mk @@ -4,7 +4,10 @@ $(clang_2nd_arch_prefix)RS_COMPAT_TRIPLE := i686-linux-android $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686-android.a $(clang_2nd_arch_prefix)TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-i686-android.a +$(clang_2nd_arch_prefix)TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/i386/libunwind.a # Address sanitizer clang config $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan + +$(clang_2nd_arch_prefix)PREBUILT_LIBCXX_ARCH_DIR := i386 diff --git a/core/clang/TARGET_x86_64.mk b/core/clang/TARGET_x86_64.mk index 167db72e74..f39b41ebf4 100644 --- a/core/clang/TARGET_x86_64.mk +++ b/core/clang/TARGET_x86_64.mk @@ -4,7 +4,10 @@ RS_COMPAT_TRIPLE := x86_64-linux-android TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-x86_64-android.a 
TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-x86_64-android.a +TARGET_LIBUNWIND := $(LLVM_RTLIB_PATH)/x86_64/libunwind.a # Address sanitizer clang config ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64 ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64 + +PREBUILT_LIBCXX_ARCH_DIR := x86_64 diff --git a/core/combo/arch/arm64/armv9-2a.mk b/core/combo/arch/arm64/armv9-2a.mk new file mode 100644 index 0000000000..69ffde014b --- /dev/null +++ b/core/combo/arch/arm64/armv9-2a.mk @@ -0,0 +1,18 @@ +# +# Copyright (C) 2023 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# .mk file required to support build for the ARMv9.2-A arch variant. +# The file just needs to be present, it does not need to contain anything. diff --git a/core/combo/arch/x86/alderlake.mk b/core/combo/arch/x86/alderlake.mk new file mode 100644 index 0000000000..a7ae6ed679 --- /dev/null +++ b/core/combo/arch/x86/alderlake.mk @@ -0,0 +1,6 @@ +# Configuration for Linux on x86. +# Generating binaries for processors +# that have AVX2 feature flag +# + +ARCH_X86_HAVE_SSE4_1 := true diff --git a/core/combo/arch/x86_64/alderlake.mk b/core/combo/arch/x86_64/alderlake.mk new file mode 100644 index 0000000000..a7ae6ed679 --- /dev/null +++ b/core/combo/arch/x86_64/alderlake.mk @@ -0,0 +1,6 @@ +# Configuration for Linux on x86. 
+# Generating binaries for processors +# that have AVX2 feature flag +# + +ARCH_X86_HAVE_SSE4_1 := true diff --git a/core/config.mk b/core/config.mk index 82b63cf76d..192c8b28c8 100644 --- a/core/config.mk +++ b/core/config.mk @@ -173,6 +173,7 @@ $(KATI_obsolete_var BOARD_PREBUILT_PVMFWIMAGE,pvmfw.bin is now built in AOSP and $(KATI_obsolete_var BUILDING_PVMFW_IMAGE,BUILDING_PVMFW_IMAGE is no longer used) $(KATI_obsolete_var BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(KATI_obsolete_var FS_GET_STATS) +$(KATI_obsolete_var BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES) # Used to force goals to build. Only use for conditionally defined goals. .PHONY: FORCE @@ -363,8 +364,7 @@ endif # configs, generally for cross-cutting features. # Build broken variables that should be treated as booleans -_build_broken_bool_vars := \ - BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES \ +_build_broken_bool_vars := # Build broken variables that should be treated as lists _build_broken_list_vars := \ @@ -811,6 +811,12 @@ ifeq ($(PRODUCT_FULL_TREBLE),true) BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true endif +ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),36),) + ifneq ($(NEED_AIDL_NDK_PLATFORM_BACKEND),) + $(error Must not set NEED_AIDL_NDK_PLATFORM_BACKEND, but it is set to: $(NEED_AIDL_NDK_PLATFORM_BACKEND). Support will be removed.) + endif +endif + # Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching # devices if unset. ifndef BOARD_SYSTEMSDK_VERSIONS @@ -1251,14 +1257,43 @@ BUILD_WARNING_BAD_OPTIONAL_USES_LIBS_ALLOWLIST := LegacyCamera Gallery2 # in the source tree. 
dont_bother_goals := out product-graph +ifeq ($(TARGET_SYSTEM_PROP),) +TARGET_SYSTEM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system.prop) +endif + +ifeq ($(TARGET_SYSTEM_EXT_PROP),) +TARGET_SYSTEM_EXT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop) +endif + +ifeq ($(TARGET_PRODUCT_PROP),) +TARGET_PRODUCT_PROP := $(wildcard $(TARGET_DEVICE_DIR)/product.prop) +endif + +ifeq ($(TARGET_ODM_PROP),) +TARGET_ODM_PROP := $(wildcard $(TARGET_DEVICE_DIR)/odm.prop) +endif + +.KATI_READONLY := \ + TARGET_SYSTEM_PROP \ + TARGET_SYSTEM_EXT_PROP \ + TARGET_PRODUCT_PROP \ + TARGET_ODM_PROP \ + include $(BUILD_SYSTEM)/sysprop_config.mk # Make ANDROID Soong config variables visible to Android.mk files, for # consistency with those defined in BoardConfig.mk files. include $(BUILD_SYSTEM)/android_soong_config_vars.mk -SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables -SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).extra.variables +# EMMA_INSTRUMENT is set to true when coverage is enabled. Creates a suffix to +# differeciate the coverage version of ninja files. This will save 5 minutes of +# build time used to regenerate ninja. 
+ifeq (true,$(EMMA_INSTRUMENT)) +COVERAGE_SUFFIX := .coverage +endif + +SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).variables +SOONG_EXTRA_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).extra.variables ifeq ($(CALLED_FROM_SETUP),true) include $(BUILD_SYSTEM)/ninja_config.mk diff --git a/core/definitions.mk b/core/definitions.mk index b30b1596d1..cd1b36e4c7 100644 --- a/core/definitions.mk +++ b/core/definitions.mk @@ -3612,6 +3612,7 @@ $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \ $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \ $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \ $$(my_compat_dist_test_data_$(suite))) \ + $(eval COMPATIBILITY.$(suite).ARCH_DIRS.$(my_register_name) := $(my_compat_module_arch_dir_$(suite).$(my_register_name))) \ $(eval COMPATIBILITY.$(suite).API_MAP_FILES += $$(my_compat_api_map_$(suite))) \ $(eval COMPATIBILITY.$(suite).SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES += $(LOCAL_SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) \ $(eval ALL_COMPATIBILITY_DIST_FILES += $$(my_compat_dist_$(suite))) \ diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk index 26b8b17a49..88e0cc7452 100644 --- a/core/dex_preopt.mk +++ b/core/dex_preopt.mk @@ -13,47 +13,6 @@ else install-on-system-other = $(filter-out $(PRODUCT_DEXPREOPT_SPEED_APPS) $(PRODUCT_SYSTEM_SERVER_APPS),$(basename $(notdir $(filter $(foreach f,$(SYSTEM_OTHER_ODEX_FILTER),$(TARGET_OUT)/$(f)),$(1))))) endif -# We want to install the profile even if we are not using preopt since it is required to generate -# the image on the device. -ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED),$(PRODUCT_OUT)) - -# Install boot images. Note that there can be multiple. 
-my_boot_image_arch := TARGET_ARCH -my_boot_image_out := $(PRODUCT_OUT) -my_boot_image_syms := $(TARGET_OUT_UNSTRIPPED) -DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE := \ - $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(strip \ - $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \ - $(my_boot_image_module))) -ifdef TARGET_2ND_ARCH - my_boot_image_arch := TARGET_2ND_ARCH - 2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE := \ - $(foreach my_boot_image_name,$(DEXPREOPT_IMAGE_NAMES),$(strip \ - $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \ - $(my_boot_image_module))) -endif -# Install boot images for testing on host. We exclude framework image as it is not part of art manifest. -my_boot_image_arch := HOST_ARCH -my_boot_image_out := $(HOST_OUT) -my_boot_image_syms := $(HOST_OUT)/symbols -HOST_BOOT_IMAGE_MODULE := \ - $(foreach my_boot_image_name,art_host,$(strip \ - $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \ - $(my_boot_image_module))) -HOST_BOOT_IMAGE := $(call module-installed-files,$(HOST_BOOT_IMAGE_MODULE)) -ifdef HOST_2ND_ARCH - my_boot_image_arch := HOST_2ND_ARCH - 2ND_HOST_BOOT_IMAGE_MODULE := \ - $(foreach my_boot_image_name,art_host,$(strip \ - $(eval include $(BUILD_SYSTEM)/dex_preopt_libart.mk) \ - $(my_boot_image_module))) - 2ND_HOST_BOOT_IMAGE := $(call module-installed-files,$(2ND_HOST_BOOT_IMAGE_MODULE)) -endif -my_boot_image_arch := -my_boot_image_out := -my_boot_image_syms := -my_boot_image_module := - # Build the boot.zip which contains the boot jars and their compilation output # We can do this only if preopt is enabled and if the product uses libart config (which sets the # default properties for preopting). 
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk index d51de33273..f1e9fb59b7 100644 --- a/core/dex_preopt_config.mk +++ b/core/dex_preopt_config.mk @@ -1,4 +1,4 @@ -DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config +DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt${COVERAGE_SUFFIX}.config ENABLE_PREOPT := true ENABLE_PREOPT_BOOT_IMAGES := true diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk deleted file mode 100644 index a2c9942a41..0000000000 --- a/core/dex_preopt_libart.mk +++ /dev/null @@ -1,109 +0,0 @@ -#################################### -# ART boot image installation -# Input variables: -# my_boot_image_name: the boot image to install -# my_boot_image_arch: the architecture to install (e.g. TARGET_ARCH, not expanded) -# my_boot_image_out: the install directory (e.g. $(PRODUCT_OUT)) -# my_boot_image_syms: the symbols director (e.g. $(TARGET_OUT_UNSTRIPPED)) -# -# Output variables: -# my_boot_image_module: the created module name. Empty if no module is created. -# -# Install the boot images compiled by Soong. -# Create a module named dexpreopt_bootjar.$(my_boot_image_name)_$($(my_boot_image_arch)) -# that installs all of boot image files. -# If there is no file to install for $(my_boot_image_name), for example when -# building an unbundled build, then no module is created. -# -#################################### - -# Takes a list of src:dest install pairs and returns a new list with a path -# prefixed to each dest value. -# $(1): list of src:dest install pairs -# $(2): path to prefix to each dest value -define prefix-copy-many-files-dest -$(foreach v,$(1),$(call word-colon,1,$(v)):$(2)$(call word-colon,2,$(v))) -endef - -# Converts an architecture-specific vdex path into a location that can be shared -# between architectures. 
-define vdex-shared-install-path -$(dir $(patsubst %/,%,$(dir $(1))))$(notdir $(1)) -endef - -# Takes a list of src:dest install pairs of vdex files and returns a new list -# where each dest has been rewritten to the shared location for vdex files. -define vdex-copy-many-files-shared-dest -$(foreach v,$(1),$(call word-colon,1,$(v)):$(call vdex-shared-install-path,$(call word-colon,2,$(v)))) -endef - -# Creates a rule to symlink an architecture specific vdex file to the shared -# location for that vdex file. -define symlink-vdex-file -$(strip \ - $(call symlink-file,\ - $(call vdex-shared-install-path,$(1)),\ - ../$(notdir $(1)),\ - $(1))\ - $(1)) -endef - -# Takes a list of src:dest install pairs of vdex files and creates rules to -# symlink each dest to the shared location for that vdex file. -define symlink-vdex-files -$(foreach v,$(1),$(call symlink-vdex-file,$(call word-colon,2,$(v)))) -endef - -my_boot_image_module := - -my_suffix := $(my_boot_image_name)_$($(my_boot_image_arch)) -my_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out)) -my_vdex_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_VDEX_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_out)) -my_vdex_copy_shared_pairs := $(call vdex-copy-many-files-shared-dest,$(my_vdex_copy_pairs)) -ifeq (,$(filter %_2ND_ARCH,$(my_boot_image_arch))) - # Only install the vdex to the shared location for the primary architecture. - my_copy_pairs += $(my_vdex_copy_shared_pairs) -endif - -my_unstripped_copy_pairs := $(call prefix-copy-many-files-dest,$(DEXPREOPT_IMAGE_UNSTRIPPED_BUILT_INSTALLED_$(my_suffix)),$(my_boot_image_syms)) - -# Generate the boot image module only if there is any file to install. 
-ifneq (,$(strip $(my_copy_pairs))) - my_first_pair := $(firstword $(my_copy_pairs)) - my_rest_pairs := $(wordlist 2,$(words $(my_copy_pairs)),$(my_copy_pairs)) - - my_first_src := $(call word-colon,1,$(my_first_pair)) - my_first_dest := $(call word-colon,2,$(my_first_pair)) - - my_installed := $(call copy-many-files,$(my_copy_pairs)) - my_unstripped_installed := $(call copy-many-files,$(my_unstripped_copy_pairs)) - - my_symlinks := $(call symlink-vdex-files,$(my_vdex_copy_pairs)) - - # We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM). - LOCAL_PATH := $(BUILD_SYSTEM) - # Hack to let these pseudo-modules wrapped around Soong modules use LOCAL_SOONG_INSTALLED_MODULE. - LOCAL_MODULE_MAKEFILE := $(SOONG_ANDROID_MK) - - include $(CLEAR_VARS) - LOCAL_MODULE := dexpreopt_bootjar.$(my_suffix) - LOCAL_PREBUILT_MODULE_FILE := $(my_first_src) - LOCAL_MODULE_PATH := $(dir $(my_first_dest)) - LOCAL_MODULE_STEM := $(notdir $(my_first_dest)) - LOCAL_SOONG_INSTALL_PAIRS := $(my_copy_pairs) - LOCAL_SOONG_INSTALL_SYMLINKS := $(my_symlinks) - LOCAL_SOONG_INSTALLED_MODULE := $(my_first_dest) - LOCAL_SOONG_LICENSE_METADATA := $(DEXPREOPT_IMAGE_LICENSE_METADATA_$(my_suffix)) - ifneq (,$(strip $(filter HOST_%,$(my_boot_image_arch)))) - LOCAL_IS_HOST_MODULE := true - endif - LOCAL_MODULE_CLASS := ETC - include $(BUILD_PREBUILT) - $(LOCAL_BUILT_MODULE): | $(my_unstripped_installed) - # Installing boot.art causes all boot image bits to be installed. - # Keep this old behavior in case anyone still needs it. 
- $(LOCAL_INSTALLED_MODULE): $(wordlist 2,$(words $(my_installed)),$(my_installed)) $(my_symlinks) - $(my_all_targets): $(my_installed) $(my_symlinks) - - my_boot_image_module := $(LOCAL_MODULE) -endif # my_copy_pairs != empty diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk index 640fe10f9c..eb4c822dc5 100644 --- a/core/dumpconfig.mk +++ b/core/dumpconfig.mk @@ -56,7 +56,7 @@ BUILD_DATETIME_FILE := $(OUT_DIR)/build_date.txt # Escape quotation marks for CSV, and wraps in quotation marks. define escape-for-csv -"$(subst ","",$1)" +"$(subst ","",$(subst $(newline), ,$1))" endef # Args: @@ -68,7 +68,7 @@ endef # Args: # $(1): include stack define dump-import-done -$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1)))) +$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1)),$(filter-out $(1),$(MAKEFILE_LIST)))) endef # Args: diff --git a/core/envsetup.mk b/core/envsetup.mk index c063f60a15..f82e861abf 100644 --- a/core/envsetup.mk +++ b/core/envsetup.mk @@ -417,6 +417,7 @@ HOST_OUT_SDK_ADDON := $(HOST_OUT)/sdk_addon HOST_OUT_NATIVE_TESTS := $(HOST_OUT)/nativetest64 HOST_OUT_COVERAGE := $(HOST_OUT)/coverage HOST_OUT_TESTCASES := $(HOST_OUT)/testcases +HOST_OUT_ETC := $(HOST_OUT)/etc .KATI_READONLY := \ HOST_OUT_EXECUTABLES \ HOST_OUT_SHARED_LIBRARIES \ @@ -425,7 +426,8 @@ HOST_OUT_TESTCASES := $(HOST_OUT)/testcases HOST_OUT_SDK_ADDON \ HOST_OUT_NATIVE_TESTS \ HOST_OUT_COVERAGE \ - HOST_OUT_TESTCASES + HOST_OUT_TESTCASES \ + HOST_OUT_ETC HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT)/bin HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk index 5491247057..4959edd4c3 100644 --- a/core/install_jni_libs_internal.mk +++ b/core/install_jni_libs_internal.mk @@ -38,8 +38,9 @@ ifdef my_embed_jni $(error LOCAL_SDK_VERSION must be defined with LOCAL_NDK_STL_VARIANT, \ LOCAL_PACKAGE_NAME=$(LOCAL_PACKAGE_NAME)) endif + my_libcxx_arch := 
$($(LOCAL_2ND_ARCH_VAR_PREFIX)PREBUILT_LIBCXX_ARCH_DIR) my_jni_shared_libraries += \ - $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources/cxx-stl/llvm-libc++/libs/$(TARGET_$(my_2nd_arch_prefix)CPU_ABI)/libc++_shared.so + $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/android_libc++/ndk/$(my_libcxx_arch)/lib/libc++_shared.so endif # Set the abi directory used by the local JNI shared libraries. diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk index 46393acb12..4b6eea7616 100644 --- a/core/java_prebuilt_internal.mk +++ b/core/java_prebuilt_internal.mk @@ -172,6 +172,12 @@ framework_res_package_export := \ endif endif +# transitive-res-packages is only populated for Soong modules for now, but needs +# to exist so that other Make modules can depend on it. Create an empty file. +my_transitive_res_packages := $(intermediates.COMMON)/transitive-res-packages +$(my_transitive_res_packages): + touch $@ + my_res_package := $(intermediates.COMMON)/package-res.apk # We needed only very few PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones. 
diff --git a/core/layoutlib_data.mk b/core/layoutlib_data.mk index e45f7efe16..e420a004de 100644 --- a/core/layoutlib_data.mk +++ b/core/layoutlib_data.mk @@ -66,11 +66,19 @@ $(call dist-for-goals,layoutlib,$(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop:lay # Resource files from frameworks/base/core/res/res LAYOUTLIB_RES := $(call intermediates-dir-for,PACKAGING,layoutlib-res,HOST,COMMON) LAYOUTLIB_RES_FILES := $(shell find frameworks/base/core/res/res -type f -not -path 'frameworks/base/core/res/res/values-m[nc]c*' | sort) -$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES) +EMULATED_OVERLAYS_FILES := $(shell find frameworks/base/packages/overlays/*/res/ | sort) +DEVICE_OVERLAYS_FILES := $(shell find device/generic/goldfish/phone/overlay/frameworks/base/packages/overlays/*/AndroidOverlay/res/ | sort) +$(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES) rm -rf $@ - echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist.txt - $(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist.txt -o $(LAYOUTLIB_RES)/temp.zip - rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp.zip + echo $(LAYOUTLIB_RES_FILES) > $(LAYOUTLIB_RES)/filelist_res.txt + $(SOONG_ZIP) -C frameworks/base/core/res -l $(LAYOUTLIB_RES)/filelist_res.txt -o $(LAYOUTLIB_RES)/temp_res.zip + echo $(EMULATED_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt + $(SOONG_ZIP) -C frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_emulated_overlays.txt -o $(LAYOUTLIB_RES)/temp_emulated_overlays.zip + echo $(DEVICE_OVERLAYS_FILES) > $(LAYOUTLIB_RES)/filelist_device_overlays.txt + $(SOONG_ZIP) -C device/generic/goldfish/phone/overlay/frameworks/base/packages -l $(LAYOUTLIB_RES)/filelist_device_overlays.txt -o $(LAYOUTLIB_RES)/temp_device_overlays.zip + rm -rf $(LAYOUTLIB_RES)/data && unzip -q -d 
$(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_res.zip + unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_emulated_overlays.zip + unzip -q -d $(LAYOUTLIB_RES)/data $(LAYOUTLIB_RES)/temp_device_overlays.zip rm -rf $(LAYOUTLIB_RES)/compiled && mkdir $(LAYOUTLIB_RES)/compiled && $(HOST_OUT_EXECUTABLES)/aapt2 compile $(LAYOUTLIB_RES)/data/res/**/*.9.png -o $(LAYOUTLIB_RES)/compiled printf '<?xml version="1.0" encoding="utf-8"?>\n<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.google.android.layoutlib" />' > $(LAYOUTLIB_RES)/AndroidManifest.xml $(HOST_OUT_EXECUTABLES)/aapt2 link -R $(LAYOUTLIB_RES)/compiled/* -o $(LAYOUTLIB_RES)/compiled.apk --manifest $(LAYOUTLIB_RES)/AndroidManifest.xml @@ -78,7 +86,7 @@ $(LAYOUTLIB_RES)/layoutlib-res.zip: $(SOONG_ZIP) $(HOST_OUT_EXECUTABLES)/aapt2 $ for f in $(LAYOUTLIB_RES)/compiled_apk/res/*; do mv "$$f" "$${f/-v4/}";done for f in $(LAYOUTLIB_RES)/compiled_apk/res/**/*.9.png; do mv "$$f" "$${f/.9.png/.compiled.9.png}";done cp -r $(LAYOUTLIB_RES)/compiled_apk/res $(LAYOUTLIB_RES)/data - $(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/res -o $@ + $(SOONG_ZIP) -C $(LAYOUTLIB_RES)/data -D $(LAYOUTLIB_RES)/data/ -o $@ $(call dist-for-goals,layoutlib,$(LAYOUTLIB_RES)/layoutlib-res.zip:layoutlib_native/res.zip) @@ -132,16 +140,26 @@ $(LAYOUTLIB_SBOM)/sbom-metadata.csv: echo $(_path),,,,,,Y,$f,,, >> $@; \ ) + $(foreach f,$(EMULATED_OVERLAYS_FILES), \ + $(eval _path := $(subst frameworks/base/packages,data,$f)) \ + echo $(_path),,,,,,Y,$f,,, >> $@; \ + ) + + $(foreach f,$(DEVICE_OVERLAYS_FILES), \ + $(eval _path := $(subst device/generic/goldfish/phone/overlay/frameworks/base/packages,data,$f)) \ + echo $(_path),,,,,,Y,$f,,, >> $@; \ + ) + .PHONY: layoutlib-sbom layoutlib-sbom: $(LAYOUTLIB_SBOM)/layoutlib.spdx.json -$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) 
$(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES) +$(LAYOUTLIB_SBOM)/layoutlib.spdx.json: $(PRODUCT_OUT)/always_dirty_file.txt $(GEN_SBOM) $(LAYOUTLIB_SBOM)/sbom-metadata.csv $(_layoutlib_font_config_files) $(_layoutlib_fonts_files) $(LAYOUTLIB_BUILD_PROP)/layoutlib-build.prop $(_layoutlib_keyboard_files) $(LAYOUTLIB_RES_FILES) $(EMULATED_OVERLAYS_FILES) $(DEVICE_OVERLAYS_FILES) rm -rf $@ $(GEN_SBOM) --output_file $@ --metadata $(LAYOUTLIB_SBOM)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --module_name "layoutlib" --json $(call dist-for-goals,layoutlib,$(LAYOUTLIB_SBOM)/layoutlib.spdx.json:layoutlib_native/sbom/layoutlib.spdx.json) # Generate SBOM of framework_res.jar that is created in release_layoutlib.sh. -# The generated SBOM contains placeholders for release_layotlib.sh to substitute, and the placeholders include: +# The generated SBOM contains placeholders for release_layoutlib.sh to substitute, and the placeholders include: # document name, document namespace, document creation info, organization and SHA1 value of framework_res.jar. 
GEN_SBOM_FRAMEWORK_RES := $(HOST_OUT_EXECUTABLES)/generate-sbom-framework_res .PHONY: layoutlib-framework_res-sbom diff --git a/core/main.mk b/core/main.mk index 8d737939fd..c59de8ed45 100644 --- a/core/main.mk +++ b/core/main.mk @@ -31,8 +31,7 @@ endif .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),SOONG_CONFIG_$(n)) .KATI_READONLY := $(foreach n,$(SOONG_CONFIG_NAMESPACES),$(foreach k,$(SOONG_CONFIG_$(n)),SOONG_CONFIG_$(n)_$(k))) -include $(SOONG_MAKEVARS_MK) - +include $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk YACC :=$= $(BISON) -d include $(BUILD_SYSTEM)/clang/config.mk @@ -276,17 +275,23 @@ FULL_BUILD := true # Include all of the makefiles in the system # -subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT).mk $(SOONG_ANDROID_MK) +subdir_makefiles := $(SOONG_OUT_DIR)/installs-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk $(SOONG_ANDROID_MK) + # Android.mk files are only used on Linux builds, Mac only supports Android.bp ifeq ($(HOST_OS),linux) subdir_makefiles += $(file <$(OUT_DIR)/.module_paths/Android.mk.list) endif -subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT).mk + +subdir_makefiles += $(SOONG_OUT_DIR)/late-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk + subdir_makefiles_total := $(words int $(subdir_makefiles) post finish) .KATI_READONLY := subdir_makefiles_total $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk))) +# Build bootloader.img/radio.img, and unpack the partitions. +include $(BUILD_SYSTEM)/tasks/tools/update_bootloader_radio_image.mk + # For an unbundled image, we can skip blueprint_tools because unbundled image # aims to remove a large number framework projects from the manifest, the # sources or dependencies for these tools may be missing from the tree. 
@@ -295,6 +300,9 @@ droid_targets : blueprint_tools checkbuild: blueprint_tests endif +# Create necessary directories and symlinks in the root filesystem +include system/core/rootdir/create_root_structure.mk + endif # dont_bother ifndef subdir_makefiles_total @@ -679,17 +687,20 @@ endef # Scan all modules in general-tests, device-tests and other selected suites and # flatten the shared library dependencies. define update-host-shared-libs-deps-for-suites -$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests,\ +$(foreach suite,general-tests device-tests vts tvts art-host-tests host-unit-tests camera-hal-tests,\ $(foreach m,$(COMPATIBILITY.$(suite).MODULES),\ $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\ $(foreach dep,$(my_deps),\ $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\ - $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests),\ + $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests camera-hal-tests),\ $(eval my_testcases := $(HOST_OUT_TESTCASES)),\ $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\ $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\ - $(eval link_target := ../../../$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\ - $(eval symlink := $(my_testcases)/$(m)/shared_libs/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\ + $(eval prefix := ../../..) 
+ $(if $(strip $(patsubst %x86,,$(COMPATIBILITY.$(suite).ARCH_DIRS.$(m)))), \ + $(if $(strip $(patsubst %x86_64,,$(COMPATIBILITY.$(suite).ARCH_DIRS.$(m)))),$(eval prefix := ../..),),) \ + $(eval link_target := $(prefix)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\ + $(eval symlink := $(COMPATIBILITY.$(suite).ARCH_DIRS.$(m))/shared_libs/$(notdir $(f)))\ $(eval COMPATIBILITY.$(suite).SYMLINKS := \ $$(COMPATIBILITY.$(suite).SYMLINKS) $(f):$(link_target):$(symlink))\ $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(call declare-copy-target-license-metadata,$(target),$(f)))\ @@ -970,7 +981,8 @@ endef # Returns modules included automatically as a result of certain BoardConfig # variables being set. define auto-included-modules - llndk_in_system \ + $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES)) \ + llndk.libraries.txt \ $(if $(DEVICE_MANIFEST_FILE),vendor_manifest.xml) \ $(if $(DEVICE_MANIFEST_SKUS),$(foreach sku, $(DEVICE_MANIFEST_SKUS),vendor_manifest_$(sku).xml)) \ $(if $(ODM_MANIFEST_FILES),odm_manifest.xml) \ @@ -1855,80 +1867,18 @@ ifndef INSTALLED_RECOVERYIMAGE_TARGET filter_out_files += $(PRODUCT_OUT)/recovery/% endif +# userdata.img +ifndef BUILDING_USERDATA_IMAGE +filter_out_files += $(PRODUCT_OUT)/data/% +endif + installed_files := $(sort $(filter-out $(filter_out_files),$(filter $(PRODUCT_OUT)/%,$(modules_to_install)))) else installed_files := $(apps_only_installed_files) endif # TARGET_BUILD_APPS -# sbom-metadata.csv contains all raw data collected in Make for generating SBOM in generate-sbom.py. -# There are multiple columns and each identifies the source of an installed file for a specific case. -# The columns and their uses are described as below: -# installed_file: the file path on device, e.g. /product/app/Browser2/Browser2.apk -# module_path: the path of the module that generates the installed file, e.g. packages/apps/Browser2 -# soong_module_type: Soong module type, e.g. 
android_app, cc_binary -# is_prebuilt_make_module: Y, if the installed file is from a prebuilt Make module, see prebuilt_internal.mk -# product_copy_files: the installed file is from variable PRODUCT_COPY_FILES, e.g. device/google/cuttlefish/shared/config/init.product.rc:product/etc/init/init.rc -# kernel_module_copy_files: the installed file is from variable KERNEL_MODULE_COPY_FILES, similar to product_copy_files -# is_platform_generated: this is an aggregated value including some small cases instead of adding more columns. It is set to Y if any case is Y -# is_build_prop: build.prop in each partition, see sysprop.mk. -# is_notice_file: NOTICE.xml.gz in each partition, see Makefile. -# is_dexpreopt_image_profile: see the usage of DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED in Soong and Make -# is_product_system_other_avbkey: see INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET -# is_system_other_odex_marker: see INSTALLED_SYSTEM_OTHER_ODEX_MARKER -# is_event_log_tags_file: see variable event_log_tags_file in Makefile -# is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile. -# is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile. -# is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile. -# build_output_path: the path of the built file, used to calculate checksum -# static_libraries/whole_static_libraries: list of module name of the static libraries the file links against, e.g. libclang_rt.builtins or libclang_rt.builtins_32 -# Info of all static libraries of all installed files are collected in variable _all_static_libs that is used to list all the static library files in sbom-metadata.csv. -# See the second foreach loop in the rule of sbom-metadata.csv for the detailed info of static libraries collected in _all_static_libs. 
-# is_static_lib: whether the file is a static library - metadata_list := $(OUT_DIR)/.module_paths/METADATA.list metadata_files := $(subst $(newline),$(space),$(file <$(metadata_list))) -$(PRODUCT_OUT)/sbom-metadata.csv: - rm -f $@ - echo 'installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib' >> $@ - $(eval _all_static_libs :=) - $(foreach f,$(installed_files),\ - $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \ - $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \ - $(eval _build_output_path := $(PRODUCT_OUT)/$(_path_on_device)) \ - $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \ - $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \ - $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \ - $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \ - $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \ - $(eval _is_build_prop := $(call is-build-prop,$f)) \ - $(eval _is_notice_file := $(call is-notice-file,$f)) \ - $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \ - $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \ - $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \ - $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \ - $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \ - $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \ - $(eval _is_linker_config := $(if 
$(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \ - $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \ - $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \ - $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \ - $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \ - $(eval _static_libs := $(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES)) \ - $(eval _whole_static_libs := $(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES)) \ - $(foreach l,$(_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \ - $(foreach l,$(_whole_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \ - echo '/$(_path_on_device),$(_module_path),$(_soong_module_type),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_build_output_path),$(_static_libs),$(_whole_static_libs),' >> $@; \ - ) - $(foreach l,$(sort $(_all_static_libs)), \ - $(eval _lib_stem := $(call word-colon,1,$l)) \ - $(eval _module_path := $(call word-colon,2,$l)) \ - $(eval _soong_module_type := $(call word-colon,3,$l)) \ - $(eval _built_file := $(call word-colon,4,$l)) \ - $(eval _static_libs := $(ALL_STATIC_LIBRARIES.$l.STATIC_LIBRARIES)) \ - $(eval _whole_static_libs := $(ALL_STATIC_LIBRARIES.$l.WHOLE_STATIC_LIBRARIES)) \ - $(eval _is_static_lib := Y) \ - echo 
'$(_lib_stem).a,$(_module_path),$(_soong_module_type),,,,,$(_built_file),$(_static_libs),$(_whole_static_libs),$(_is_static_lib)' >> $@; \ - ) # Create metadata for compliance support in Soong .PHONY: make-compliance-metadata @@ -1950,7 +1900,6 @@ $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-metadata.csv: $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \ $(eval _is_build_prop := $(call is-build-prop,$f)) \ $(eval _is_notice_file := $(call is-notice-file,$f)) \ - $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \ $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \ $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \ $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \ @@ -1960,7 +1909,7 @@ $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-metadata.csv: $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \ $(eval _is_flags_file := $(if $(findstring $f, $(ALL_FLAGS_FILES)),Y)) \ $(eval _is_rootdir_symlink := $(if $(findstring $f, $(ALL_ROOTDIR_SYMLINKS)),Y)) \ - $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \ + $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)$(_is_flags_file)$(_is_rootdir_symlink)) \ $(eval 
_static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES))) \ $(eval _whole_static_libs := $(if $(_is_soong_module),,$(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES))) \ $(eval _license_text := $(if $(filter $(_build_output_path),$(ALL_NON_MODULES)),$(ALL_NON_MODULES.$(_build_output_path).NOTICES))) \ @@ -1988,22 +1937,13 @@ $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/make-modules.csv: $(SOONG_OUT_DIR)/compliance-metadata/$(TARGET_PRODUCT)/installed_files.stamp: $(installed_files) touch $@ -# (TODO: b/272358583 find another way of always rebuilding sbom.spdx) # Remove the always_dirty_file.txt whenever the makefile is evaluated $(shell rm -f $(PRODUCT_OUT)/always_dirty_file.txt) $(PRODUCT_OUT)/always_dirty_file.txt: touch $@ .PHONY: sbom -ifeq ($(TARGET_BUILD_APPS),) -sbom: $(PRODUCT_OUT)/sbom.spdx.json -$(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx -$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM) $(installed_files) $(metadata_list) $(metadata_files) $(PRODUCT_OUT)/always_dirty_file.txt - rm -rf $@ - $(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json - -$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json) -else +ifneq ($(TARGET_BUILD_APPS),) # Create build rules for generating SBOMs of unbundled APKs and APEXs # $1: sbom file # $2: sbom fragment file diff --git a/core/os_licensing.mk b/core/os_licensing.mk index 1e1b7df7a9..d15a3d0715 100644 --- a/core/os_licensing.mk +++ b/core/os_licensing.mk @@ -17,13 +17,17 @@ $(eval $(call xml-notice-rule,$(target_notice_file_xml_gz),"System image",$(syst $(eval $(call text-notice-rule,$(target_notice_file_txt),"System image",$(system_notice_file_message),$(SYSTEM_NOTICE_DEPS),$(SYSTEM_NOTICE_DEPS))) +ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true) $(installed_notice_html_or_xml_gz): $(target_notice_file_xml_gz) 
$(copy-file-to-target) endif +endif $(call declare-1p-target,$(target_notice_file_xml_gz)) +ifneq ($(USE_SOONG_DEFINED_SYSTEM_IMAGE),true) $(call declare-1p-target,$(installed_notice_html_or_xml_gz)) endif +endif .PHONY: vendorlicense vendorlicense: $(call corresponding-license-metadata, $(VENDOR_NOTICE_DEPS)) reportmissinglicenses diff --git a/core/packaging/flags.mk b/core/packaging/flags.mk index a77956bdea..ccb502ca11 100644 --- a/core/packaging/flags.mk +++ b/core/packaging/flags.mk @@ -18,7 +18,7 @@ # # TODO: Should we do all of the images in $(IMAGES_TO_BUILD)? -_FLAG_PARTITIONS := product system system_ext vendor +_FLAG_PARTITIONS := product system vendor # ----------------------------------------------------------------- @@ -28,7 +28,6 @@ _FLAG_PARTITIONS := product system system_ext vendor # $(1): built aconfig flags file (out) # $(2): installed aconfig flags file (out) # $(3): the partition (in) -# $(4): input aconfig files for the partition (in) define generate-partition-aconfig-flag-file $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1))) $(eval $(strip $(1)): PRIVATE_IN := $(strip $(4))) @@ -36,12 +35,14 @@ $(strip $(1)): $(ACONFIG) $(strip $(4)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ $$(ACONFIG) dump --dedup --format protobuf --out $$(PRIVATE_OUT) \ - --filter container:$$(strip $(3)) $$(addprefix --cache ,$$(PRIVATE_IN)), \ + --filter container:$(strip $(3)) \ + $$(addprefix --cache ,$$(PRIVATE_IN)), \ echo -n > $$(PRIVATE_OUT) \ ) $(call copy-one-file, $(1), $(2)) endef + # Create a summary file of build flags for each partition # $(1): built aconfig flags file (out) # $(2): installed aconfig flags file (out) @@ -59,16 +60,22 @@ $(strip $(1)): $(ACONFIG) $(strip $(3)) $(call copy-one-file, $(1), $(2)) endef - $(foreach partition, $(_FLAG_PARTITIONS), \ $(eval aconfig_flag_summaries_protobuf.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig_flags.pb) \ $(eval $(call generate-partition-aconfig-flag-file, \ - 
$(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \ - $(aconfig_flag_summaries_protobuf.$(partition)), \ - $(partition), \ - $(sort $(foreach m,$(call register-names-for-partition, $(partition)), \ + $(TARGET_OUT_FLAGS)/$(partition)/aconfig_flags.pb, \ + $(aconfig_flag_summaries_protobuf.$(partition)), \ + $(partition), \ + $(sort \ + $(foreach m, $(call register-names-for-partition, $(partition)), \ $(ALL_MODULES.$(m).ACONFIG_FILES) \ - )), \ + ) \ + $(if $(filter system, $(partition)), \ + $(foreach m, $(call register-names-for-partition, system_ext), \ + $(ALL_MODULES.$(m).ACONFIG_FILES) \ + ) \ + ) \ + ) \ )) \ ) @@ -90,42 +97,54 @@ $(eval $(call generate-global-aconfig-flag-file, \ # $(1): built aconfig flags storage package map file (out) # $(2): built aconfig flags storage flag map file (out) # $(3): built aconfig flags storage flag val file (out) -# $(4): installed aconfig flags storage package map file (out) -# $(5): installed aconfig flags storage flag map file (out) -# $(6): installed aconfig flags storage flag value file (out) -# $(7): input aconfig files for the partition (in) -# $(8): partition name +# $(4): built aconfig flags storage flag info file (out) +# $(5): installed aconfig flags storage package map file (out) +# $(6): installed aconfig flags storage flag map file (out) +# $(7): installed aconfig flags storage flag value file (out) +# $(8): installed aconfig flags storage flag info file (out) +# $(9): input aconfig files for the partition (in) +# $(10): partition name define generate-partition-aconfig-storage-file $(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1))) -$(eval $(strip $(1)): PRIVATE_IN := $(strip $(7))) -$(strip $(1)): $(ACONFIG) $(strip $(7)) +$(eval $(strip $(1)): PRIVATE_IN := $(strip $(9))) +$(strip $(1)): $(ACONFIG) $(strip $(9)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ - $$(ACONFIG) create-storage --container $(8) --file package_map --out $$(PRIVATE_OUT) \ + $$(ACONFIG) create-storage --container $(10) 
--file package_map --out $$(PRIVATE_OUT) \ $$(addprefix --cache ,$$(PRIVATE_IN)), \ ) touch $$(PRIVATE_OUT) $(eval $(strip $(2)): PRIVATE_OUT := $(strip $(2))) -$(eval $(strip $(2)): PRIVATE_IN := $(strip $(7))) -$(strip $(2)): $(ACONFIG) $(strip $(7)) +$(eval $(strip $(2)): PRIVATE_IN := $(strip $(9))) +$(strip $(2)): $(ACONFIG) $(strip $(9)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ - $$(ACONFIG) create-storage --container $(8) --file flag_map --out $$(PRIVATE_OUT) \ + $$(ACONFIG) create-storage --container $(10) --file flag_map --out $$(PRIVATE_OUT) \ $$(addprefix --cache ,$$(PRIVATE_IN)), \ ) touch $$(PRIVATE_OUT) $(eval $(strip $(3)): PRIVATE_OUT := $(strip $(3))) -$(eval $(strip $(3)): PRIVATE_IN := $(strip $(7))) -$(strip $(3)): $(ACONFIG) $(strip $(7)) +$(eval $(strip $(3)): PRIVATE_IN := $(strip $(9))) +$(strip $(3)): $(ACONFIG) $(strip $(9)) mkdir -p $$(dir $$(PRIVATE_OUT)) $$(if $$(PRIVATE_IN), \ - $$(ACONFIG) create-storage --container $(8) --file flag_val --out $$(PRIVATE_OUT) \ + $$(ACONFIG) create-storage --container $(10) --file flag_val --out $$(PRIVATE_OUT) \ $$(addprefix --cache ,$$(PRIVATE_IN)), \ ) touch $$(PRIVATE_OUT) -$(call copy-one-file, $(strip $(1)), $(4)) -$(call copy-one-file, $(strip $(2)), $(5)) -$(call copy-one-file, $(strip $(3)), $(6)) +$(eval $(strip $(4)): PRIVATE_OUT := $(strip $(4))) +$(eval $(strip $(4)): PRIVATE_IN := $(strip $(9))) +$(strip $(4)): $(ACONFIG) $(strip $(9)) + mkdir -p $$(dir $$(PRIVATE_OUT)) + $$(if $$(PRIVATE_IN), \ + $$(ACONFIG) create-storage --container $(10) --file flag_info --out $$(PRIVATE_OUT) \ + $$(addprefix --cache ,$$(PRIVATE_IN)), \ + ) + touch $$(PRIVATE_OUT) +$(call copy-one-file, $(strip $(1)), $(5)) +$(call copy-one-file, $(strip $(2)), $(6)) +$(call copy-one-file, $(strip $(3)), $(7)) +$(call copy-one-file, $(strip $(4)), $(8)) endef ifeq ($(RELEASE_CREATE_ACONFIG_STORAGE_FILE),true) @@ -133,13 +152,16 @@ $(foreach partition, $(_FLAG_PARTITIONS), \ $(eval 
aconfig_storage_package_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/package.map) \ $(eval aconfig_storage_flag_map.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.map) \ $(eval aconfig_storage_flag_val.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.val) \ + $(eval aconfig_storage_flag_info.$(partition) := $(PRODUCT_OUT)/$(partition)/etc/aconfig/flag.info) \ $(eval $(call generate-partition-aconfig-storage-file, \ $(TARGET_OUT_FLAGS)/$(partition)/package.map, \ $(TARGET_OUT_FLAGS)/$(partition)/flag.map, \ $(TARGET_OUT_FLAGS)/$(partition)/flag.val, \ + $(TARGET_OUT_FLAGS)/$(partition)/flag.info, \ $(aconfig_storage_package_map.$(partition)), \ $(aconfig_storage_flag_map.$(partition)), \ $(aconfig_storage_flag_val.$(partition)), \ + $(aconfig_storage_flag_info.$(partition)), \ $(aconfig_flag_summaries_protobuf.$(partition)), \ $(partition), \ )) \ @@ -155,6 +177,7 @@ required_flags_files := \ $(aconfig_storage_package_map.$(partition)) \ $(aconfig_storage_flag_map.$(partition)) \ $(aconfig_storage_flag_val.$(partition)) \ + $(aconfig_storage_flag_info.$(partition)) \ )) ALL_DEFAULT_INSTALLED_MODULES += $(required_flags_files) @@ -174,5 +197,5 @@ $(foreach partition, $(_FLAG_PARTITIONS), \ $(eval aconfig_storage_package_map.$(partition):=) \ $(eval aconfig_storage_flag_map.$(partition):=) \ $(eval aconfig_storage_flag_val.$(partition):=) \ + $(eval aconfig_storage_flag_info.$(partition):=) \ ) - diff --git a/core/product.mk b/core/product.mk index 832d0946e5..8fc40f820c 100644 --- a/core/product.mk +++ b/core/product.mk @@ -26,6 +26,7 @@ _product_single_value_vars += PRODUCT_NAME _product_single_value_vars += PRODUCT_MODEL _product_single_value_vars += PRODUCT_NAME_FOR_ATTESTATION _product_single_value_vars += PRODUCT_MODEL_FOR_ATTESTATION +_product_single_value_vars += PRODUCT_BASE_OS # Defines the ELF segment alignment for binaries (executables and shared libraries). 
# The ELF segment alignment has to be a PAGE_SIZE multiple. For example, if @@ -389,20 +390,6 @@ _product_single_value_vars += PRODUCT_OTA_FORCE_NON_AB_PACKAGE # If set, Java module in product partition cannot use hidden APIs. _product_single_value_vars += PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE -# If set, only java_sdk_library can be used at inter-partition dependency. -# Note: Build error if BOARD_VNDK_VERSION is not set while -# PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true, because -# PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY has no meaning if -# BOARD_VNDK_VERSION is not set. -# Note: When PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE is not set, there are -# no restrictions at dependency between system and product partition. -_product_single_value_vars += PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY - -# Allowlist for PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY option. -# Listed modules are allowed at inter-partition dependency even if it isn't -# a java_sdk_library module. -_product_list_vars += PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST - # Install a copy of the debug policy to the system_ext partition, and allow # init-second-stage to load debug policy from system_ext. # This option is only meant to be set by compliance GSI targets. @@ -435,8 +422,9 @@ _product_single_value_vars += PRODUCT_MEMCG_V2_FORCE_ENABLED # If true, the cgroup v2 hierarchy will be split into apps/system subtrees _product_single_value_vars += PRODUCT_CGROUP_V2_SYS_APP_ISOLATION_ENABLED -# List of .json files to be merged/compiled into vendor/etc/linker.config.pb +# List of .json files to be merged/compiled into vendor/etc/linker.config.pb and product/etc/linker.config.pb _product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS +_product_list_vars += PRODUCT_PRODUCT_LINKER_CONFIG_FRAGMENTS # Whether to use userfaultfd GC. 
# Possible values are: @@ -498,6 +486,10 @@ _product_single_value_vars += PRODUCT_BUILD_APPS_WITH_BUILD_NUMBER # If set, build would generate system image from Soong-defined module. _product_single_value_vars += PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE +# List of stub libraries specific to the product that are already present in the system image and +# should be included in the system_linker_config. +_product_list_vars += PRODUCT_EXTRA_STUB_LIBRARIES + .KATI_READONLY := _product_single_value_vars _product_list_vars _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars) diff --git a/core/product_config.mk b/core/product_config.mk index cc2fea96ac..738d4cff58 100644 --- a/core/product_config.mk +++ b/core/product_config.mk @@ -311,6 +311,14 @@ endif TARGET_DEVICE := $(PRODUCT_DEVICE) +# Allow overriding PLATFORM_BASE_OS when PRODUCT_BASE_OS is defined +ifdef PRODUCT_BASE_OS + PLATFORM_BASE_OS := $(PRODUCT_BASE_OS) +else + PLATFORM_BASE_OS := $(PLATFORM_BASE_OS_ENV_INPUT) +endif +.KATI_READONLY := PLATFORM_BASE_OS + # TODO: also keep track of things like "port", "land" in product files. 
# Figure out which resoure configuration options to use for this diff --git a/core/product_config.rbc b/core/product_config.rbc index 59e2c95903..20344f4f87 100644 --- a/core/product_config.rbc +++ b/core/product_config.rbc @@ -382,6 +382,11 @@ def _soong_config_set(g, nsname, var, value): _soong_config_namespace(g, nsname) g[_soong_config_namespaces_key][nsname][var]=_mkstrip(value) +def _soong_config_set_bool(g, nsname, var, value): + """Assigns the value to the variable in the namespace, and marks it as a boolean.""" + _soong_config_set(g, nsname, var, _filter("true", value)) + g["SOONG_CONFIG_TYPE_%s_%s" % (nsname, var)] = "bool" + def _soong_config_append(g, nsname, var, value): """Appends to the value of the variable in the namespace.""" _soong_config_namespace(g, nsname) @@ -861,6 +866,7 @@ rblf = struct( soong_config_namespace = _soong_config_namespace, soong_config_append = _soong_config_append, soong_config_set = _soong_config_set, + soong_config_set_bool = _soong_config_set_bool, soong_config_get = _soong_config_get, abspath = _abspath, add_product_dex_preopt_module_config = _add_product_dex_preopt_module_config, diff --git a/core/proguard.flags b/core/proguard.flags index aa406b983e..5148e56407 100644 --- a/core/proguard.flags +++ b/core/proguard.flags @@ -38,6 +38,17 @@ @com.android.internal.annotations.KeepForWeakReference <fields>; } +# Needed to ensure callback field references are kept in their respective +# owning classes when the downstream callback registrars only store weak refs. 
+-if @com.android.internal.annotations.WeaklyReferencedCallback class * +-keepclassmembers,allowaccessmodification class * { + <1> *; +} +-if class * extends @com.android.internal.annotations.WeaklyReferencedCallback ** +-keepclassmembers,allowaccessmodification class * { + <1> *; +} + # Understand the common @Keep annotation from various Android packages: # * android.support.annotation # * androidx.annotation diff --git a/core/project_definitions.mk b/core/project_definitions.mk index 5728b677e7..184b03e019 100644 --- a/core/project_definitions.mk +++ b/core/project_definitions.mk @@ -22,3 +22,6 @@ # Include definitions for prebuilt SDK, if present. # -include prebuilts/sdk/current/definitions.mk + +# SDV-specific config. +-include system/software_defined_vehicle/platform/config.mk diff --git a/core/ravenwood_test_config_template.xml b/core/ravenwood_test_config_template.xml index 16a22c0628..2f21baedf7 100644 --- a/core/ravenwood_test_config_template.xml +++ b/core/ravenwood_test_config_template.xml @@ -18,10 +18,9 @@ <option name="test-suite-tag" value="ravenwood" /> <option name="test-suite-tag" value="ravenwood-tests" /> - <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" /> + <option name="java-folder" value="prebuilts/jdk/jdk21/linux-x86/" /> <option name="use-ravenwood-resources" value="true" /> <option name="exclude-paths" value="java" /> - <option name="socket-timeout" value="10000" /> <option name="null-device" value="true" /> {EXTRA_CONFIGS} diff --git a/core/release_config.mk b/core/release_config.mk index 2898868682..fe2170ede4 100644 --- a/core/release_config.mk +++ b/core/release_config.mk @@ -131,6 +131,9 @@ ifneq (,$(_use_protobuf)) _args += --guard=false endif _args += --allow-missing=true + ifneq (,$(TARGET_PRODUCT)) + _args += --product $(TARGET_PRODUCT) + endif _flags_dir:=$(OUT_DIR)/soong/release-config _flags_file:=$(_flags_dir)/release_config-$(TARGET_PRODUCT)-$(TARGET_RELEASE).vars # release-config generates $(_flags_varmk) 
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml index 56d2312626..257c820746 100644 --- a/core/robolectric_test_config_template.xml +++ b/core/robolectric_test_config_template.xml @@ -18,7 +18,7 @@ <option name="test-suite-tag" value="robolectric" /> <option name="test-suite-tag" value="robolectric-tests" /> - <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" /> + <option name="java-folder" value="prebuilts/jdk/jdk21/linux-x86/" /> <option name="exclude-paths" value="java" /> <option name="use-robolectric-resources" value="true" /> @@ -33,5 +33,15 @@ <option name="java-flags" value="--add-opens=java.base/jdk.internal.util.random=ALL-UNNAMED"/> <!-- b/251387255 --> <option name="java-flags" value="--add-opens=java.base/java.io=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.net=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.nio=ALL-UNNAMED"/> <!-- required for ShadowVMRuntime --> + <option name="java-flags" value="--add-opens=java.base/java.security=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.text=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/java.util=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.base/jdk.internal.access=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=java.desktop/java.awt.font=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED"/> + <option name="java-flags" value="--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED"/> </test> </configuration> diff --git a/core/soong_android_app_set.mk b/core/soong_android_app_set.mk index ec3d8c85cb..d97980d2ba 100644 --- a/core/soong_android_app_set.mk +++ b/core/soong_android_app_set.mk @@ -9,10 +9,6 @@ endif 
LOCAL_BUILT_MODULE_STEM := package.apk LOCAL_INSTALLED_MODULE_STEM := $(notdir $(LOCAL_PREBUILT_MODULE_FILE)) -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. -LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) - ####################################### include $(BUILD_SYSTEM)/base_rules.mk ####################################### diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk index 3aa244c77f..df1cf2d369 100644 --- a/core/soong_app_prebuilt.mk +++ b/core/soong_app_prebuilt.mk @@ -29,16 +29,6 @@ full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.ja full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. -LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_CLASSES_JAR) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR) - ####################################### include $(BUILD_SYSTEM)/base_rules.mk ####################################### diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk index a1c64786ee..da608322f2 100644 --- a/core/soong_cc_rust_prebuilt.mk +++ b/core/soong_cc_rust_prebuilt.mk @@ -38,10 +38,6 @@ ifndef LOCAL_UNINSTALLABLE_MODULE endif endif -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. 
-LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) - my_check_same_vndk_variants := same_vndk_variants_stamp := ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true) @@ -61,7 +57,7 @@ ifeq ($(my_check_same_vndk_variants),true) # Note that because `checkbuild` doesn't check LOCAL_BUILT_MODULE for soong-built modules adding # the timestamp to LOCAL_BUILT_MODULE isn't enough. It is skipped when the vendor variant # isn't used at all and it may break in the downstream trees. - LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp) + LOCAL_ADDITIONAL_CHECKED_MODULE += $(same_vndk_variants_stamp) endif ####################################### diff --git a/core/soong_config.mk b/core/soong_config.mk index 12b41358f7..8c57ce64bf 100644 --- a/core/soong_config.mk +++ b/core/soong_config.mk @@ -1,5 +1,5 @@ -SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk -SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk +SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk +SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT)$(COVERAGE_SUFFIX).mk include $(BUILD_SYSTEM)/art_config.mk include $(BUILD_SYSTEM)/dex_preopt_config.mk @@ -26,7 +26,7 @@ ifeq ($(WRITE_SOONG_VARIABLES),true) $(shell mkdir -p $(dir $(SOONG_VARIABLES))) $(call json_start) -$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT)) +$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT)$(COVERAGE_SUFFIX)) $(call add_json_str, BuildId, $(BUILD_ID)) $(call add_json_str, BuildFingerprintFile, build_fingerprint.txt) @@ -152,7 +152,6 @@ $(call add_json_list, DeviceKernelHeaders, $(TARGET_DEVICE_KERNEL_ $(call add_json_str, VendorApiLevel, $(BOARD_API_LEVEL)) $(call add_json_list, ExtraVndkVersions, $(PRODUCT_EXTRA_VNDK_VERSIONS)) $(call add_json_list, DeviceSystemSdkVersions, $(BOARD_SYSTEMSDK_VERSIONS)) -$(call add_json_str, RecoverySnapshotVersion, $(RECOVERY_SNAPSHOT_VERSION)) $(call add_json_list, Platform_systemsdk_versions, 
$(PLATFORM_SYSTEMSDK_VERSIONS)) $(call add_json_bool, Malloc_low_memory, $(findstring true,$(MALLOC_SVELTE) $(MALLOC_LOW_MEMORY))) $(call add_json_bool, Malloc_zero_contents, $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS)))) @@ -167,8 +166,6 @@ $(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRI $(call add_json_list, BootJars, $(PRODUCT_BOOT_JARS)) $(call add_json_list, ApexBootJars, $(filter-out $(APEX_BOOT_JARS_EXCLUDED), $(PRODUCT_APEX_BOOT_JARS))) -$(call add_json_bool, VndkSnapshotBuildArtifacts, $(VNDK_SNAPSHOT_BUILD_ARTIFACTS)) - $(call add_json_map, BuildFlags) $(foreach flag,$(_ALL_RELEASE_FLAGS),\ $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).VALUE))) @@ -178,24 +175,6 @@ $(foreach flag,$(_ALL_RELEASE_FLAGS),\ $(call add_json_str,$(flag),$(_ALL_RELEASE_FLAGS.$(flag).TYPE))) $(call end_json_map) -$(call add_json_bool, DirectedVendorSnapshot, $(DIRECTED_VENDOR_SNAPSHOT)) -$(call add_json_map, VendorSnapshotModules) -$(foreach module,$(VENDOR_SNAPSHOT_MODULES),\ - $(call add_json_bool,$(module),true)) -$(call end_json_map) - -$(call add_json_bool, DirectedRecoverySnapshot, $(DIRECTED_RECOVERY_SNAPSHOT)) -$(call add_json_map, RecoverySnapshotModules) -$(foreach module,$(RECOVERY_SNAPSHOT_MODULES),\ - $(call add_json_bool,$(module),true)) -$(call end_json_map) - -$(call add_json_list, VendorSnapshotDirsIncluded, $(VENDOR_SNAPSHOT_DIRS_INCLUDED)) -$(call add_json_list, VendorSnapshotDirsExcluded, $(VENDOR_SNAPSHOT_DIRS_EXCLUDED)) -$(call add_json_list, RecoverySnapshotDirsIncluded, $(RECOVERY_SNAPSHOT_DIRS_INCLUDED)) -$(call add_json_list, RecoverySnapshotDirsExcluded, $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED)) -$(call add_json_bool, HostFakeSnapshotEnabled, $(HOST_FAKE_SNAPSHOT_ENABLE)) - $(call add_json_bool, MultitreeUpdateMeta, $(filter true,$(TARGET_MULTITREE_UPDATE_META))) $(call add_json_bool, Treble_linker_namespaces, $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES))) @@ -227,6 +206,7 @@ $(call add_json_list, 
BoardSepolicyM4Defs, $(BOARD_SEPOLICY_M4DEFS $(call add_json_str, BoardSepolicyVers, $(BOARD_SEPOLICY_VERS)) $(call add_json_str, SystemExtSepolicyPrebuiltApiDir, $(BOARD_SYSTEM_EXT_PREBUILT_DIR)) $(call add_json_str, ProductSepolicyPrebuiltApiDir, $(BOARD_PRODUCT_PREBUILT_DIR)) +$(call add_json_str, BoardPlatform, $(TARGET_BOARD_PLATFORM)) $(call add_json_str, PlatformSepolicyVersion, $(PLATFORM_SEPOLICY_VERSION)) $(call add_json_list, PlatformSepolicyCompatVersions, $(PLATFORM_SEPOLICY_COMPAT_VERSIONS)) @@ -256,6 +236,12 @@ $(call add_json_list, ProductPrivateSepolicyDirs, $(PRODUCT_PRIVATE_SEPOL $(call add_json_list, TargetFSConfigGen, $(TARGET_FS_CONFIG_GEN)) +# Although USE_SOONG_DEFINED_SYSTEM_IMAGE determines whether to use the system image specified by +# PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE, PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE is still used to compare +# installed files between make and soong, regardless of the USE_SOONG_DEFINED_SYSTEM_IMAGE setting. +$(call add_json_bool, UseSoongSystemImage, $(filter true,$(USE_SOONG_DEFINED_SYSTEM_IMAGE))) +$(call add_json_str, ProductSoongDefinedSystemImage, $(PRODUCT_SOONG_DEFINED_SYSTEM_IMAGE)) + $(call add_json_map, VendorVars) $(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\ $(call add_json_map, $(namespace))\ @@ -279,9 +265,6 @@ $(call end_json_map) $(call add_json_bool, EnforceProductPartitionInterface, $(filter true,$(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))) $(call add_json_str, DeviceCurrentApiLevelForVendorModules, $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)) -$(call add_json_bool, EnforceInterPartitionJavaSdkLibrary, $(filter true,$(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY))) -$(call add_json_list, InterPartitionJavaLibraryAllowList, $(PRODUCT_INTER_PARTITION_JAVA_LIBRARY_ALLOWLIST)) - $(call add_json_bool, CompressedApex, $(filter true,$(PRODUCT_COMPRESSED_APEX))) ifndef APEX_BUILD_FOR_PRE_S_DEVICES @@ -306,7 +289,6 @@ $(call add_json_bool, BuildBrokenClangCFlags, $(filter true,$(BUILD 
$(call add_json_bool, GenruleSandboxing, $(if $(GENRULE_SANDBOXING),$(filter true,$(GENRULE_SANDBOXING)),$(if $(filter true,$(BUILD_BROKEN_GENRULE_SANDBOXING)),,true))) $(call add_json_bool, BuildBrokenEnforceSyspropOwner, $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER))) $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW))) -$(call add_json_bool, BuildBrokenUsesSoongPython2Modules, $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES))) $(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE))) $(call add_json_bool, BuildBrokenIncorrectPartitionImages, $(filter true,$(BUILD_BROKEN_INCORRECT_PARTITION_IMAGES))) $(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES)) @@ -334,6 +316,8 @@ $(call add_json_list, AfdoProfiles, $(ALL_AFDO_PROFILES)) $(call add_json_str, ProductManufacturer, $(PRODUCT_MANUFACTURER)) $(call add_json_str, ProductBrand, $(PRODUCT_BRAND)) +$(call add_json_str, ProductDevice, $(PRODUCT_DEVICE)) +$(call add_json_str, ProductModel, $(PRODUCT_MODEL)) $(call add_json_str, ReleaseVersion, $(_RELEASE_VERSION)) $(call add_json_list, ReleaseAconfigValueSets, $(RELEASE_ACONFIG_VALUE_SETS)) @@ -361,6 +345,87 @@ $(call add_json_bool, BoardUseVbmetaDigestInFingerprint, $(filter true,$(BOARD_U $(call add_json_list, OemProperties, $(PRODUCT_OEM_PROPERTIES)) +$(call add_json_list, SystemPropFiles, $(TARGET_SYSTEM_PROP)) +$(call add_json_list, SystemExtPropFiles, $(TARGET_SYSTEM_EXT_PROP)) +$(call add_json_list, ProductPropFiles, $(TARGET_PRODUCT_PROP)) +$(call add_json_list, OdmPropFiles, $(TARGET_ODM_PROP)) + +$(call add_json_str, ExtraAllowedDepsTxt, $(EXTRA_ALLOWED_DEPS_TXT)) + +# Do not set ArtTargetIncludeDebugBuild into any value if PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD is not set, +# to have the same behavior from runtime_libart.mk. 
+ifneq ($(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD),) +$(call add_json_bool, ArtTargetIncludeDebugBuild, $(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD)) +endif + +_config_enable_uffd_gc := \ + $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default) +$(call add_json_str, EnableUffdGc, $(_config_enable_uffd_gc)) +_config_enable_uffd_gc := + +$(call add_json_list, DeviceFrameworkCompatibilityMatrixFile, $(DEVICE_FRAMEWORK_COMPATIBILITY_MATRIX_FILE)) +$(call add_json_list, DeviceProductCompatibilityMatrixFile, $(DEVICE_PRODUCT_COMPATIBILITY_MATRIX_FILE)) +$(call add_json_list, BoardAvbSystemAddHashtreeFooterArgs, $(BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS)) +$(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE))) + +$(call add_json_str, AdbKeys, $(PRODUCT_ADB_KEYS)) + +$(call add_json_map, PartitionVarsForSoongMigrationOnlyDoNotUse) + $(call add_json_str, ProductDirectory, $(dir $(INTERNAL_PRODUCT))) + + $(call add_json_map,PartitionQualifiedVariables) + $(foreach image_type,SYSTEM VENDOR CACHE USERDATA PRODUCT SYSTEM_EXT OEM ODM VENDOR_DLKM ODM_DLKM SYSTEM_DLKM, \ + $(call add_json_map,$(call to-lower,$(image_type))) \ + $(call add_json_bool, BuildingImage, $(filter true,$(BUILDING_$(image_type)_IMAGE))) \ + $(call add_json_str, BoardErofsCompressor, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESSOR)) \ + $(call add_json_str, BoardErofsCompressHints, $(BOARD_$(image_type)IMAGE_EROFS_COMPRESS_HINTS)) \ + $(call add_json_str, BoardErofsPclusterSize, $(BOARD_$(image_type)IMAGE_EROFS_PCLUSTER_SIZE)) \ + $(call add_json_str, BoardExtfsInodeCount, $(BOARD_$(image_type)IMAGE_EXTFS_INODE_COUNT)) \ + $(call add_json_str, BoardExtfsRsvPct, $(BOARD_$(image_type)IMAGE_EXTFS_RSV_PCT)) \ + $(call add_json_str, BoardF2fsSloadCompressFlags, $(BOARD_$(image_type)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)) \ + $(call add_json_str, BoardFileSystemCompress, $(BOARD_$(image_type)IMAGE_FILE_SYSTEM_COMPRESS)) \ + $(call add_json_str, BoardFileSystemType, 
$(BOARD_$(image_type)IMAGE_FILE_SYSTEM_TYPE)) \ + $(call add_json_str, BoardJournalSize, $(BOARD_$(image_type)IMAGE_JOURNAL_SIZE)) \ + $(call add_json_str, BoardPartitionReservedSize, $(BOARD_$(image_type)IMAGE_PARTITION_RESERVED_SIZE)) \ + $(call add_json_str, BoardPartitionSize, $(BOARD_$(image_type)IMAGE_PARTITION_SIZE)) \ + $(call add_json_str, BoardSquashfsBlockSize, $(BOARD_$(image_type)IMAGE_SQUASHFS_BLOCK_SIZE)) \ + $(call add_json_str, BoardSquashfsCompressor, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR)) \ + $(call add_json_str, BoardSquashfsCompressorOpt, $(BOARD_$(image_type)IMAGE_SQUASHFS_COMPRESSOR_OPT)) \ + $(call add_json_str, BoardSquashfsDisable4kAlign, $(BOARD_$(image_type)IMAGE_SQUASHFS_DISABLE_4K_ALIGN)) \ + $(call add_json_str, ProductBaseFsPath, $(PRODUCT_$(image_type)_BASE_FS_PATH)) \ + $(call add_json_str, ProductHeadroom, $(PRODUCT_$(image_type)_HEADROOM)) \ + $(call add_json_str, ProductVerityPartition, $(PRODUCT_$(image_type)_VERITY_PARTITION)) \ + $(call end_json_map) \ + ) + $(call end_json_map) + + $(call add_json_bool, TargetUserimagesUseExt2, $(filter true,$(TARGET_USERIMAGES_USE_EXT2))) + $(call add_json_bool, TargetUserimagesUseExt3, $(filter true,$(TARGET_USERIMAGES_USE_EXT3))) + $(call add_json_bool, TargetUserimagesUseExt4, $(filter true,$(TARGET_USERIMAGES_USE_EXT4))) + + $(call add_json_bool, TargetUserimagesSparseExtDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))) + $(call add_json_bool, TargetUserimagesSparseErofsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED))) + $(call add_json_bool, TargetUserimagesSparseSquashfsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))) + $(call add_json_bool, TargetUserimagesSparseF2fsDisabled, $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED))) + + $(call add_json_str, BoardErofsCompressor, $(BOARD_EROFS_COMPRESSOR)) + $(call add_json_str, BoardErofsCompressorHints, $(BOARD_EROFS_COMPRESS_HINTS)) + $(call add_json_str, 
BoardErofsPclusterSize, $(BOARD_EROFS_PCLUSTER_SIZE)) + $(call add_json_str, BoardErofsShareDupBlocks, $(BOARD_EROFS_SHARE_DUP_BLOCKS)) + $(call add_json_str, BoardErofsUseLegacyCompression, $(BOARD_EROFS_USE_LEGACY_COMPRESSION)) + $(call add_json_str, BoardExt4ShareDupBlocks, $(BOARD_EXT4_SHARE_DUP_BLOCKS)) + $(call add_json_str, BoardFlashLogicalBlockSize, $(BOARD_FLASH_LOGICAL_BLOCK_SIZE)) + $(call add_json_str, BoardFlashEraseBlockSize, $(BOARD_FLASH_ERASE_BLOCK_SIZE)) + + $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))) + $(call add_json_bool, ProductUseDynamicPartitionSize, $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE))) + $(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP))) + + $(call add_json_list, ProductPackages, $(PRODUCT_PACKAGES)) + $(call add_json_list, ProductPackagesDebug, $(PRODUCT_PACKAGES_DEBUG)) + +$(call end_json_map) + $(call json_end) $(file >$(SOONG_VARIABLES).tmp,$(json_contents)) diff --git a/core/soong_extra_config.mk b/core/soong_extra_config.mk index e4432d2c5c..00b5c0fd63 100644 --- a/core/soong_extra_config.mk +++ b/core/soong_extra_config.mk @@ -48,11 +48,6 @@ $(call add_json_str, BootloaderBoardName, $(TARGET_BOOTLOADER_BOARD_NAME)) $(call add_json_bool, SdkBuild, $(filter sdk sdk_addon,$(MAKECMDGOALS))) -_config_enable_uffd_gc := \ - $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default) -$(call add_json_str, EnableUffdGc, $(_config_enable_uffd_gc)) -_config_enable_uffd_gc := - $(call add_json_str, SystemServerCompilerFilter, $(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)) $(call add_json_bool, Product16KDeveloperOption, $(filter true,$(PRODUCT_16K_DEVELOPER_OPTION))) @@ -93,6 +88,12 @@ $(call add_json_str, VendorImageFileSystemType, $(BOARD_VENDORIMAGE_FILE_SYSTEM_ $(call add_json_list, BuildVersionTags, $(BUILD_VERSION_TAGS)) +$(call add_json_bool, ProductNotDebuggableInUserdebug, 
$(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG)) + +$(call add_json_bool, UsesProductImage, $(filter true,$(BOARD_USES_PRODUCTIMAGE))) + +$(call add_json_bool, TargetBoots16K, $(filter true,$(TARGET_BOOTS_16K))) + $(call json_end) $(shell mkdir -p $(dir $(SOONG_EXTRA_VARIABLES))) diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk index 7f85231543..8c3882f364 100644 --- a/core/soong_java_prebuilt.mk +++ b/core/soong_java_prebuilt.mk @@ -21,19 +21,6 @@ full_classes_pre_proguard_jar := $(intermediates.COMMON)/classes-pre-proguard.ja full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar common_javalib.jar := $(intermediates.COMMON)/javalib.jar -ifdef LOCAL_SOONG_AAR - LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_AAR) -endif - -# Use the Soong output as the checkbuild target instead of LOCAL_BUILT_MODULE -# to avoid checkbuilds making an extra copy of every module. -LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_HEADER_JAR) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_FULL_MANIFEST_FILE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEXPREOPT_CONFIG) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE) -LOCAL_ADDITIONAL_CHECKED_MODULE += $(LOCAL_SOONG_DEX_JAR) - ####################################### include $(BUILD_SYSTEM)/base_rules.mk ####################################### @@ -115,16 +102,14 @@ ifdef LOCAL_SOONG_DEX_JAR boot_jars := $(foreach pair,$(PRODUCT_BOOT_JARS), $(call word-colon,2,$(pair))) ifneq ($(filter $(LOCAL_MODULE),$(boot_jars)),) # is_boot_jar ifeq (true,$(WITH_DEXPREOPT)) - # $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) contains modules that installs - # all of bootjars' dexpreopt files (.art, .oat, .vdex, ...) + # dex_bootjars singleton installs all of bootjars' dexpreopt files (.art, .oat, .vdex, ...) + # This includes both the primary and secondary arches. # Add them to the required list so they are installed alongside this module. 
- ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET += \ - $(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) \ - $(2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) + ALL_MODULES.$(my_register_name).REQUIRED_FROM_TARGET += dex_bootjars # Copy $(LOCAL_BUILT_MODULE) and its dependencies when installing boot.art # so that dependencies of $(LOCAL_BUILT_MODULE) (which may include # jacoco-report-classes.jar) are copied for every build. - $(foreach m,$(DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE) $(2ND_DEFAULT_DEX_PREOPT_INSTALLED_IMAGE_MODULE), \ + $(foreach m,dex_bootjars, \ $(eval $(call add-dependency,$(firstword $(call module-installed-files,$(m))),$(LOCAL_BUILT_MODULE))) \ ) endif diff --git a/core/sysprop.mk b/core/sysprop.mk index 47d8a41a38..dc6f2c4ac6 100644 --- a/core/sysprop.mk +++ b/core/sysprop.mk @@ -33,34 +33,26 @@ define generate-common-build-props echo "# from generate-common-build-props" >> $(2);\ echo "# These properties identify this partition image." >> $(2);\ echo "####################################" >> $(2);\ - $(if $(filter system,$(1)),\ - echo "ro.product.$(1).brand=$(PRODUCT_SYSTEM_BRAND)" >> $(2);\ - echo "ro.product.$(1).device=$(PRODUCT_SYSTEM_DEVICE)" >> $(2);\ - echo "ro.product.$(1).manufacturer=$(PRODUCT_SYSTEM_MANUFACTURER)" >> $(2);\ - echo "ro.product.$(1).model=$(PRODUCT_SYSTEM_MODEL)" >> $(2);\ - echo "ro.product.$(1).name=$(PRODUCT_SYSTEM_NAME)" >> $(2);\ - ,\ - echo "ro.product.$(1).brand=$(PRODUCT_BRAND)" >> $(2);\ - echo "ro.product.$(1).device=$(TARGET_DEVICE)" >> $(2);\ - echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\ - echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\ - echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\ - if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \ - echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\ - fi; \ - if [ -n "$(strip $(PRODUCT_BRAND_FOR_ATTESTATION))" ]; then \ - echo 
"ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\ - fi; \ - if [ -n "$(strip $(PRODUCT_NAME_FOR_ATTESTATION))" ]; then \ - echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\ - fi; \ - if [ -n "$(strip $(PRODUCT_DEVICE_FOR_ATTESTATION))" ]; then \ - echo "ro.product.device_for_attestation=$(PRODUCT_DEVICE_FOR_ATTESTATION)" >> $(2);\ - fi; \ - if [ -n "$(strip $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))" ]; then \ - echo "ro.product.manufacturer_for_attestation=$(PRODUCT_MANUFACTURER_FOR_ATTESTATION)" >> $(2);\ - fi; \ - )\ + echo "ro.product.$(1).brand=$(PRODUCT_BRAND)" >> $(2);\ + echo "ro.product.$(1).device=$(TARGET_DEVICE)" >> $(2);\ + echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\ + echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\ + echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\ + if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \ + echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\ + fi; \ + if [ -n "$(strip $(PRODUCT_BRAND_FOR_ATTESTATION))" ]; then \ + echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\ + fi; \ + if [ -n "$(strip $(PRODUCT_NAME_FOR_ATTESTATION))" ]; then \ + echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\ + fi; \ + if [ -n "$(strip $(PRODUCT_DEVICE_FOR_ATTESTATION))" ]; then \ + echo "ro.product.device_for_attestation=$(PRODUCT_DEVICE_FOR_ATTESTATION)" >> $(2);\ + fi; \ + if [ -n "$(strip $(PRODUCT_MANUFACTURER_FOR_ATTESTATION))" ]; then \ + echo "ro.product.manufacturer_for_attestation=$(PRODUCT_MANUFACTURER_FOR_ATTESTATION)" >> $(2);\ + fi; \ $(if $(filter true,$(ZYGOTE_FORCE_64)),\ $(if $(filter vendor,$(1)),\ echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\ @@ -226,50 +218,11 @@ KNOWN_OEM_THUMBPRINT_PROPERTIES:= # ----------------------------------------------------------------- # system/build.prop # -# Note: parts 
of this file that can't be generated by the build-properties -# macro are manually created as separate files and then fed into the macro - -buildinfo_prop := $(call intermediates-dir-for,ETC,buildinfo.prop)/buildinfo.prop - -ifdef TARGET_SYSTEM_PROP -system_prop_file := $(TARGET_SYSTEM_PROP) -else -system_prop_file := $(wildcard $(TARGET_DEVICE_DIR)/system.prop) -endif - -_prop_files_ := \ - $(buildinfo_prop) \ - $(system_prop_file) - -# Order matters here. When there are duplicates, the last one wins. -# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter -_prop_vars_ := \ - ADDITIONAL_SYSTEM_PROPERTIES \ - PRODUCT_SYSTEM_PROPERTIES - -# TODO(b/117892318): deprecate this -_prop_vars_ += \ - PRODUCT_SYSTEM_DEFAULT_PROPERTIES - -ifndef property_overrides_split_enabled -_prop_vars_ += \ - ADDITIONAL_VENDOR_PROPERTIES \ - PRODUCT_VENDOR_PROPERTIES -endif +# system/build.prop is built by Soong. See system-build.prop module in +# build/soong/Android.bp. INSTALLED_BUILD_PROP_TARGET := $(TARGET_OUT)/build.prop -$(eval $(call build-properties,\ - system,\ - $(INSTALLED_BUILD_PROP_TARGET),\ - $(_prop_files_),\ - $(_prop_vars_),\ - $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_BUILD_PROP_TARGET))) - # ----------------------------------------------------------------- # vendor/build.prop # @@ -313,83 +266,18 @@ $(eval $(call declare-1p-target,$(INSTALLED_VENDOR_BUILD_PROP_TARGET))) # ----------------------------------------------------------------- # product/etc/build.prop # - -_prop_files_ := $(if $(TARGET_PRODUCT_PROP),\ - $(TARGET_PRODUCT_PROP),\ - $(wildcard $(TARGET_DEVICE_DIR)/product.prop)) - -# Order matters here. When there are duplicates, the last one wins. -# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter -_prop_vars_ := \ - ADDITIONAL_PRODUCT_PROPERTIES \ - PRODUCT_PRODUCT_PROPERTIES +# product/etc/build.prop is built by Soong. 
See product-build.prop module in +# build/soong/Android.bp. INSTALLED_PRODUCT_BUILD_PROP_TARGET := $(TARGET_OUT_PRODUCT)/etc/build.prop -ifdef PRODUCT_OEM_PROPERTIES -import_oem_prop := $(call intermediates-dir-for,ETC,import_oem_prop)/oem.prop - -$(import_oem_prop): - $(hide) echo "####################################" >> $@; \ - echo "# PRODUCT_OEM_PROPERTIES" >> $@; \ - echo "####################################" >> $@; - $(hide) $(foreach prop,$(PRODUCT_OEM_PROPERTIES), \ - echo "import /oem/oem.prop $(prop)" >> $@;) - -_footers_ := $(import_oem_prop) -else -_footers_ := -endif - -# Skip common /product properties generation if device released before R and -# has no product partition. This is the first part of the check. -ifeq ($(call math_lt,$(if $(PRODUCT_SHIPPING_API_LEVEL),$(PRODUCT_SHIPPING_API_LEVEL),30),30), true) - _skip_common_properties := true -endif - -# The second part of the check - always generate common properties for the -# devices with product partition regardless of shipping level. -ifneq ($(BOARD_USES_PRODUCTIMAGE),) - _skip_common_properties := -endif - -$(eval $(call build-properties,\ - product,\ - $(INSTALLED_PRODUCT_BUILD_PROP_TARGET),\ - $(_prop_files_),\ - $(_prop_vars_),\ - $(empty),\ - $(_footers_),\ - $(_skip_common_properties))) - -$(eval $(call declare-1p-target,$(INSTALLED_PRODUCT_BUILD_PROP_TARGET))) - -_skip_common_properties := - # ---------------------------------------------------------------- # odm/etc/build.prop # -_prop_files_ := $(if $(TARGET_ODM_PROP),\ - $(TARGET_ODM_PROP),\ - $(wildcard $(TARGET_DEVICE_DIR)/odm.prop)) - -# Order matters here. When there are duplicates, the last one wins. -# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter -_prop_vars_ := \ - ADDITIONAL_ODM_PROPERTIES \ - PRODUCT_ODM_PROPERTIES +# odm/etc/build.prop is built by Soong. See odm-build.prop module in +# build/soong/Android.bp. 
INSTALLED_ODM_BUILD_PROP_TARGET := $(TARGET_OUT_ODM)/etc/build.prop -$(eval $(call build-properties,\ - odm,\ - $(INSTALLED_ODM_BUILD_PROP_TARGET),\ - $(_prop_files_),\ - $(_prop_vars_),\ - $(empty),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_ODM_BUILD_PROP_TARGET))) # ---------------------------------------------------------------- # vendor_dlkm/etc/build.prop @@ -442,25 +330,10 @@ $(eval $(call declare-1p-target,$(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET))) # ----------------------------------------------------------------- # system_ext/etc/build.prop # -_prop_files_ := $(if $(TARGET_SYSTEM_EXT_PROP),\ - $(TARGET_SYSTEM_EXT_PROP),\ - $(wildcard $(TARGET_DEVICE_DIR)/system_ext.prop)) - -# Order matters here. When there are duplicates, the last one wins. -# TODO(b/117892318): don't allow duplicates so that the ordering doesn't matter -_prop_vars_ := PRODUCT_SYSTEM_EXT_PROPERTIES +# system_ext/etc/build.prop is built by Soong. See system-build.prop module in +# build/soong/Android.bp. INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET := $(TARGET_OUT_SYSTEM_EXT)/etc/build.prop -$(eval $(call build-properties,\ - system_ext,\ - $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET),\ - $(_prop_files_),\ - $(_prop_vars_),\ - $(empty),\ - $(empty),\ - $(empty))) - -$(eval $(call declare-1p-target,$(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET))) # ---------------------------------------------------------------- # ramdisk/boot/etc/build.prop diff --git a/core/sysprop_config.mk b/core/sysprop_config.mk index f9b9d1c2ee..69066117a3 100644 --- a/core/sysprop_config.mk +++ b/core/sysprop_config.mk @@ -15,63 +15,9 @@ $(foreach name, $(_additional_prop_var_names),\ ) _additional_prop_var_names := -# -# ----------------------------------------------------------------- -# Add the product-defined properties to the build properties. 
-ifneq ($(BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED), true) - ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES) -else - ifndef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE - ADDITIONAL_SYSTEM_PROPERTIES += $(PRODUCT_PROPERTY_OVERRIDES) - endif -endif - -ADDITIONAL_SYSTEM_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE} - -# Set ro.llndk.api_level to show the maximum vendor API level that the LLNDK in -# the system partition supports. -ifdef RELEASE_BOARD_API_LEVEL -ADDITIONAL_SYSTEM_PROPERTIES += ro.llndk.api_level=$(RELEASE_BOARD_API_LEVEL) -endif - -# Sets ro.actionable_compatible_property.enabled to know on runtime whether the -# allowed list of actionable compatible properties is enabled or not. -ADDITIONAL_SYSTEM_PROPERTIES += ro.actionable_compatible_property.enabled=true - -# Add the system server compiler filter if they are specified for the product. -ifneq (,$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER)) -ADDITIONAL_PRODUCT_PROPERTIES += dalvik.vm.systemservercompilerfilter=$(PRODUCT_SYSTEM_SERVER_COMPILER_FILTER) -endif - -# Add the 16K developer option if it is defined for the product. -ifeq ($(PRODUCT_16K_DEVELOPER_OPTION),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=true -else -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.16k_page.enabled=false -endif - -ifeq ($(TARGET_BOOTS_16K),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=16384 -else -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.page_size=4096 -endif - -# Enable core platform API violation warnings on userdebug and eng builds. -ifneq ($(TARGET_BUILD_VARIANT),user) -ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn -endif - -# Define ro.sanitize.<name> properties for all global sanitizers. -ADDITIONAL_SYSTEM_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true) - -# Sets the default value of ro.postinstall.fstab.prefix to /system. 
-# Device board config should override the value to /product when needed by: -# -# PRODUCT_PRODUCT_PROPERTIES += ro.postinstall.fstab.prefix=/product -# -# It then uses ${ro.postinstall.fstab.prefix}/etc/fstab.postinstall to -# mount system_other partition. -ADDITIONAL_SYSTEM_PROPERTIES += ro.postinstall.fstab.prefix=/system +$(KATI_obsolete_var ADDITIONAL_SYSTEM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead) +$(KATI_obsolete_var ADDITIONAL_ODM_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead) +$(KATI_obsolete_var ADDITIONAL_PRODUCT_PROPERTIES,Use build/soong/scripts/gen_build_prop.py instead) # Add cpu properties for bionic and ART. ADDITIONAL_VENDOR_PROPERTIES += ro.bionic.arch=$(TARGET_ARCH) @@ -184,118 +130,16 @@ ADDITIONAL_VENDOR_PROPERTIES += \ ro.build.ab_update=$(AB_OTA_UPDATER) endif -ADDITIONAL_PRODUCT_PROPERTIES += ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS) - ifeq ($(AB_OTA_UPDATER),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS))) ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS))) endif -# Set this property for VTS to skip large page size tests on unsupported devices. -ADDITIONAL_PRODUCT_PROPERTIES += \ - ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED) - -ifeq ($(PRODUCT_NO_BIONIC_PAGE_SIZE_MACRO),true) -ADDITIONAL_PRODUCT_PROPERTIES += ro.product.build.no_bionic_page_size_macro=true -endif - user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT)) -enable_target_debugging := true -enable_dalvik_lock_contention_logging := true -ifneq (,$(user_variant)) - # Target is secure in user builds. 
- ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=1 - ADDITIONAL_SYSTEM_PROPERTIES += security.perf_harden=1 - - ifeq ($(user_variant),user) - ADDITIONAL_SYSTEM_PROPERTIES += ro.adb.secure=1 - endif - - ifneq ($(user_variant),userdebug) - # Disable debugging in plain user builds. - enable_target_debugging := - enable_dalvik_lock_contention_logging := - else - # Disable debugging in userdebug builds if PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG - # is set. - ifneq (,$(strip $(PRODUCT_NOT_DEBUGGABLE_IN_USERDEBUG))) - enable_target_debugging := - endif - endif - - # Disallow mock locations by default for user builds - ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=0 - -else # !user_variant - # Turn on checkjni for non-user builds. - ADDITIONAL_SYSTEM_PROPERTIES += ro.kernel.android.checkjni=1 - # Set device insecure for non-user builds. - ADDITIONAL_SYSTEM_PROPERTIES += ro.secure=0 - # Allow mock locations by default for non user builds - ADDITIONAL_SYSTEM_PROPERTIES += ro.allow.mock.location=1 -endif # !user_variant - -ifeq (true,$(strip $(enable_dalvik_lock_contention_logging))) - # Enable Dalvik lock contention logging. 
- ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.lockprof.threshold=500 -endif # !enable_dalvik_lock_contention_logging - -ifeq (true,$(strip $(enable_target_debugging))) - # Target is more debuggable and adbd is on by default - ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=1 -else # !enable_target_debugging - # Target is less debuggable and adbd is off by default - ADDITIONAL_SYSTEM_PROPERTIES += ro.debuggable=0 -endif # !enable_target_debugging - -enable_target_debugging:= -enable_dalvik_lock_contention_logging:= - -ifneq ($(filter sdk sdk_addon,$(MAKECMDGOALS)),) -_is_sdk_build := true -endif - -ifeq ($(TARGET_BUILD_VARIANT),eng) -ifneq ($(filter ro.setupwizard.mode=ENABLED, $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))),) - # Don't require the setup wizard on eng builds - ADDITIONAL_SYSTEM_PROPERTIES := $(filter-out ro.setupwizard.mode=%,\ - $(call collapse-pairs, $(ADDITIONAL_SYSTEM_PROPERTIES))) \ - ro.setupwizard.mode=OPTIONAL -endif -ifndef _is_sdk_build - # To speedup startup of non-preopted builds, don't verify or compile the boot image. - ADDITIONAL_SYSTEM_PROPERTIES += dalvik.vm.image-dex2oat-filter=extract -endif -# b/323566535 -ADDITIONAL_SYSTEM_PROPERTIES += init.svc_debug.no_fatal.zygote=true -endif - -ifdef _is_sdk_build -ADDITIONAL_SYSTEM_PROPERTIES += xmpp.auto-presence=true -ADDITIONAL_SYSTEM_PROPERTIES += ro.config.nocheckin=yes -endif - -_is_sdk_build := - -ADDITIONAL_SYSTEM_PROPERTIES += net.bt.name=Android - -# This property is set by flashing debug boot image, so default to false. -ADDITIONAL_SYSTEM_PROPERTIES += ro.force.debuggable=0 config_enable_uffd_gc := \ $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC) default) -# This is a temporary system property that controls the ART module. The plan is -# to remove it by Aug 2025, at which time Mainline updates of the ART module -# will ignore it as well. -# If the value is "default", it will be mangled by post_process_props.py. 
-ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(config_enable_uffd_gc) - -ADDITIONAL_SYSTEM_PROPERTIES := $(strip $(ADDITIONAL_SYSTEM_PROPERTIES)) -ADDITIONAL_PRODUCT_PROPERTIES := $(strip $(ADDITIONAL_PRODUCT_PROPERTIES)) ADDITIONAL_VENDOR_PROPERTIES := $(strip $(ADDITIONAL_VENDOR_PROPERTIES)) .KATI_READONLY += \ - ADDITIONAL_SYSTEM_PROPERTIES \ - ADDITIONAL_PRODUCT_PROPERTIES \ ADDITIONAL_VENDOR_PROPERTIES diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk index c95f6e7878..eb54faeffe 100644 --- a/core/tasks/art-host-tests.mk +++ b/core/tasks/art-host-tests.mk @@ -47,21 +47,16 @@ $(art_host_tests_zip) : $(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_l $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \ done - grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \ - -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \ -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list \ -sha256 grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true - grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true $(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \ - -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \ - -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list + -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true $(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \ -P host 
-C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list - grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list $(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list art-host-tests: $(art_host_tests_zip) diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk index 5850c4ed73..6164c2e94b 100644 --- a/core/tasks/device-tests.mk +++ b/core/tasks/device-tests.mk @@ -14,6 +14,7 @@ .PHONY: device-tests +.PHONY: device-tests-host-shared-libs device-tests-zip := $(PRODUCT_OUT)/device-tests.zip # Create an artifact to include a list of test config files in device-tests. @@ -23,37 +24,45 @@ device-tests-configs-zip := $(PRODUCT_OUT)/device-tests_configs.zip my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES)) device_tests_host_shared_libs_zip := $(PRODUCT_OUT)/device-tests_host-shared-libs.zip -$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip) +$(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip) $(device-tests-configs-zip) $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list $(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests) -$(device-tests-zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip) $(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP) - rm -f $@-shared-libs.list echo $(sort $(COMPATIBILITY.device-tests.FILES) 
$(COMPATIBILITY.device-tests.SOONG_INSTALLED_COMPATIBILITY_SUPPORT_FILES)) | tr " " "\n" > $@.list grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true grep -e .*\\.config$$ $@-host.list > $@-host-test-configs.list || true $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ echo $$shared_lib >> $@-host.list; \ - echo $$shared_lib >> $@-shared-libs.list; \ done - grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true $(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256 $(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \ -P host -C $(HOST_OUT) -l $@-host-test-configs.list \ -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list - $(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \ - -P host -C $(HOST_OUT) -l $@-host-shared-libs.list rm -f $(PRIVATE_device_tests_list) $(hide) grep -e .*\\.config$$ $@-host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_device_tests_list) $(hide) grep -e .*\\.config$$ $@-target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_device_tests_list) $(hide) $(SOONG_ZIP) -d -o $(device-tests-list-zip) -C $(dir $@) -f $(PRIVATE_device_tests_list) rm -f $@.list $@-host.list $@-target.list $@-host-test-configs.list $@-target-test-configs.list \ - $@-shared-libs.list $@-host-shared-libs.list $(PRIVATE_device_tests_list) + $(PRIVATE_device_tests_list) + +$(device_tests_host_shared_libs_zip) : PRIVATE_device_host_shared_libs_zip := $(device_tests_host_shared_libs_zip) +$(device_tests_host_shared_libs_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests) +$(device_tests_host_shared_libs_zip) : $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP) + rm -f $@-shared-libs.list + $(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \ + echo $$shared_lib >> 
$@-shared-libs.list; \ + done + grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true + $(SOONG_ZIP) -d -o $(PRIVATE_device_host_shared_libs_zip) \ + -P host -C $(HOST_OUT) -l $@-host-shared-libs.list device-tests: $(device-tests-zip) +device-tests-host-shared-libs: $(device_tests_host_shared_libs_zip) + $(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip)) +$(call dist-for-goals, device-tests-host-shared-libs, $(device_tests_host_shared_libs_zip)) $(call declare-1p-container,$(device-tests-zip),) $(call declare-container-license-deps,$(device-tests-zip),$(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests),$(PRODUCT_OUT)/:/) diff --git a/core/tasks/meta-lic.mk b/core/tasks/meta-lic.mk index 85357eb15b..620b1e29ae 100644 --- a/core/tasks/meta-lic.mk +++ b/core/tasks/meta-lic.mk @@ -83,6 +83,40 @@ $(eval $(call declare-copy-files-license-metadata,device/google/coral,display_19 $(eval $(call declare-1p-copy-files,device/google/coral,audio_policy_configuration.xml)) $(eval $(call declare-1p-copy-files,device/google/coral,display_19260504575090817.xml)) +# Moved here from device/google/cuttlefish/Android.mk +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,.idc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.postinstall,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call 
declare-copy-files-license-metadata,device/google/cuttlefish,ueventd.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,hals.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,device_state_configuration.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,p2p_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant_overlay.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,wpa_supplicant.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.cutf_cvm.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.f2fs.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call 
declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.hctr2,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,fstab.cf.ext4.cts,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,init.rc,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish,audio_policy.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) + +$(eval $(call declare-copy-files-license-metadata,device/google/cuttlefish/shared/config,pci.ids,SPDX-license-identifier-BSD-3-Clause,notice,device/google/cuttlefish/shared/config/LICENSE_BSD,)) + +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,privapp-permissions-cuttlefish.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_profiles_V1_0.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs_performance.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,cuttlefish_excluded_hardware.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,media_codecs_google_video.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,car_audio_configuration.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,audio_policy_configuration.xml)) +$(eval $(call declare-1p-copy-files,device/google/cuttlefish,preinstalled-packages-product-car-cuttlefish.xml)) +$(eval $(call declare-1p-copy-files,hardware/google/camera/devices,.json)) + # Moved here from device/google/gs101/Android.mk $(eval $(call declare-copy-files-license-metadata,device/google/gs101,default-permissions.xml,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) 
$(eval $(call declare-copy-files-license-metadata,device/google/gs101,libnfc-nci.conf,SPDX-license-identifier-Apache-2.0,notice,build/soong/licenses/LICENSE,)) @@ -191,3 +225,6 @@ $(eval $(call declare-1p-copy-files,packages/services/Car,)) # Moved here from hardware/libhardware_legacy/Android.mk $(eval $(call declare-1p-copy-files,hardware/libhardware_legacy,)) + +# Moved here from system/core/rootdir/Android.mk +$(eval $(call declare-1p-copy-files,system/core/rootdir,)) diff --git a/core/tasks/mke2fs-dist.mk b/core/tasks/mke2fs-dist.mk new file mode 100644 index 0000000000..3540c1f985 --- /dev/null +++ b/core/tasks/mke2fs-dist.mk @@ -0,0 +1,22 @@ +# Copyright (C) 2024 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO: After Soong's recovery partition variation can be set to selectable +# and the meta_lic file duplication issue is resolved, move it to the +# dist section of the corresponding module's Android.bp. 
+my_dist_files := $(HOST_OUT_EXECUTABLES)/mke2fs +my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs +my_dist_files += $(HOST_OUT_EXECUTABLES)/make_f2fs_casefold +$(call dist-for-goals,dist_files sdk,$(my_dist_files)) +my_dist_files := diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk index 75936685a4..0ca27d8222 100644 --- a/core/tasks/module-info.mk +++ b/core/tasks/module-info.mk @@ -13,7 +13,7 @@ define write-optional-json-bool $(if $(strip $(2)),'$(COMMA)$(strip $(1)): "$(strip $(2))"') endef -SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT).json +SOONG_MODULE_INFO := $(SOONG_OUT_DIR)/module-info-$(TARGET_PRODUCT)${COVERAGE_SUFFIX}.json $(MODULE_INFO_JSON): PRIVATE_SOONG_MODULE_INFO := $(SOONG_MODULE_INFO) $(MODULE_INFO_JSON): PRIVATE_MERGE_JSON_OBJECTS := $(HOST_OUT_EXECUTABLES)/merge_module_info_json diff --git a/core/tasks/prebuilt_tradefed.mk b/core/tasks/prebuilt_tradefed.mk new file mode 100644 index 0000000000..96c57d5633 --- /dev/null +++ b/core/tasks/prebuilt_tradefed.mk @@ -0,0 +1,22 @@ +# Copyright (C) 2020 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +ifeq (,$(wildcard tools/tradefederation/core)) +.PHONY: tradefed-core +tradefed-core: tradefed atest_tradefed.sh +.PHONY: tradefed-all +tradefed-all: tradefed atest_tradefed.sh + +$(call dist-for-goals, tradefed, $(HOST_OUT)/etc/tradefed.zip) +endif diff --git a/core/tasks/recovery_snapshot.mk b/core/tasks/recovery_snapshot.mk deleted file mode 100644 index 525273bfc3..0000000000 --- a/core/tasks/recovery_snapshot.mk +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright (C) 2020 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -current_makefile := $(lastword $(MAKEFILE_LIST)) - -# RECOVERY_SNAPSHOT_VERSION must be set to 'current' in order to generate a recovery snapshot. -ifeq ($(RECOVERY_SNAPSHOT_VERSION),current) - -.PHONY: recovery-snapshot -recovery-snapshot: $(SOONG_RECOVERY_SNAPSHOT_ZIP) - -$(call dist-for-goals, recovery-snapshot, $(SOONG_RECOVERY_SNAPSHOT_ZIP)) - -else # RECOVERY_SNAPSHOT_VERSION is NOT set to 'current' - -.PHONY: recovery-snapshot -recovery-snapshot: PRIVATE_MAKEFILE := $(current_makefile) -recovery-snapshot: - $(call echo-error,$(PRIVATE_MAKEFILE),\ - "CANNOT generate Recovery snapshot. RECOVERY_SNAPSHOT_VERSION must be set to 'current'.") - exit 1 - -endif # RECOVERY_SNAPSHOT_VERSION diff --git a/core/tasks/sts-lite.mk b/core/tasks/sts-sdk.mk index 65c65c3dc6..4abbc29c5e 100644 --- a/core/tasks/sts-lite.mk +++ b/core/tasks/sts-sdk.mk @@ -13,26 +13,24 @@ # limitations under the License. 
ifneq ($(wildcard test/sts/README-sts-sdk.md),) -test_suite_name := sts-lite +test_suite_name := sts-sdk test_suite_tradefed := sts-tradefed test_suite_readme := test/sts/README-sts-sdk.md sts_sdk_zip := $(HOST_OUT)/$(test_suite_name)/sts-sdk.zip include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk -sts_sdk_samples := $(call intermediates-dir-for,ETC,sts-sdk-samples.zip)/sts-sdk-samples.zip +sts_sdk_plugin_skel := $(call intermediates-dir-for,ETC,sts-sdk-plugin-skel.zip)/sts-sdk-plugin-skel.zip -$(sts_sdk_zip): STS_LITE_ZIP := $(compatibility_zip) -$(sts_sdk_zip): STS_SDK_SAMPLES := $(sts_sdk_samples) -$(sts_sdk_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(sts_sdk_samples) - rm -f $@ $(STS_LITE_ZIP)_filtered - $(ZIP2ZIP) -i $(STS_LITE_ZIP) -o $(STS_LITE_ZIP)_filtered \ - -x android-sts-lite/tools/sts-tradefed-tests.jar \ - 'android-sts-lite/tools/*:sts-test/libs/' \ - 'android-sts-lite/testcases/*:sts-test/utils/' \ - 'android-sts-lite/jdk/**/*:sts-test/jdk/' - $(MERGE_ZIPS) $@ $(STS_LITE_ZIP)_filtered $(STS_SDK_SAMPLES) - rm -f $(STS_LITE_ZIP)_filtered +$(sts_sdk_zip): STS_SDK_ZIP := $(compatibility_zip) +$(sts_sdk_zip): STS_SDK_PLUGIN_SKEL := $(sts_sdk_plugin_skel) +$(sts_sdk_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(sts_sdk_plugin_skel) + rm -f $@ $(STS_SDK_ZIP)_filtered + $(ZIP2ZIP) -i $(STS_SDK_ZIP) -o $(STS_SDK_ZIP)_filtered \ + -x android-sts-sdk/tools/sts-tradefed-tests.jar \ + 'android-sts-sdk/tools/*:sts-sdk/src/main/resources/sts-tradefed-tools/' + $(MERGE_ZIPS) $@ $(STS_SDK_ZIP)_filtered $(STS_SDK_PLUGIN_SKEL) + rm -f $(STS_SDK_ZIP)_filtered .PHONY: sts-sdk sts-sdk: $(sts_sdk_zip) diff --git a/core/tasks/tools/update_bootloader_radio_image.mk b/core/tasks/tools/update_bootloader_radio_image.mk new file mode 100644 index 0000000000..adb86ea11a --- /dev/null +++ b/core/tasks/tools/update_bootloader_radio_image.mk @@ -0,0 +1,26 @@ +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http:#www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ifeq ($(USES_DEVICE_GOOGLE_ZUMA),true) + -include vendor/google_devices/zuma/prebuilts/misc_bins/update_bootloader_radio_image.mk +endif +ifeq ($(USES_DEVICE_GOOGLE_ZUMAPRO),true) + -include vendor/google_devices/zumapro/prebuilts/misc_bins/update_bootloader_radio_image.mk +endif +ifeq ($(USES_DEVICE_GOOGLE_LAGUNA),true) + -include vendor/google_devices/laguna/prebuilts/misc_bins/update_bootloader_radio_image.mk +endif +ifeq ($(USES_DEVICE_GOOGLE_MALIBU),true) + -include vendor/google_devices/malibu/prebuilts/misc_bins/update_bootloader_radio_image.mk +endif diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk index 61bf13695d..47c360de52 100644 --- a/core/tasks/tradefed-tests-list.mk +++ b/core/tasks/tradefed-tests-list.mk @@ -15,6 +15,11 @@ # List all TradeFed tests from COMPATIBILITY.tradefed_tests_dir .PHONY: tradefed-tests-list +COMPATIBILITY.tradefed_tests_dir := \ + $(COMPATIBILITY.tradefed_tests_dir) \ + tools/tradefederation/core/res/config \ + tools/tradefederation/core/javatests/res/config + tradefed_tests := $(foreach dir, $(COMPATIBILITY.tradefed_tests_dir), \ $(eval tradefed_tests += $(shell find $(dir) -type f -name "*.xml"))) diff --git a/core/tasks/vendor_snapshot.mk b/core/tasks/vendor_snapshot.mk deleted file mode 100644 index 83c13792a8..0000000000 --- a/core/tasks/vendor_snapshot.mk +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (C) 2020 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 
(the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -current_makefile := $(lastword $(MAKEFILE_LIST)) - -# BOARD_VNDK_VERSION must be set to 'current' in order to generate a vendor snapshot. -ifeq ($(BOARD_VNDK_VERSION),current) - -.PHONY: vendor-snapshot -vendor-snapshot: $(SOONG_VENDOR_SNAPSHOT_ZIP) - -$(call dist-for-goals, vendor-snapshot, $(SOONG_VENDOR_SNAPSHOT_ZIP)) - -.PHONY: vendor-fake-snapshot -vendor-fake-snapshot: $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP) - -$(call dist-for-goals, vendor-fake-snapshot, $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP):fake/$(notdir $(SOONG_VENDOR_FAKE_SNAPSHOT_ZIP))) - -else # BOARD_VNDK_VERSION is NOT set to 'current' - -.PHONY: vendor-snapshot -vendor-snapshot: PRIVATE_MAKEFILE := $(current_makefile) -vendor-snapshot: - $(call echo-error,$(PRIVATE_MAKEFILE),\ - "CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.") - exit 1 - -.PHONY: vendor-fake-snapshot -vendor-fake-snapshot: PRIVATE_MAKEFILE := $(current_makefile) -vendor-fake-snapshot: - $(call echo-error,$(PRIVATE_MAKEFILE),\ - "CANNOT generate Vendor snapshot. BOARD_VNDK_VERSION must be set to 'current'.") - exit 1 - -endif # BOARD_VNDK_VERSION diff --git a/core/version_util.mk b/core/version_util.mk index eb568becc4..0e346347bb 100644 --- a/core/version_util.mk +++ b/core/version_util.mk @@ -183,14 +183,17 @@ ifndef PLATFORM_SECURITY_PATCH_TIMESTAMP endif .KATI_READONLY := PLATFORM_SECURITY_PATCH_TIMESTAMP -ifndef PLATFORM_BASE_OS - # Used to indicate the base os applied to the device. 
- # Can be an arbitrary string, but must be a single word. - # - # If there is no $PLATFORM_BASE_OS set, keep it empty. - PLATFORM_BASE_OS := -endif -.KATI_READONLY := PLATFORM_BASE_OS +# PLATFORM_BASE_OS is used to indicate the base os applied +# to the device. Can be an arbitrary string, but must be a +# single word. +# +# If there is no $PLATFORM_BASE_OS set, keep it empty. +# +# PLATFORM_BASE_OS can either be set via an enviornment +# variable, or set via the PRODUCT_BASE_OS product variable. +PLATFORM_BASE_OS_ENV_INPUT := $(PLATFORM_BASE_OS) +.KATI_READONLY := PLATFORM_BASE_OS_ENV_INPUT +PLATFORM_BASE_OS := ifndef BUILD_ID # Used to signify special builds. E.g., branches and/or releases, diff --git a/envsetup.sh b/envsetup.sh index 06dadd3f38..3fed5aed6d 100644 --- a/envsetup.sh +++ b/envsetup.sh @@ -442,6 +442,7 @@ function print_lunch_menu() function lunch() { local answer + setup_cog_env_if_needed if [[ $# -gt 1 ]]; then echo "usage: lunch [target]" >&2 @@ -1079,10 +1080,7 @@ function source_vendorsetup() { done done - if [[ "${PWD}" == /google/cog/* ]]; then - f="build/make/cogsetup.sh" - echo "including $f"; . "$T/$f" - fi + setup_cog_env_if_needed } function showcommands() { diff --git a/shell_utils.sh b/shell_utils.sh index 86f3f49f50..c4a67564c2 100644 --- a/shell_utils.sh +++ b/shell_utils.sh @@ -63,6 +63,70 @@ function require_lunch } fi +# This function sets up the build environment to be appropriate for Cog. +function setup_cog_env_if_needed() { + local top=$(gettop) + + # return early if not in a cog workspace + if [[ ! "$top" =~ ^/google/cog ]]; then + return 0 + fi + + setup_cog_symlink + + export ANDROID_BUILD_ENVIRONMENT_CONFIG="googler-cog" + + # Running repo command within Cog workspaces is not supported, so override + # it with this function. If the user is running repo within a Cog workspace, + # we'll fail with an error, otherwise, we run the original repo command with + # the given args. + if ! 
ORIG_REPO_PATH=`which repo`; then + return 0 + fi + function repo { + if [[ "${PWD}" == /google/cog/* ]]; then + echo -e "\e[01;31mERROR:\e[0mrepo command is disallowed within Cog workspaces." + kill -INT $$ # exits the script without exiting the user's shell + fi + ${ORIG_REPO_PATH} "$@" + } +} + +# creates a symlink for the out/ dir when inside a cog workspace. +function setup_cog_symlink() { + local out_dir=$(getoutdir) + local top=$(gettop) + + # return early if out dir is already a symlink + if [[ -L "$out_dir" ]]; then + return 0 + fi + + # return early if out dir is not in the workspace + if [[ ! "$out_dir" =~ ^$top/ ]]; then + return 0 + fi + + local link_destination="${HOME}/.cog/android-build-out" + + # remove existing out/ dir if it exists + if [[ -d "$out_dir" ]]; then + echo "Detected existing out/ directory in the Cog workspace which is not supported. Repairing workspace by removing it and creating the symlink to ~/.cog/android-build-out" + if ! rm -rf "$out_dir"; then + echo "Failed to remove existing out/ directory: $out_dir" >&2 + kill -INT $$ # exits the script without exiting the user's shell + fi + fi + + # create symlink + echo "Creating symlink: $out_dir -> $link_destination" + mkdir -p ${link_destination} + if ! 
ln -s "$link_destination" "$out_dir"; then + echo "Failed to create cog symlink: $out_dir -> $link_destination" >&2 + kill -INT $$ # exits the script without exiting the user's shell + fi +} + function getoutdir { local top=$(gettop) diff --git a/target/board/Android.mk b/target/board/Android.mk index decc345ded..8133af9a7f 100644 --- a/target/board/Android.mk +++ b/target/board/Android.mk @@ -67,7 +67,6 @@ $(GEN): PRIVATE_DEVICE_MANIFEST_FILE := $(DEVICE_MANIFEST_FILE) $(GEN): $(DEVICE_MANIFEST_FILE) $(HOST_OUT_EXECUTABLES)/assemble_vintf BOARD_SEPOLICY_VERS=$(BOARD_SEPOLICY_VERS) \ PRODUCT_ENFORCE_VINTF_MANIFEST=$(PRODUCT_ENFORCE_VINTF_MANIFEST) \ - PRODUCT_SHIPPING_API_LEVEL=$(PRODUCT_SHIPPING_API_LEVEL) \ $(HOST_OUT_EXECUTABLES)/assemble_vintf -o $@ \ -i $(call normalize-path-list,$(PRIVATE_DEVICE_MANIFEST_FILE)) @@ -99,7 +98,6 @@ $$(GEN): PRIVATE_SRC_FILES := $$(my_fragment_files) $$(GEN): $$(my_fragment_files) $$(HOST_OUT_EXECUTABLES)/assemble_vintf BOARD_SEPOLICY_VERS=$$(BOARD_SEPOLICY_VERS) \ PRODUCT_ENFORCE_VINTF_MANIFEST=$$(PRODUCT_ENFORCE_VINTF_MANIFEST) \ - PRODUCT_SHIPPING_API_LEVEL=$$(PRODUCT_SHIPPING_API_LEVEL) \ $$(HOST_OUT_EXECUTABLES)/assemble_vintf -o $$@ \ -i $$(call normalize-path-list,$$(PRIVATE_SRC_FILES)) diff --git a/target/product/base_product.mk b/target/product/base_product.mk index 0ac220bb16..acfc6534f8 100644 --- a/target/product/base_product.mk +++ b/target/product/base_product.mk @@ -25,3 +25,8 @@ PRODUCT_PACKAGES += \ product_compatibility_matrix.xml \ product_manifest.xml \ selinux_policy_product \ + product-build.prop \ + +# Packages included only for eng or userdebug builds, previously debug tagged +PRODUCT_PACKAGES_DEBUG += \ + adb_keys \ diff --git a/target/product/base_system.mk b/target/product/base_system.mk index 9a6746fc80..74ed82d3ca 100644 --- a/target/product/base_system.mk +++ b/target/product/base_system.mk @@ -279,6 +279,7 @@ PRODUCT_PACKAGES += \ storaged \ surfaceflinger \ svc \ + system-build.prop \ 
task_profiles.json \ tc \ telecom \ @@ -344,6 +345,11 @@ ifeq ($(RELEASE_USE_WEBVIEW_BOOTSTRAP_MODULE),true) com.android.webview.bootstrap endif +ifneq (,$(RELEASE_RANGING_STACK)) + PRODUCT_PACKAGES += \ + com.android.ranging +endif + # VINTF data for system image PRODUCT_PACKAGES += \ system_manifest.xml \ @@ -402,7 +408,6 @@ PRODUCT_HOST_PACKAGES += \ BugReport \ adb \ adevice \ - art-tools \ atest \ bcc \ bit \ @@ -433,6 +438,21 @@ PRODUCT_HOST_PACKAGES += \ tz_version_host \ tz_version_host_tzdata_apex \ +# For art-tools, if the dependencies have changed, please sync them to art/Android.bp as well. +PRODUCT_HOST_PACKAGES += \ + ahat \ + dexdump \ + hprof-conv +# A subset of the tools are disabled when HOST_PREFER_32_BIT is defined as make reports that +# they are not supported on host (b/129323791). This is likely due to art_apex disabling host +# APEX builds when HOST_PREFER_32_BIT is set (b/120617876). +ifneq ($(HOST_PREFER_32_BIT),true) +PRODUCT_HOST_PACKAGES += \ + dexlist \ + oatdump +endif + + PRODUCT_PACKAGES += init.usb.rc init.usb.configfs.rc PRODUCT_PACKAGES += etc_hosts @@ -450,7 +470,6 @@ PRODUCT_PACKAGES += \ # Packages included only for eng or userdebug builds, previously debug tagged PRODUCT_PACKAGES_DEBUG := \ - adb_keys \ adevice_fingerprint \ arping \ dmuserd \ @@ -485,6 +504,10 @@ PRODUCT_PACKAGES_DEBUG := \ unwind_reg_info \ unwind_symbols \ +# For Remotely Provisioned Certificate Processor +PRODUCT_SYSTEM_PROPERTIES += \ + remote_provisioning.use_cert_processor=false + # The set of packages whose code can be loaded by the system server. 
PRODUCT_SYSTEM_SERVER_APPS += \ SettingsProvider \ diff --git a/target/product/base_system_ext.mk b/target/product/base_system_ext.mk index 92ca227a01..febe5378b5 100644 --- a/target/product/base_system_ext.mk +++ b/target/product/base_system_ext.mk @@ -24,6 +24,7 @@ PRODUCT_PACKAGES += \ SatelliteClient \ selinux_policy_system_ext \ system_ext_manifest.xml \ + system_ext-build.prop \ # Base modules when shipping api level is less than or equal to 34 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \ diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk index 1854f9756f..3f859417b6 100644 --- a/target/product/base_vendor.mk +++ b/target/product/base_vendor.mk @@ -17,7 +17,6 @@ # Base modules and settings for recovery. PRODUCT_PACKAGES += \ adbd.recovery \ - android.hardware.health@2.0-impl-default.recovery \ build_flag_vendor \ cgroups.recovery.json \ charger.recovery \ @@ -73,6 +72,12 @@ PRODUCT_PACKAGES += \ passwd_vendor \ selinux_policy_nonsystem \ shell_and_utilities_vendor \ + odm-build.prop \ + +# libhealthloop BPF filter. This is in base_vendor.mk because libhealthloop must +# be a static library and because the Android build system ignores 'required' +# sections for static libraries. +PRODUCT_PACKAGES += filterPowerSupplyEvents.o # Base modules when shipping api level is less than or equal to 34 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_34 += \ @@ -105,3 +110,9 @@ PRODUCT_PACKAGES += \ PRODUCT_PACKAGES += \ adb_debug.prop \ userdebug_plat_sepolicy.cil + +# On eng or userdebug builds, build in perf-setup-sh by default. +ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT))) +PRODUCT_PACKAGES += \ + perf-setup-sh +endif diff --git a/target/product/build_variables.mk b/target/product/build_variables.mk index 5fe5333f45..9fc9ff9dc1 100644 --- a/target/product/build_variables.mk +++ b/target/product/build_variables.mk @@ -17,5 +17,8 @@ # This file contains the trunk-stable flags that should be exported to all # Android targets. 
+# Control libbinder client caching +$(call soong_config_set, libbinder, release_libbinder_client_cache, $(RELEASE_LIBBINDER_CLIENT_CACHE)) + # Use the configured release of sqlite $(call soong_config_set, libsqlite3, release_package_libsqlite3, $(RELEASE_PACKAGE_LIBSQLITE3)) diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk index 1a3f2cf0e8..668f054773 100644 --- a/target/product/default_art_config.mk +++ b/target/product/default_art_config.mk @@ -76,6 +76,7 @@ PRODUCT_APEX_BOOT_JARS := \ com.android.mediaprovider:framework-mediaprovider \ com.android.mediaprovider:framework-pdf \ com.android.mediaprovider:framework-pdf-v \ + com.android.mediaprovider:framework-photopicker \ com.android.ondevicepersonalization:framework-ondevicepersonalization \ com.android.os.statsd:framework-statsd \ com.android.permission:framework-permission \ @@ -113,6 +114,12 @@ ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true) endif +ifneq (,$(RELEASE_RANGING_STACK)) + PRODUCT_APEX_BOOT_JARS += \ + com.android.uwb:framework-ranging \ + $(call soong_config_set,bootclasspath,release_ranging_stack,true) +endif + # List of system_server classpath jars delivered via apex. # Keep the list sorted by module names and then library names. # Note: For modules available in Q, DO NOT add new entries here. @@ -168,6 +175,11 @@ ifeq ($(RELEASE_PACKAGE_PROFILING_MODULE),true) endif +ifneq (,$(RELEASE_RANGING_STACK)) + PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS += \ + com.android.uwb:service-ranging +endif + # Overrides the (apex, jar) pairs above when determining the on-device location. 
The format is: # <old_apex>:<old_jar>:<new_apex>:<new_jar> PRODUCT_CONFIGURED_JAR_LOCATION_OVERRIDES := \ diff --git a/target/product/generic/Android.bp b/target/product/generic/Android.bp new file mode 100644 index 0000000000..c14fa172b1 --- /dev/null +++ b/target/product/generic/Android.bp @@ -0,0 +1,826 @@ +android_rootdirs = [ + "acct", + "apex", + "bootstrap-apex", + "config", + "data", + "data_mirror", + "debug_ramdisk", + "dev", + "linkerconfig", + "metadata", + "mnt", + "odm", + "odm_dlkm", + "oem", + "postinstall", + "proc", + "product", + "second_stage_resources", + "storage", + "sys", + "system", + "system_dlkm", + "system_ext", + "tmp", + "vendor", + "vendor_dlkm", +] + +android_symlinks = [ + { + target: "/system/bin/init", + name: "init", + }, + { + target: "/system/etc", + name: "etc", + }, + { + target: "/system/bin", + name: "bin", + }, + { + target: "/product", + name: "system/product", + }, + { + target: "/vendor", + name: "system/vendor", + }, + { + target: "/system_ext", + name: "system/system_ext", + }, + { + target: "/system_dlkm/lib/modules", + name: "system/lib/modules", + }, + { + target: "/data/user_de/0/com.android.shell/files/bugreports", + name: "bugreports", + }, + { + target: "/data/cache", + name: "cache", + }, + { + target: "/sys/kernel/debug", + name: "d", + }, + { + target: "/storage/self/primary", + name: "sdcard", + }, + { + target: "/product/etc/security/adb_keys", + name: "adb_keys", + }, +] + +filegroup { + name: "generic_system_sign_key", + srcs: [":avb_testkey_rsa4096"], +} + +phony { + name: "generic_system_fonts", + required: [ + "AndroidClock.ttf", + "CarroisGothicSC-Regular.ttf", + "ComingSoon.ttf", + "CutiveMono.ttf", + "DancingScript-Regular.ttf", + "DroidSansMono.ttf", + "NotoColorEmoji.ttf", + "NotoColorEmojiFlags.ttf", + "NotoNaskhArabic-Bold.ttf", + "NotoNaskhArabic-Regular.ttf", + "NotoNaskhArabicUI-Bold.ttf", + "NotoNaskhArabicUI-Regular.ttf", + "NotoSansAdlam-VF.ttf", + "NotoSansAhom-Regular.otf", + 
"NotoSansAnatolianHieroglyphs-Regular.otf", + "NotoSansArmenian-VF.ttf", + "NotoSansAvestan-Regular.ttf", + "NotoSansBalinese-Regular.ttf", + "NotoSansBamum-Regular.ttf", + "NotoSansBassaVah-Regular.otf", + "NotoSansBatak-Regular.ttf", + "NotoSansBengali-VF.ttf", + "NotoSansBengaliUI-VF.ttf", + "NotoSansBhaiksuki-Regular.otf", + "NotoSansBrahmi-Regular.ttf", + "NotoSansBuginese-Regular.ttf", + "NotoSansBuhid-Regular.ttf", + "NotoSansCJK-Regular.ttc", + "NotoSansCanadianAboriginal-Regular.ttf", + "NotoSansCarian-Regular.ttf", + "NotoSansChakma-Regular.otf", + "NotoSansCham-Bold.ttf", + "NotoSansCham-Regular.ttf", + "NotoSansCherokee-Regular.ttf", + "NotoSansCoptic-Regular.ttf", + "NotoSansCuneiform-Regular.ttf", + "NotoSansCypriot-Regular.ttf", + "NotoSansDeseret-Regular.ttf", + "NotoSansDevanagari-VF.ttf", + "NotoSansDevanagariUI-VF.ttf", + "NotoSansEgyptianHieroglyphs-Regular.ttf", + "NotoSansElbasan-Regular.otf", + "NotoSansEthiopic-VF.ttf", + "NotoSansGeorgian-VF.ttf", + "NotoSansGlagolitic-Regular.ttf", + "NotoSansGothic-Regular.ttf", + "NotoSansGrantha-Regular.ttf", + "NotoSansGujarati-Bold.ttf", + "NotoSansGujarati-Regular.ttf", + "NotoSansGujaratiUI-Bold.ttf", + "NotoSansGujaratiUI-Regular.ttf", + "NotoSansGunjalaGondi-Regular.otf", + "NotoSansGurmukhi-VF.ttf", + "NotoSansGurmukhiUI-VF.ttf", + "NotoSansHanifiRohingya-Regular.otf", + "NotoSansHanunoo-Regular.ttf", + "NotoSansHatran-Regular.otf", + "NotoSansHebrew-Bold.ttf", + "NotoSansHebrew-Regular.ttf", + "NotoSansImperialAramaic-Regular.ttf", + "NotoSansInscriptionalPahlavi-Regular.ttf", + "NotoSansInscriptionalParthian-Regular.ttf", + "NotoSansJavanese-Regular.otf", + "NotoSansKaithi-Regular.ttf", + "NotoSansKannada-VF.ttf", + "NotoSansKannadaUI-VF.ttf", + "NotoSansKayahLi-Regular.ttf", + "NotoSansKharoshthi-Regular.ttf", + "NotoSansKhmer-VF.ttf", + "NotoSansKhmerUI-Bold.ttf", + "NotoSansKhmerUI-Regular.ttf", + "NotoSansKhojki-Regular.otf", + "NotoSansLao-Bold.ttf", + "NotoSansLao-Regular.ttf", + 
"NotoSansLaoUI-Bold.ttf", + "NotoSansLaoUI-Regular.ttf", + "NotoSansLepcha-Regular.ttf", + "NotoSansLimbu-Regular.ttf", + "NotoSansLinearA-Regular.otf", + "NotoSansLinearB-Regular.ttf", + "NotoSansLisu-Regular.ttf", + "NotoSansLycian-Regular.ttf", + "NotoSansLydian-Regular.ttf", + "NotoSansMalayalam-VF.ttf", + "NotoSansMalayalamUI-VF.ttf", + "NotoSansMandaic-Regular.ttf", + "NotoSansManichaean-Regular.otf", + "NotoSansMarchen-Regular.otf", + "NotoSansMasaramGondi-Regular.otf", + "NotoSansMedefaidrin-VF.ttf", + "NotoSansMeeteiMayek-Regular.ttf", + "NotoSansMeroitic-Regular.otf", + "NotoSansMiao-Regular.otf", + "NotoSansModi-Regular.ttf", + "NotoSansMongolian-Regular.ttf", + "NotoSansMro-Regular.otf", + "NotoSansMultani-Regular.otf", + "NotoSansMyanmar-Bold.otf", + "NotoSansMyanmar-Medium.otf", + "NotoSansMyanmar-Regular.otf", + "NotoSansMyanmarUI-Bold.otf", + "NotoSansMyanmarUI-Medium.otf", + "NotoSansMyanmarUI-Regular.otf", + "NotoSansNKo-Regular.ttf", + "NotoSansNabataean-Regular.otf", + "NotoSansNewTaiLue-Regular.ttf", + "NotoSansNewa-Regular.otf", + "NotoSansOgham-Regular.ttf", + "NotoSansOlChiki-Regular.ttf", + "NotoSansOldItalic-Regular.ttf", + "NotoSansOldNorthArabian-Regular.otf", + "NotoSansOldPermic-Regular.otf", + "NotoSansOldPersian-Regular.ttf", + "NotoSansOldSouthArabian-Regular.ttf", + "NotoSansOldTurkic-Regular.ttf", + "NotoSansOriya-Bold.ttf", + "NotoSansOriya-Regular.ttf", + "NotoSansOriyaUI-Bold.ttf", + "NotoSansOriyaUI-Regular.ttf", + "NotoSansOsage-Regular.ttf", + "NotoSansOsmanya-Regular.ttf", + "NotoSansPahawhHmong-Regular.otf", + "NotoSansPalmyrene-Regular.otf", + "NotoSansPauCinHau-Regular.otf", + "NotoSansPhagsPa-Regular.ttf", + "NotoSansPhoenician-Regular.ttf", + "NotoSansRejang-Regular.ttf", + "NotoSansRunic-Regular.ttf", + "NotoSansSamaritan-Regular.ttf", + "NotoSansSaurashtra-Regular.ttf", + "NotoSansSharada-Regular.otf", + "NotoSansShavian-Regular.ttf", + "NotoSansSinhala-VF.ttf", + "NotoSansSinhalaUI-VF.ttf", + 
"NotoSansSoraSompeng-Regular.otf", + "NotoSansSoyombo-VF.ttf", + "NotoSansSundanese-Regular.ttf", + "NotoSansSylotiNagri-Regular.ttf", + "NotoSansSymbols-Regular-Subsetted.ttf", + "NotoSansSymbols-Regular-Subsetted2.ttf", + "NotoSansSyriacEastern-Regular.ttf", + "NotoSansSyriacEstrangela-Regular.ttf", + "NotoSansSyriacWestern-Regular.ttf", + "NotoSansTagalog-Regular.ttf", + "NotoSansTagbanwa-Regular.ttf", + "NotoSansTaiLe-Regular.ttf", + "NotoSansTaiTham-Regular.ttf", + "NotoSansTaiViet-Regular.ttf", + "NotoSansTakri-VF.ttf", + "NotoSansTamil-VF.ttf", + "NotoSansTamilUI-VF.ttf", + "NotoSansTelugu-VF.ttf", + "NotoSansTeluguUI-VF.ttf", + "NotoSansThaana-Bold.ttf", + "NotoSansThaana-Regular.ttf", + "NotoSansThai-Bold.ttf", + "NotoSansThai-Regular.ttf", + "NotoSansThaiUI-Bold.ttf", + "NotoSansThaiUI-Regular.ttf", + "NotoSansTifinagh-Regular.otf", + "NotoSansUgaritic-Regular.ttf", + "NotoSansVai-Regular.ttf", + "NotoSansWancho-Regular.otf", + "NotoSansWarangCiti-Regular.otf", + "NotoSansYi-Regular.ttf", + "NotoSerif-Bold.ttf", + "NotoSerif-BoldItalic.ttf", + "NotoSerif-Italic.ttf", + "NotoSerif-Regular.ttf", + "NotoSerifArmenian-VF.ttf", + "NotoSerifBengali-VF.ttf", + "NotoSerifCJK-Regular.ttc", + "NotoSerifDevanagari-VF.ttf", + "NotoSerifDogra-Regular.ttf", + "NotoSerifEthiopic-VF.ttf", + "NotoSerifGeorgian-VF.ttf", + "NotoSerifGujarati-VF.ttf", + "NotoSerifGurmukhi-VF.ttf", + "NotoSerifHebrew-Bold.ttf", + "NotoSerifHebrew-Regular.ttf", + "NotoSerifHentaigana.ttf", + "NotoSerifKannada-VF.ttf", + "NotoSerifKhmer-Bold.otf", + "NotoSerifKhmer-Regular.otf", + "NotoSerifLao-Bold.ttf", + "NotoSerifLao-Regular.ttf", + "NotoSerifMalayalam-VF.ttf", + "NotoSerifMyanmar-Bold.otf", + "NotoSerifMyanmar-Regular.otf", + "NotoSerifNyiakengPuachueHmong-VF.ttf", + "NotoSerifSinhala-VF.ttf", + "NotoSerifTamil-VF.ttf", + "NotoSerifTelugu-VF.ttf", + "NotoSerifThai-Bold.ttf", + "NotoSerifThai-Regular.ttf", + "NotoSerifTibetan-VF.ttf", + "NotoSerifYezidi-VF.ttf", + "Roboto-Regular.ttf", + 
"RobotoFlex-Regular.ttf", + "RobotoStatic-Regular.ttf", + "SourceSansPro-Bold.ttf", + "SourceSansPro-BoldItalic.ttf", + "SourceSansPro-Italic.ttf", + "SourceSansPro-Regular.ttf", + "SourceSansPro-SemiBold.ttf", + "SourceSansPro-SemiBoldItalic.ttf", + "font_fallback.xml", + "fonts.xml", + ], +} + +android_system_image { + name: "generic_system_image", + + partition_name: "system", + base_dir: "system", + dirs: android_rootdirs, + symlinks: android_symlinks, + file_contexts: ":plat_file_contexts", + linker_config_src: ":system_linker_config_json_file", + fsverity: { + inputs: [ + "etc/boot-image.prof", + "etc/classpaths/*.pb", + "etc/dirty-image-objects", + "etc/preloaded-classes", + "framework/*", + "framework/*/*", // framework/{arch} + "framework/oat/*/*", // framework/oat/{arch} + ], + libs: [":framework-res{.export-package.apk}"], + }, + build_logtags: true, + gen_aconfig_flags_pb: true, + + compile_multilib: "both", + + use_avb: true, + avb_private_key: ":generic_system_sign_key", + avb_algorithm: "SHA256_RSA4096", + avb_hash_algorithm: "sha256", + + deps: [ + "abx", + "aconfigd", + "aflags", + "am", + "android.software.credentials.prebuilt.xml", // generic_system + "android.software.webview.prebuilt.xml", // media_system + "android.software.window_magnification.prebuilt.xml", // handheld_system + "android.system.suspend-service", + "prebuilt_vintf_manifest", + "apexd", + "appops", + "approved-ogki-builds.xml", // base_system + "appwidget", + "atrace", + "audioserver", + "bcc", + "blank_screen", + "blkid", + "bmgr", + "bootanimation", + "bootstat", + "bpfloader", + "bu", + "bugreport", + "bugreportz", + "cameraserver", + "cgroups.json", + "cmd", + "content", + "cppreopts.sh", // generic_system + "credstore", + "debuggerd", + "device_config", + "dirty-image-objects", + "dmctl", + "dmesgd", + "dnsmasq", + "dpm", + "dump.erofs", + "dumpstate", + "dumpsys", + "e2fsck", + "enhanced-confirmation.xml", // base_system + "etc_hosts", + "flags_health_check", + 
"framework-audio_effects.xml", // for handheld // handheld_system + "framework-sysconfig.xml", + "fs_config_dirs_system", + "fs_config_files_system", + "fsck.erofs", + "fsck.f2fs", // for media_system + "fsck_msdos", + "fsverity-release-cert-der", + "gatekeeperd", + "gpu_counter_producer", + "gpuservice", + "group_system", + "gsi_tool", + "gsid", + "heapprofd", + "hid", + "hiddenapi-package-whitelist.xml", // from runtime_libart + "idc_data", + "idmap2", + "idmap2d", + "ime", + "incident", + "incident-helper-cmd", + "incident_helper", + "incidentd", + "init.environ.rc-soong", + "init.usb.configfs.rc", + "init.usb.rc", + "init.zygote32.rc", + "init.zygote64.rc", + "init.zygote64_32.rc", + "init_first_stage", // for boot partition + "initial-package-stopped-states.xml", + "input", + "installd", + "ip", // base_system + "iptables", + "kcmdlinectrl", + "kernel-lifetimes.xml", // base_system + "keychars_data", + "keylayout_data", + "keystore2", + "ld.mc", + "llkd", // base_system + "lmkd", // base_system + "local_time.default", // handheld_vendo + "locksettings", // base_system + "logcat", // base_system + "logd", // base_system + "logpersist.start", + "lpdump", // base_system + "lshal", // base_system + "make_f2fs", // media_system + "mdnsd", // base_system + "media_profiles_V1_0.dtd", // base_system + "mediacodec.policy", // base_system + "mediaextractor", // base_system + "mediametrics", // base_system + "misctrl", // from base_system + "mke2fs", // base_system + "mkfs.erofs", // base_system + "monkey", // base_system + "mtectrl", // base_system + "ndc", // base_system + "netd", // base_system + "netutils-wrapper-1.0", // full_base + "notice_xml_system", + "odsign", // base_system + "otapreopt_script", // generic_system + "package-shareduid-allowlist.xml", // base_system + "passwd_system", // base_system + "perfetto", // base_system + "ping", // base_system + "ping6", // base_system + "pintool", // base_system + "platform.xml", // base_system + "pm", // base_system + 
"preinstalled-packages-asl-files.xml", // base_system + "preinstalled-packages-platform-generic-system.xml", // generic_system + "preinstalled-packages-platform-handheld-system.xml", // handheld_system + "preinstalled-packages-platform.xml", // base_system + "preinstalled-packages-strict-signature.xml", // base_system + "preloaded-classes", // ok + "printflags", // base_system + "privapp-permissions-platform.xml", // base_system + "prng_seeder", // base_system + "public.libraries.android.txt", + "recovery-persist", // base_system + "recovery-refresh", // generic_system + "requestsync", // media_system + "resize2fs", // base_system + "rss_hwm_reset", // base_system + "run-as", // base_system + "schedtest", // base_system + "screencap", // base_system + "screenrecord", // handheld_system + "sdcard", // base_system + "secdiscard", // base_system + "sensorservice", // base_system + "service", // base_system + "servicemanager", // base_system + "settings", // base_system + "sfdo", // base_system + "sgdisk", // base_system + "sm", // base_system + "snapshotctl", // base_system + "snapuserd", // base_system + "snapuserd_ramdisk", // ramdisk + "storaged", // base_system + "surfaceflinger", // base_system + "svc", // base_system + "task_profiles.json", // base_system + "tc", // base_system + "telecom", // base_system + "tombstoned", // base_system + "traced", // base_system + "traced_probes", // base_system + "tune2fs", // base_system + "uiautomator", // base_system + "uinput", // base_system + "uncrypt", // base_system + "update_engine", // generic_system + "update_engine_sideload", // recovery + "update_verifier", // generic_system + "usbd", // base_system + "vdc", // base_system + "virtual_camera", // handheld_system // release_package_virtual_camera + "vold", // base_system + "vr", // handheld_system + "watchdogd", // base_system + "wifi.rc", // base_system + "wificond", // base_system + "wm", // base_system + ] + 
select(release_flag("RELEASE_PLATFORM_VERSION_CODENAME"), { + "REL": [], + default: [ + "android.software.preview_sdk.prebuilt.xml", // media_system + ], + }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), { + "true": [ + "trace_redactor", // base_system (RELEASE_PACKAGE_PROFILING_MODULE) + ], + default: [], + }) + select(product_variable("debuggable"), { + true: [ + "adevice_fingerprint", + "arping", + "avbctl", + "bootctl", + "dmuserd", + "evemu-record", + "idlcli", + "init-debug.rc", + "iotop", + "iperf3", + "iw", + "layertracegenerator", + "logtagd.rc", + "ot-cli-ftd", + "ot-ctl", + "procrank", + "profcollectctl", + "profcollectd", + "record_binder", + "sanitizer-status", + "servicedispatcher", + "showmap", + "sqlite3", + "ss", + "start_with_lockagent", + "strace", + "su", + "tinycap", + "tinyhostless", + "tinymix", + "tinypcminfo", + "tinyplay", // host + "tracepath", + "tracepath6", + "traceroute6", + "unwind_info", + "unwind_reg_info", + "unwind_symbols", + "update_engine_client", + ], + default: [], + }), + multilib: { + common: { + deps: [ + "BackupRestoreConfirmation", // base_system + "BasicDreams", // handheld_system + "BlockedNumberProvider", // handheld_system + "BluetoothMidiService", // handheld_system + "BookmarkProvider", // handheld_system + "BuiltInPrintService", // handheld_system + "CalendarProvider", // handheld_system + "CallLogBackup", // telephony_system + "CameraExtensionsProxy", // handheld_system + "CaptivePortalLogin", // handheld_system + "CarrierDefaultApp", // telephony_system + "CellBroadcastLegacyApp", // telephony_system + "CertInstaller", // handheld_system + "CompanionDeviceManager", // media_system + "ContactsProvider", // base_system + "CredentialManager", // handheld_system + "DeviceAsWebcam", // handheld_system + "DocumentsUI", // handheld_system + "DownloadProvider", // base_system + "DownloadProviderUi", // handheld_system + "DynamicSystemInstallationService", // base_system + 
"E2eeContactKeysProvider", // base_system + "EasterEgg", // handheld_system + "ExtShared", // base_system + "ExternalStorageProvider", // handheld_system + "FusedLocation", // handheld_system + "HTMLViewer", // media_system + "InputDevices", // handheld_system + "IntentResolver", // base_system + "KeyChain", // handheld_system + "LiveWallpapersPicker", // generic_system, full_base + "LocalTransport", // base_system + "ManagedProvisioning", // handheld_system + "MediaProviderLegacy", // base_system + "MmsService", // handheld_system + "MtpService", // handheld_system + "MusicFX", // handheld_system + "NetworkStack", // base_system + "ONS", // telephony_system + "PacProcessor", // handheld_system + "PackageInstaller", // base_system + "PartnerBookmarksProvider", // generic_system + "PhotoTable", // full_base + "PrintRecommendationService", // handheld_system + "PrintSpooler", // handheld_system + "ProxyHandler", // handheld_system + "SecureElement", // handheld_system + "SettingsProvider", // base_system + "SharedStorageBackup", // handheld_system + "Shell", // base_system + "SimAppDialog", // handheld_system + "SoundPicker", // not installed by anyone + "StatementService", // media_system + "Stk", // generic_system + "Tag", // generic_system + "TeleService", // handheld_system + "Telecom", // handheld_system + "TelephonyProvider", // handheld_system + "Traceur", // handheld_system + "UserDictionaryProvider", // handheld_system + "VpnDialogs", // handheld_system + "WallpaperBackup", // base_system + "adbd_system_api", // base_system + "android.hidl.base-V1.0-java", // base_system + "android.hidl.manager-V1.0-java", // base_system + "android.test.base", // from runtime_libart + "android.test.mock", // base_system + "android.test.runner", // base_system + "aosp_mainline_modules", // ok + "build_flag_system", // base_system + "charger_res_images", // generic_system + "com.android.apex.cts.shim.v1_prebuilt", // ok + "com.android.cellbroadcast", // telephony_system + 
"com.android.future.usb.accessory", // media_system + "com.android.location.provider", // base_system + "com.android.media.remotedisplay", // media_system + "com.android.media.remotedisplay.xml", // media_system + "com.android.mediadrm.signer", // media_system + "com.android.nfc_extras", // ok + "com.android.nfcservices", // base_system (RELEASE_PACKAGE_NFC_STACK != NfcNci) + "com.android.runtime", // ok + "dex_bootjars", + "ext", // from runtime_libart + "framework-graphics", // base_system + "framework-location", // base_system + "framework-minus-apex-install-dependencies", // base_system + "framework_compatibility_matrix.device.xml", + "generic_system_fonts", // ok + "hwservicemanager_compat_symlink_module", // base_system + "hyph-data", + "ims-common", // base_system + "init_system", // base_system + "javax.obex", // base_system + "llndk.libraries.txt", //ok + "org.apache.http.legacy", // base_system + "perfetto-extras", // system + "sanitizer.libraries.txt", // base_system + "selinux_policy_system_soong", // ok + "services", // base_system + "shell_and_utilities_system", // ok + "system-build.prop", + "system_compatibility_matrix.xml", //base_system + "telephony-common", // libs from TeleService + "voip-common", // base_system + ] + select(soong_config_variable("ANDROID", "release_crashrecovery_module"), { + "true": [ + "com.android.crashrecovery", // base_system (RELEASE_CRASHRECOVERY_MODULE) + ], + default: [], + }) + select(soong_config_variable("ANDROID", "release_package_profiling_module"), { + "true": [ + "com.android.profiling", // base_system (RELEASE_PACKAGE_PROFILING_MODULE) + ], + default: [], + }) + select(release_flag("RELEASE_AVATAR_PICKER_APP"), { + true: [ + "AvatarPicker", // generic_system (RELEASE_AVATAR_PICKER_APP) + ], + default: [], + }), + }, + prefer32: { + deps: [ + "drmserver", // media_system + "mediaserver", // base_system + ], + }, + lib64: { + deps: [ + "android.system.virtualizationcommon-ndk", + 
"android.system.virtualizationservice-ndk", + "libgsi", + "servicemanager", + ], + }, + both: { + deps: [ + "android.hardware.biometrics.fingerprint@2.1", // generic_system + "android.hardware.radio.config@1.0", // generic_system + "android.hardware.radio.deprecated@1.0", // generic_system + "android.hardware.radio@1.0", // generic_system + "android.hardware.radio@1.1", // generic_system + "android.hardware.radio@1.2", // generic_system + "android.hardware.radio@1.3", // generic_system + "android.hardware.radio@1.4", // generic_system + "android.hardware.secure_element@1.0", // generic_system + "app_process", // base_system + "boringssl_self_test", // base_system + "heapprofd_client", // base_system + "libEGL", // base_system + "libEGL_angle", // base_system + "libETC1", // base_system + "libFFTEm", // base_system + "libGLESv1_CM", // base_system + "libGLESv1_CM_angle", // base_system + "libGLESv2", // base_system + "libGLESv2_angle", // base_system + "libGLESv3", // base_system + "libOpenMAXAL", // base_system + "libOpenSLES", // base_system + "libaaudio", // base_system + "libalarm_jni", // base_system + "libamidi", // base_system + "libandroid", + "libandroid_runtime", + "libandroid_servers", + "libandroidfw", + "libartpalette-system", + "libaudio-resampler", // generic-system + "libaudioeffect_jni", + "libaudiohal", // generic-system + "libaudiopolicyengineconfigurable", // generic-system + "libbinder", + "libbinder_ndk", + "libbinder_rpc_unstable", + "libcamera2ndk", + "libclang_rt.asan", + "libcompiler_rt", + "libcutils", // used by many libs + "libdmabufheap", // used by many libs + "libdrm", // used by many libs // generic_system + "libdrmframework", // base_system + "libdrmframework_jni", // base_system + "libfdtrack", // base_system + "libfilterfw", // base_system + "libfilterpack_imageproc", // media_system + "libfwdlockengine", // generic_system + "libgatekeeper", // base_system + "libgui", // base_system + "libhardware", // base_system + 
"libhardware_legacy", // base_system + "libhidltransport", // generic_system + "libhwbinder", // generic_system + "libinput", // base_system + "libinputflinger", // base_system + "libiprouteutil", // base_system + "libjnigraphics", // base_system + "libjpeg", // base_system + "liblog", // base_system + "liblogwrap", // generic_system + "liblz4", // generic_system + "libmedia", // base_system + "libmedia_jni", // base_system + "libmediandk", // base_system + "libminui", // generic_system + "libmtp", // base_system + "libnetd_client", // base_system + "libnetlink", // base_system + "libnetutils", // base_system + "libneuralnetworks_packageinfo", // base_system + "libnl", // generic_system + "libpdfium", // base_system + "libpolicy-subsystem", // generic_system + "libpower", // base_system + "libpowermanager", // base_system + "libprotobuf-cpp-full", // generic_system + "libradio_metadata", // base_system + "librs_jni", // handheld_system + "librtp_jni", // base_system + "libsensorservice", // base_system + "libsfplugin_ccodec", // base_system + "libskia", // base_system + "libsonic", // base_system + "libsonivox", // base_system + "libsoundpool", // base_system + "libspeexresampler", // base_system + "libsqlite", // base_system + "libstagefright", // base_system + "libstagefright_foundation", // base_system + "libstagefright_omx", // base_system + "libstdc++", // base_system + "libsysutils", // base_system + "libui", // base_system + "libusbhost", // base_system + "libutils", // base_system + "libvendorsupport", // llndk library + "libvintf_jni", // base_system + "libvulkan", // base_system + "libwebviewchromium_loader", // media_system + "libwebviewchromium_plat_support", // media_system + "libwilhelm", // base_system + "linker", // base_system + ] + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_DRMSERVER"), { + "true": ["drmserver"], + default: [], + }) + select(soong_config_variable("ANDROID", "TARGET_DYNAMIC_64_32_MEDIASERVER"), { + "true": 
["mediaserver"], + default: [], + }), + }, + }, +} + +prebuilt_etc { + name: "prebuilt_vintf_manifest", + src: "manifest.xml", + filename: "manifest.xml", + relative_install_path: "vintf", + no_full_install: true, +} diff --git a/target/product/generic/OWNERS b/target/product/generic/OWNERS new file mode 100644 index 0000000000..6d1446f099 --- /dev/null +++ b/target/product/generic/OWNERS @@ -0,0 +1,6 @@ +# Bug component: 1322713 +inseob@google.com +jeongik@google.com +jiyong@google.com +justinyun@google.com +kiyoungkim@google.com diff --git a/target/product/generic/manifest.xml b/target/product/generic/manifest.xml new file mode 100644 index 0000000000..1df2c0d0cf --- /dev/null +++ b/target/product/generic/manifest.xml @@ -0,0 +1,54 @@ +<!-- + Input: + system/libhidl/vintfdata/manifest.xml +--> +<manifest version="8.0" type="framework"> + <hal format="hidl" max-level="6"> + <name>android.frameworks.displayservice</name> + <transport>hwbinder</transport> + <fqname>@1.0::IDisplayService/default</fqname> + </hal> + <hal format="hidl" max-level="5"> + <name>android.frameworks.schedulerservice</name> + <transport>hwbinder</transport> + <fqname>@1.0::ISchedulingPolicyService/default</fqname> + </hal> + <hal format="aidl"> + <name>android.frameworks.sensorservice</name> + <fqname>ISensorManager/default</fqname> + </hal> + <hal format="hidl" max-level="8"> + <name>android.frameworks.sensorservice</name> + <transport>hwbinder</transport> + <fqname>@1.0::ISensorManager/default</fqname> + </hal> + <hal format="hidl" max-level="8"> + <name>android.hidl.memory</name> + <transport arch="32+64">passthrough</transport> + <fqname>@1.0::IMapper/ashmem</fqname> + </hal> + <hal format="hidl" max-level="7"> + <name>android.system.net.netd</name> + <transport>hwbinder</transport> + <fqname>@1.1::INetd/default</fqname> + </hal> + <hal format="hidl" max-level="7"> + <name>android.system.wifi.keystore</name> + <transport>hwbinder</transport> + <fqname>@1.0::IKeystore/default</fqname> + 
</hal> + <hal format="native"> + <name>netutils-wrapper</name> + <version>1.0</version> + </hal> + <system-sdk> + <version>29</version> + <version>30</version> + <version>31</version> + <version>32</version> + <version>33</version> + <version>34</version> + <version>35</version> + <version>VanillaIceCream</version> + </system-sdk> +</manifest> diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk index 0a09eb11d4..b9a623dcd3 100644 --- a/target/product/generic_system.mk +++ b/target/product/generic_system.mk @@ -152,4 +152,5 @@ _my_paths := \ $(call require-artifacts-in-path, $(_my_paths), $(_my_allowed_list)) # Product config map to toggle between sources and prebuilts of required mainline modules +PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline/required/release_config_map.textproto) PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline/required/release_config_map.textproto) diff --git a/target/product/go_defaults.mk b/target/product/go_defaults.mk index c9285307ab..ccc4f365e7 100644 --- a/target/product/go_defaults.mk +++ b/target/product/go_defaults.mk @@ -18,6 +18,7 @@ $(call inherit-product, build/make/target/product/go_defaults_common.mk) # Product config map to toggle between sources and prebuilts of required mainline modules +PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard build/release/gms_mainline_go/required/release_config_map.textproto) PRODUCT_RELEASE_CONFIG_MAPS += $(wildcard vendor/google_shared/build/release/gms_mainline_go/required/release_config_map.textproto) # Add the system properties. diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk index 5218f29c0a..0fcf16b753 100644 --- a/target/product/go_defaults_common.mk +++ b/target/product/go_defaults_common.mk @@ -24,11 +24,6 @@ PRODUCT_VENDOR_PROPERTIES += \ # Speed profile services and wifi-service to reduce RAM and storage. 
PRODUCT_SYSTEM_SERVER_COMPILER_FILTER := speed-profile -# Use a profile based boot image for this device. Note that this is currently a -# generic profile and not Android Go optimized. -PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE := true -PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION := frameworks/base/config/boot-image-profile.txt - # Do not generate libartd. PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false @@ -37,9 +32,9 @@ PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false # leave less information available via JDWP. PRODUCT_MINIMIZE_JAVA_DEBUG_INFO := true -# Disable Scudo outside of eng builds to save RAM. +# Use the low memory allocator outside of eng builds to save RSS. ifneq (,$(filter eng, $(TARGET_BUILD_VARIANT))) - PRODUCT_DISABLE_SCUDO := true + MALLOC_LOW_MEMORY := true endif # Add the system properties. diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp index 45ba14331b..f18f35a328 100644 --- a/target/product/gsi/Android.bp +++ b/target/product/gsi/Android.bp @@ -46,3 +46,18 @@ install_symlink { installed_location: "etc/init/config", symlink_target: "/system/system_ext/etc/init/config", } + +// init.gsi.rc, GSI-specific init script. 
+prebuilt_etc { + name: "init.gsi.rc", + src: "init.gsi.rc", + system_ext_specific: true, + relative_install_path: "init", +} + +prebuilt_etc { + name: "init.vndk-nodef.rc", + src: "init.vndk-nodef.rc", + system_ext_specific: true, + relative_install_path: "gsi", +} diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk index 36897fef8e..cdd5008c0a 100644 --- a/target/product/gsi/Android.mk +++ b/target/product/gsi/Android.mk @@ -121,48 +121,3 @@ LOCAL_LICENSE_CONDITIONS := notice LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(PRODUCT_EXTRA_VNDK_VERSIONS),com.android.vndk.v$(vndk_ver)) include $(BUILD_PHONY_PACKAGE) - -##################################################################### -# Define Phony module to install LLNDK modules which are installed in -# the system image -include $(CLEAR_VARS) -LOCAL_MODULE := llndk_in_system -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE - -# Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB -LOCAL_REQUIRED_MODULES := \ - $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES)) \ - llndk.libraries.txt - - -include $(BUILD_PHONY_PACKAGE) - -##################################################################### -# init.gsi.rc, GSI-specific init script. 
- -include $(CLEAR_VARS) -LOCAL_MODULE := init.gsi.rc -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_SRC_FILES := $(LOCAL_MODULE) -LOCAL_MODULE_CLASS := ETC -LOCAL_SYSTEM_EXT_MODULE := true -LOCAL_MODULE_RELATIVE_PATH := init - -include $(BUILD_PREBUILT) - - -include $(CLEAR_VARS) -LOCAL_MODULE := init.vndk-nodef.rc -LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 -LOCAL_LICENSE_CONDITIONS := notice -LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE -LOCAL_SRC_FILES := $(LOCAL_MODULE) -LOCAL_MODULE_CLASS := ETC -LOCAL_SYSTEM_EXT_MODULE := true -LOCAL_MODULE_RELATIVE_PATH := gsi - -include $(BUILD_PREBUILT) diff --git a/target/product/large_screen_common.mk b/target/product/large_screen_common.mk new file mode 100644 index 0000000000..3eb9ff05e5 --- /dev/null +++ b/target/product/large_screen_common.mk @@ -0,0 +1,21 @@ +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# Window Extensions +$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk) + +# Enable Settings 2-pane optimization for large-screen +PRODUCT_SYSTEM_PROPERTIES += \ + persist.settings.large_screen_opt.enabled=true diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk index 58234a8a8c..9e8afa85a4 100644 --- a/target/product/runtime_libart.mk +++ b/target/product/runtime_libart.mk @@ -181,3 +181,5 @@ PRODUCT_SYSTEM_PROPERTIES += \ # Copy preopted files from system_b on first boot. PRODUCT_SYSTEM_PROPERTIES += ro.cp_system_other_odex=1 +PRODUCT_PACKAGES += \ + cppreopts.sh diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp index 0d7b35e1c9..69d19a3e40 100644 --- a/target/product/security/Android.bp +++ b/target/product/security/Android.bp @@ -37,3 +37,7 @@ otacerts_zip { relative_install_path: "security", filename: "otacerts.zip", } + +adb_keys { + name: "adb_keys", +} diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk deleted file mode 100644 index 91b272c1bd..0000000000 --- a/target/product/security/Android.mk +++ /dev/null @@ -1,17 +0,0 @@ -LOCAL_PATH:= $(call my-dir) - -####################################### -# adb key, if configured via PRODUCT_ADB_KEYS -ifdef PRODUCT_ADB_KEYS - ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),) - include $(CLEAR_VARS) - LOCAL_MODULE := adb_keys - LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 - LOCAL_LICENSE_CONDITIONS := notice - LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE - LOCAL_MODULE_CLASS := ETC - LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT) - LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS) - include $(BUILD_PREBUILT) - endif -endif diff --git a/target/product/userspace_reboot.mk b/target/product/userspace_reboot.mk index f235d146e3..51feb0721f 100644 --- a/target/product/userspace_reboot.mk +++ b/target/product/userspace_reboot.mk @@ -14,6 +14,4 @@ # limitations under the License. 
# -# Inherit this when the target supports userspace reboot - -PRODUCT_VENDOR_PROPERTIES := init.userspace_reboot.is_supported=true +# DEPRECATED! Do not inherit this. diff --git a/teams/Android.bp b/teams/Android.bp index a9699d26f8..96d241bbf7 100644 --- a/teams/Android.bp +++ b/teams/Android.bp @@ -4414,8 +4414,43 @@ team { } team { + name: "trendy_team_android_media_solutions_playback", + + // go/trendy/manage/engineers/6742515252559872 + trendy_team_id: "6742515252559872", +} + +team { name: "trendy_team_android_telemetry_client_infra", // go/trendy/manage/engineers/5403245077430272 trendy_team_id: "5403245077430272", } + +team { + name: "trendy_team_pte_sysui", + + // go/trendy/manage/engineers/5185897463382016 + trendy_team_id: "5185897463382016", +} + +team { + name: "trendy_team_pixel_troubleshooting_app", + + // go/trendy/manage/engineers/5097003746426880 + trendy_team_id: "5097003746426880", +} + +team { + name: "trendy_team_desktop_firmware", + + // go/trendy/manage/engineers/5787938454863872 + trendy_team_id: "5787938454863872", +} + +team { + name: "trendy_team_art_cloud", + + // go/trendy/manage/engineers/5121440647577600 + trendy_team_id: "5121440647577600", +} diff --git a/tools/aconfig/aconfig/Android.bp b/tools/aconfig/aconfig/Android.bp index 68521af91f..f4dd10399b 100644 --- a/tools/aconfig/aconfig/Android.bp +++ b/tools/aconfig/aconfig/Android.bp @@ -234,6 +234,7 @@ rust_aconfig_library { name: "libaconfig_test_rust_library", crate_name: "aconfig_test_rust_library", aconfig_declarations: "aconfig.test.flags", + host_supported: true, } rust_test { diff --git a/tools/aconfig/aconfig/src/codegen/cpp.rs b/tools/aconfig/aconfig/src/codegen/cpp.rs index e743b2fc59..7a9c382bc7 100644 --- a/tools/aconfig/aconfig/src/codegen/cpp.rs +++ b/tools/aconfig/aconfig/src/codegen/cpp.rs @@ -45,6 +45,8 @@ where let header = package.replace('.', "_"); let package_macro = header.to_uppercase(); let cpp_namespace = package.replace('.', "::"); + 
ensure!(class_elements.len() > 0); + let container = class_elements[0].container.clone(); ensure!(codegen::is_valid_name_ident(&header)); let context = Context { header: &header, @@ -56,6 +58,7 @@ where readwrite_count, is_test_mode: codegen_mode == CodegenMode::Test, class_elements, + container, allow_instrumentation, }; @@ -100,6 +103,7 @@ pub struct Context<'a> { pub readwrite_count: i32, pub is_test_mode: bool, pub class_elements: Vec<ClassElement>, + pub container: String, pub allow_instrumentation: bool, } @@ -279,39 +283,23 @@ public: virtual ~flag_provider_interface() = default; virtual bool disabled_ro() = 0; - - virtual void disabled_ro(bool val) = 0; - virtual bool disabled_rw() = 0; - - virtual void disabled_rw(bool val) = 0; - virtual bool disabled_rw_exported() = 0; - - virtual void disabled_rw_exported(bool val) = 0; - virtual bool disabled_rw_in_other_namespace() = 0; - - virtual void disabled_rw_in_other_namespace(bool val) = 0; - virtual bool enabled_fixed_ro() = 0; - - virtual void enabled_fixed_ro(bool val) = 0; - virtual bool enabled_fixed_ro_exported() = 0; - - virtual void enabled_fixed_ro_exported(bool val) = 0; - virtual bool enabled_ro() = 0; - - virtual void enabled_ro(bool val) = 0; - virtual bool enabled_ro_exported() = 0; - - virtual void enabled_ro_exported(bool val) = 0; - virtual bool enabled_rw() = 0; + virtual void disabled_ro(bool val) = 0; + virtual void disabled_rw(bool val) = 0; + virtual void disabled_rw_exported(bool val) = 0; + virtual void disabled_rw_in_other_namespace(bool val) = 0; + virtual void enabled_fixed_ro(bool val) = 0; + virtual void enabled_fixed_ro_exported(bool val) = 0; + virtual void enabled_ro(bool val) = 0; + virtual void enabled_ro_exported(bool val) = 0; virtual void enabled_rw(bool val) = 0; virtual void reset_flags() {} diff --git a/tools/aconfig/aconfig/src/codegen/java.rs b/tools/aconfig/aconfig/src/codegen/java.rs index ec22ebc6c4..a34166d51d 100644 --- a/tools/aconfig/aconfig/src/codegen/java.rs 
+++ b/tools/aconfig/aconfig/src/codegen/java.rs @@ -1,18 +1,18 @@ /* - * Copyright (C) 2023 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ +* Copyright (C) 2023 The Android Open Source Project +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ use anyhow::Result; use serde::Serialize; @@ -46,7 +46,6 @@ where let runtime_lookup_required = flag_elements.iter().any(|elem| elem.is_read_write) || library_exported; let container = (flag_elements.first().expect("zero template flags").container).to_string(); - let context = Context { flag_elements, namespace_flags, @@ -138,6 +137,7 @@ struct FlagElement { pub default_value: bool, pub device_config_namespace: String, pub device_config_flag: String, + pub flag_name: String, pub flag_name_constant_suffix: String, pub flag_offset: u16, pub is_read_write: bool, @@ -157,6 +157,7 @@ fn create_flag_element( default_value: pf.state() == ProtoFlagState::ENABLED, device_config_namespace: pf.namespace().to_string(), device_config_flag, + flag_name: pf.name().to_string(), flag_name_constant_suffix: pf.name().to_ascii_uppercase(), flag_offset: *flag_offsets.get(pf.name()).expect("didnt find package offset :("), is_read_write: pf.permission() == ProtoFlagPermission::READ_WRITE, @@ -508,22 +509,39 @@ mod tests { private static FeatureFlags FEATURE_FLAGS = new FeatureFlagsImpl(); }"#; - let expect_featureflagsimpl_content = r#" + let expected_featureflagsmpl_content = r#" package com.android.aconfig.test; // TODO(b/303773055): Remove the annotation after access issue is resolved. 
import android.compat.annotation.UnsupportedAppUsage; import android.provider.DeviceConfig; import android.provider.DeviceConfig.Properties; + import android.aconfig.storage.StorageInternalReader; + import java.nio.file.Files; + import java.nio.file.Paths; + /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags { - private static boolean aconfig_test_is_cached = false; - private static boolean other_namespace_is_cached = false; + private static final boolean isReadFromNew = Files.exists(Paths.get("/metadata/aconfig/boot/enable_only_new_storage")); + private static volatile boolean isCached = false; + private static volatile boolean aconfig_test_is_cached = false; + private static volatile boolean other_namespace_is_cached = false; private static boolean disabledRw = false; private static boolean disabledRwExported = false; private static boolean disabledRwInOtherNamespace = false; private static boolean enabledRw = true; - - + private void init() { + StorageInternalReader reader = null; + try { + reader = new StorageInternalReader("system", "com.android.aconfig.test"); + disabledRw = reader.getBooleanFlagValue(1); + disabledRwExported = reader.getBooleanFlagValue(2); + enabledRw = reader.getBooleanFlagValue(8); + disabledRwInOtherNamespace = reader.getBooleanFlagValue(3); + } catch (Exception e) { + throw new RuntimeException("Cannot read flag in codegen", e); + } + isCached = true; + } private void load_overrides_aconfig_test() { try { Properties properties = DeviceConfig.getProperties("aconfig_test"); @@ -574,8 +592,14 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage public boolean disabledRw() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (isReadFromNew) { + if (!isCached) { + init(); + } + } else { + if (!aconfig_test_is_cached) { + load_overrides_aconfig_test(); + } } return disabledRw; } @@ -583,8 +607,14 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor 
@UnsupportedAppUsage public boolean disabledRwExported() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (isReadFromNew) { + if (!isCached) { + init(); + } + } else { + if (!aconfig_test_is_cached) { + load_overrides_aconfig_test(); + } } return disabledRwExported; } @@ -592,8 +622,14 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage public boolean disabledRwInOtherNamespace() { - if (!other_namespace_is_cached) { - load_overrides_other_namespace(); + if (isReadFromNew) { + if (!isCached) { + init(); + } + } else { + if (!other_namespace_is_cached) { + load_overrides_other_namespace(); + } } return disabledRwInOtherNamespace; } @@ -625,16 +661,23 @@ mod tests { @com.android.aconfig.annotations.AconfigFlagAccessor @UnsupportedAppUsage public boolean enabledRw() { - if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + if (isReadFromNew) { + if (!isCached) { + init(); + } + } else { + if (!aconfig_test_is_cached) { + load_overrides_aconfig_test(); + } } return enabledRw; } } "#; + let mut file_set = HashMap::from([ ("com/android/aconfig/test/Flags.java", expect_flags_content.as_str()), - ("com/android/aconfig/test/FeatureFlagsImpl.java", expect_featureflagsimpl_content), + ("com/android/aconfig/test/FeatureFlagsImpl.java", expected_featureflagsmpl_content), ("com/android/aconfig/test/FeatureFlags.java", EXPECTED_FEATUREFLAGS_COMMON_CONTENT), ( "com/android/aconfig/test/CustomFeatureFlags.java", @@ -677,7 +720,7 @@ mod tests { modified_parsed_flags.into_iter(), mode, flag_ids, - false, + true, ) .unwrap(); @@ -720,12 +763,11 @@ mod tests { import android.provider.DeviceConfig.Properties; /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags { - private static boolean aconfig_test_is_cached = false; + private static volatile boolean aconfig_test_is_cached = false; private static boolean disabledRwExported = false; private static boolean enabledFixedRoExported = false; private 
static boolean enabledRoExported = false; - private void load_overrides_aconfig_test() { try { Properties properties = DeviceConfig.getProperties("aconfig_test"); @@ -750,21 +792,21 @@ mod tests { @Override public boolean disabledRwExported() { if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + load_overrides_aconfig_test(); } return disabledRwExported; } @Override public boolean enabledFixedRoExported() { if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + load_overrides_aconfig_test(); } return enabledFixedRoExported; } @Override public boolean enabledRoExported() { if (!aconfig_test_is_cached) { - load_overrides_aconfig_test(); + load_overrides_aconfig_test(); } return enabledRoExported; } @@ -870,7 +912,7 @@ mod tests { modified_parsed_flags.into_iter(), mode, flag_ids, - false, + true, ) .unwrap(); @@ -991,7 +1033,7 @@ mod tests { modified_parsed_flags.into_iter(), mode, flag_ids, - false, + true, ) .unwrap(); let expect_featureflags_content = r#" diff --git a/tools/aconfig/aconfig/src/codegen/rust.rs b/tools/aconfig/aconfig/src/codegen/rust.rs index 45488b05f2..d318b9652f 100644 --- a/tools/aconfig/aconfig/src/codegen/rust.rs +++ b/tools/aconfig/aconfig/src/codegen/rust.rs @@ -113,41 +113,35 @@ mod tests { use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub struct FlagProvider; -lazy_static::lazy_static! 
{ /// flag value cache for disabled_rw - static ref CACHED_disabled_rw: bool = flags_rust::GetServerConfigurableFlag( + static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.disabled_rw", - "false") == "true"; + "false") == "true"); /// flag value cache for disabled_rw_exported - static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag( + static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; + "false") == "true"); /// flag value cache for disabled_rw_in_other_namespace - static ref CACHED_disabled_rw_in_other_namespace: bool = flags_rust::GetServerConfigurableFlag( + static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( "aconfig_flags.other_namespace", "com.android.aconfig.test.disabled_rw_in_other_namespace", - "false") == "true"; + "false") == "true"); /// flag value cache for enabled_rw - static ref CACHED_enabled_rw: bool = flags_rust::GetServerConfigurableFlag( + static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.enabled_rw", - "true") == "true"; - -} + "true") == "true"); impl FlagProvider { /// query flag disabled_ro @@ -259,223 +253,214 @@ pub fn enabled_rw() -> bool { use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub 
struct FlagProvider; -lazy_static::lazy_static! { - - static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe { - get_mapped_storage_file("system", StorageFileType::PackageMap) - .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test")) - .map(|context| context.map(|c| c.boolean_start_index)) - }; - - static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe { - get_mapped_storage_file("system", StorageFileType::FlagVal) - }; - /// flag value cache for disabled_rw +static READ_FROM_NEW_STORAGE: LazyLock<bool> = LazyLock::new(|| unsafe { + Path::new("/metadata/aconfig/boot/enable_only_new_storage").exists() +}); + +static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe { + get_mapped_storage_file("system", StorageFileType::PackageMap) + .and_then(|package_map| get_package_read_context(&package_map, "com.android.aconfig.test")) + .map(|context| context.map(|c| c.boolean_start_index)) +}); + +static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe { + get_mapped_storage_file("system", StorageFileType::FlagVal) +}); + +/// flag value cache for disabled_rw +static CACHED_disabled_rw: LazyLock<bool> = LazyLock::new(|| { + if *READ_FROM_NEW_STORAGE { + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 1) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Ok(false) + } + } + }) + }); - static ref CACHED_disabled_rw: bool = { - let result = flags_rust::GetServerConfigurableFlag( + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + panic!("failed to read flag value: {err}"); + } + } + } else { + flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.disabled_rw", - "false") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 1) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) + "false") == "true" + } +}); + +/// flag value cache for disabled_rw_exported +static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| { + if *READ_FROM_NEW_STORAGE { + // This will be called multiple times. Subsequent calls after the first are noops. + logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 2) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Ok(false) } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_rw' contained correct value. Legacy storage was {result}, new storage was {storage_result}"); - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw'. 
Legacy storage was {result}, new storage was {storage_result}"); - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}") - } + } + }) + }); + + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + panic!("failed to read flag value: {err}"); } } - - result - }; - - /// flag value cache for disabled_rw_exported - - static ref CACHED_disabled_rw_exported: bool = { - let result = flags_rust::GetServerConfigurableFlag( + } else { + flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 2) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) + "false") == "true" + } +}); + +/// flag value cache for disabled_rw_in_other_namespace +static CACHED_disabled_rw_in_other_namespace: LazyLock<bool> = LazyLock::new(|| { + if *READ_FROM_NEW_STORAGE { + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 3) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Ok(false) } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_rw_exported' contained correct value. Legacy storage was {result}, new storage was {storage_result}"); - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_exported'. Legacy storage was {result}, new storage was {storage_result}"); - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}") - } + } + }) + }); + + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + panic!("failed to read flag value: {err}"); } } - - result - }; - - /// flag value cache for disabled_rw_in_other_namespace - - static ref CACHED_disabled_rw_in_other_namespace: bool = { - let result = flags_rust::GetServerConfigurableFlag( + } else { + flags_rust::GetServerConfigurableFlag( "aconfig_flags.other_namespace", "com.android.aconfig.test.disabled_rw_in_other_namespace", - "false") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 3) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) + "false") == "true" + } +}); + + +/// flag value cache for enabled_rw +static CACHED_enabled_rw: LazyLock<bool> = LazyLock::new(|| { + if *READ_FROM_NEW_STORAGE { + // This will be called multiple times. Subsequent calls after the first are noops. + logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: {err}")) + .and_then(|flag_val_map| { + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: {err}")) + .and_then(|package_offset| { + match package_offset { + Some(offset) => { + get_boolean_flag_value(&flag_val_map, offset + 8) + .map_err(|err| format!("failed to get flag: {err}")) + }, + None => { + log!(Level::Error, "no context found for package com.android.aconfig.test"); + Ok(true) } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_rw_in_other_namespace' contained correct value. Legacy storage was {result}, new storage was {storage_result}"); - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'disabled_rw_in_other_namespace'. 
Legacy storage was {result}, new storage was {storage_result}"); - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}") - } + } + }) + }); + + match flag_value_result { + Ok(flag_value) => { + return flag_value; + }, + Err(err) => { + log!(Level::Error, "aconfig_rust_codegen: error: {err}"); + panic!("failed to read flag value: {err}"); } } - - result - }; - - /// flag value cache for enabled_rw - - static ref CACHED_enabled_rw: bool = { - let result = flags_rust::GetServerConfigurableFlag( + } else { + flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.enabled_rw", - "true") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - // This will be called multiple times. Subsequent calls after the first are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: {err}")) - .and_then(|flag_val_map| { - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: {err}")) - .and_then(|package_offset| { - match package_offset { - Some(offset) => { - get_boolean_flag_value(&flag_val_map, offset + 8) - .map_err(|err| format!("failed to get flag: {err}")) - }, - None => Err("no context found for package 'com.android.aconfig.test'".to_string()) - } - }) - }); - - match aconfig_storage_result { - Ok(storage_result) if storage_result == result => { - log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_rw' contained correct value. Legacy storage was {result}, new storage was {storage_result}"); - }, - Ok(storage_result) => { - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag 'enabled_rw'. 
Legacy storage was {result}, new storage was {storage_result}"); - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: {err}") - } - } - } - - result - }; - -} + "true") == "true" + } +}); impl FlagProvider { @@ -535,66 +520,7 @@ pub static PROVIDER: FlagProvider = FlagProvider; /// query flag disabled_ro #[inline(always)] pub fn disabled_ro() -> bool { - - - let result = false; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': did not get context"); - return result; - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 0 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'disabled_ro': {err}"); - return result; - } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'disabled_ro'. 
Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = false; - log!(Level::Info, "AconfigTestMission1: success! flag 'disabled_ro' contained correct value. Legacy storage was {default_value}, new storage was {value}"); - } - } - - result - + false } /// query flag disabled_rw @@ -618,261 +544,25 @@ pub fn disabled_rw_in_other_namespace() -> bool { /// query flag enabled_fixed_ro #[inline(always)] pub fn enabled_fixed_ro() -> bool { - - - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': did not get context"); - return result; - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 4 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro': {err}"); - return 
result; - } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; - log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_fixed_ro' contained correct value. Legacy storage was {default_value}, new storage was {value}"); - } - } - - result - + true } /// query flag enabled_fixed_ro_exported #[inline(always)] pub fn enabled_fixed_ro_exported() -> bool { - - - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': did not get context"); - return result; - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 5 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - log!(Level::Error, 
"AconfigTestMission1: error: failed to read flag 'enabled_fixed_ro_exported': {err}"); - return result; - } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_fixed_ro_exported'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; - log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_fixed_ro_exported' contained correct value. Legacy storage was {default_value}, new storage was {value}"); - } - } - - result - + true } /// query flag enabled_ro #[inline(always)] pub fn enabled_ro() -> bool { - - - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': did not get context"); - return result; - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 6 + package_read_context.boolean_start_index) { - Ok(val) => val, - Err(err) => { - 
log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro': {err}"); - return result; - } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; - log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_ro' contained correct value. Legacy storage was {default_value}, new storage was {value}"); - } - } - - result - + true } /// query flag enabled_ro_exported #[inline(always)] pub fn enabled_ro_exported() -> bool { - - - let result = true; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() { - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe { - let package_map = match get_mapped_storage_file("system", StorageFileType::PackageMap) { - Ok(file) => file, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "com.android.aconfig.test") { - Ok(Some(context)) => context, - Ok(None) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': did not get context"); - return result; - }, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("system", StorageFileType::FlagVal) { - Ok(val_map) => val_map, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, 7 + package_read_context.boolean_start_index) { - 
Ok(val) => val, - Err(err) => { - log!(Level::Error, "AconfigTestMission1: error: failed to read flag 'enabled_ro_exported': {err}"); - return result; - } - }; - - if result != value { - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for 'enabled_ro_exported'. Legacy storage was {result}, new storage was {value}"); - } else { - let default_value = true; - log!(Level::Info, "AconfigTestMission1: success! flag 'enabled_ro_exported' contained correct value. Legacy storage was {default_value}, new storage was {value}"); - } - } - - result - + true } /// query flag enabled_rw @@ -1144,35 +834,29 @@ pub fn reset_flags() { use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub struct FlagProvider; -lazy_static::lazy_static! 
{ /// flag value cache for disabled_rw_exported - static ref CACHED_disabled_rw_exported: bool = flags_rust::GetServerConfigurableFlag( + static CACHED_disabled_rw_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.disabled_rw_exported", - "false") == "true"; + "false") == "true"); /// flag value cache for enabled_fixed_ro_exported - static ref CACHED_enabled_fixed_ro_exported: bool = flags_rust::GetServerConfigurableFlag( + static CACHED_enabled_fixed_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.enabled_fixed_ro_exported", - "false") == "true"; + "false") == "true"); /// flag value cache for enabled_ro_exported - static ref CACHED_enabled_ro_exported: bool = flags_rust::GetServerConfigurableFlag( + static CACHED_enabled_ro_exported: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( "aconfig_flags.aconfig_test", "com.android.aconfig.test.enabled_ro_exported", - "false") == "true"; - -} + "false") == "true"); impl FlagProvider { /// query flag disabled_rw_exported @@ -1218,12 +902,9 @@ pub fn enabled_ro_exported() -> bool { use aconfig_storage_read_api::{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub struct FlagProvider; diff --git a/tools/aconfig/aconfig/src/commands.rs b/tools/aconfig/aconfig/src/commands.rs index 59f06627ee..496876e08f 100644 --- a/tools/aconfig/aconfig/src/commands.rs +++ b/tools/aconfig/aconfig/src/commands.rs @@ -17,7 +17,8 @@ use anyhow::{bail, ensure, Context, Result}; use 
itertools::Itertools; use protobuf::Message; -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; +use std::hash::Hasher; use std::io::Read; use std::path::PathBuf; @@ -31,6 +32,7 @@ use aconfig_protos::{ ParsedFlagExt, ProtoFlagMetadata, ProtoFlagPermission, ProtoFlagState, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint, }; +use aconfig_storage_file::sip_hasher13::SipHasher13; use aconfig_storage_file::StorageFileType; pub struct Input { @@ -77,8 +79,18 @@ pub fn parse_flags( .read_to_string(&mut contents) .with_context(|| format!("failed to read {}", input.source))?; - let flag_declarations = aconfig_protos::flag_declarations::try_from_text_proto(&contents) - .with_context(|| input.error_context())?; + let mut flag_declarations = + aconfig_protos::flag_declarations::try_from_text_proto(&contents) + .with_context(|| input.error_context())?; + + // system_ext flags should be treated as system flags as we are combining /system_ext + // and /system as one container + // TODO: remove this logic when we start enforcing that system_ext cannot be set as + // container in aconfig declaration files. 
+ if flag_declarations.container() == "system_ext" { + flag_declarations.set_container(String::from("system")); + } + ensure!( package == flag_declarations.package(), "failed to parse {}: expected package {}, got {}", @@ -268,10 +280,11 @@ pub fn create_storage( caches: Vec<Input>, container: &str, file: &StorageFileType, + version: u32, ) -> Result<Vec<u8>> { let parsed_flags_vec: Vec<ProtoParsedFlags> = caches.into_iter().map(|mut input| input.try_parse_flags()).collect::<Result<Vec<_>>>()?; - generate_storage_file(container, parsed_flags_vec.iter(), file) + generate_storage_file(container, parsed_flags_vec.iter(), file, version) } pub fn create_device_config_defaults(mut input: Input) -> Result<Vec<u8>> { @@ -410,12 +423,43 @@ where Ok(flag_ids) } +#[allow(dead_code)] // TODO: b/316357686 - Use fingerprint in codegen to + // protect hardcoded offset reads. +pub fn compute_flag_offsets_fingerprint(flags_map: &HashMap<String, u16>) -> Result<u64> { + let mut hasher = SipHasher13::new(); + + // Need to sort to ensure the data is added to the hasher in the same order + // each run. + let sorted_map: BTreeMap<&String, &u16> = flags_map.iter().collect(); + + for (flag, offset) in sorted_map { + // See https://docs.rs/siphasher/latest/siphasher/#note for use of write + // over write_i16. Similarly, use to_be_bytes rather than to_ne_bytes to + // ensure consistency. 
+ hasher.write(flag.as_bytes()); + hasher.write(&offset.to_be_bytes()); + } + Ok(hasher.finish()) +} + #[cfg(test)] mod tests { use super::*; use aconfig_protos::ProtoFlagPurpose; #[test] + fn test_offset_fingerprint() { + let parsed_flags = crate::test::parse_test_flags(); + let package = find_unique_package(&parsed_flags.parsed_flag).unwrap().to_string(); + let flag_ids = assign_flag_ids(&package, parsed_flags.parsed_flag.iter()).unwrap(); + let expected_fingerprint = 10709892481002252132u64; + + let hash_result = compute_flag_offsets_fingerprint(&flag_ids); + + assert_eq!(hash_result.unwrap(), expected_fingerprint); + } + + #[test] fn test_parse_flags() { let parsed_flags = crate::test::parse_test_flags(); // calls parse_flags aconfig_protos::parsed_flags::verify_fields(&parsed_flags).unwrap(); diff --git a/tools/aconfig/aconfig/src/main.rs b/tools/aconfig/aconfig/src/main.rs index 1fb64f9c56..edb4fd373b 100644 --- a/tools/aconfig/aconfig/src/main.rs +++ b/tools/aconfig/aconfig/src/main.rs @@ -16,6 +16,8 @@ //! `aconfig` is a build time tool to manage build time configurations, such as feature flags. 
+use aconfig_storage_file::DEFAULT_FILE_VERSION; +use aconfig_storage_file::MAX_SUPPORTED_FILE_VERSION; use anyhow::{anyhow, bail, Context, Result}; use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command}; use core::any::Any; @@ -159,7 +161,13 @@ fn cli() -> Command { .value_parser(|s: &str| StorageFileType::try_from(s)), ) .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true)) - .arg(Arg::new("out").long("out").required(true)), + .arg(Arg::new("out").long("out").required(true)) + .arg( + Arg::new("version") + .long("version") + .required(false) + .value_parser(|s: &str| s.parse::<u32>()), + ), ) } @@ -309,12 +317,18 @@ fn main() -> Result<()> { write_output_to_file_or_stdout(path, &output)?; } Some(("create-storage", sub_matches)) => { + let version = + get_optional_arg::<u32>(sub_matches, "version").unwrap_or(&DEFAULT_FILE_VERSION); + if *version > MAX_SUPPORTED_FILE_VERSION { + bail!("Invalid version selected ({})", version); + } let file = get_required_arg::<StorageFileType>(sub_matches, "file") .context("Invalid storage file selection")?; let cache = open_zero_or_more_files(sub_matches, "cache")?; let container = get_required_arg::<String>(sub_matches, "container")?; let path = get_required_arg::<String>(sub_matches, "out")?; - let output = commands::create_storage(cache, container, file) + + let output = commands::create_storage(cache, container, file, *version) .context("failed to create storage files")?; write_output_to_file_or_stdout(path, &output)?; } diff --git a/tools/aconfig/aconfig/src/storage/flag_info.rs b/tools/aconfig/aconfig/src/storage/flag_info.rs new file mode 100644 index 0000000000..25326094da --- /dev/null +++ b/tools/aconfig/aconfig/src/storage/flag_info.rs @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +use crate::commands::assign_flag_ids; +use crate::storage::FlagPackage; +use aconfig_protos::ProtoFlagPermission; +use aconfig_storage_file::{FlagInfoHeader, FlagInfoList, FlagInfoNode, StorageFileType}; +use anyhow::{anyhow, Result}; + +fn new_header(container: &str, num_flags: u32, version: u32) -> FlagInfoHeader { + FlagInfoHeader { + version, + container: String::from(container), + file_type: StorageFileType::FlagInfo as u8, + file_size: 0, + num_flags, + boolean_flag_offset: 0, + } +} + +pub fn create_flag_info( + container: &str, + packages: &[FlagPackage], + version: u32, +) -> Result<FlagInfoList> { + // create list + let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum(); + + let mut is_flag_rw = vec![false; num_flags as usize]; + for pkg in packages.iter() { + let start_index = pkg.boolean_start_index as usize; + let flag_ids = assign_flag_ids(pkg.package_name, pkg.boolean_flags.iter().copied())?; + for pf in pkg.boolean_flags.iter() { + let fid = flag_ids + .get(pf.name()) + .ok_or(anyhow!(format!("missing flag id for {}", pf.name())))?; + is_flag_rw[start_index + (*fid as usize)] = + pf.permission() == ProtoFlagPermission::READ_WRITE; + } + } + + let mut list = FlagInfoList { + header: new_header(container, num_flags, version), + nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(), + }; + + // initialize all header fields + list.header.boolean_flag_offset = list.header.into_bytes().len() as u32; + let bytes_per_node = FlagInfoNode::create(false).into_bytes().len() as u32; + 
list.header.file_size = list.header.boolean_flag_offset + num_flags * bytes_per_node; + + Ok(list) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; + use aconfig_storage_file::DEFAULT_FILE_VERSION; + + pub fn create_test_flag_info_list_from_source() -> Result<FlagInfoList> { + let caches = parse_all_test_flags(); + let packages = group_flags_by_package(caches.iter()); + create_flag_info("mockup", &packages, DEFAULT_FILE_VERSION) + } + + #[test] + // this test point locks down the flag info creation and each field + fn test_list_contents() { + let flag_info_list = create_test_flag_info_list_from_source(); + assert!(flag_info_list.is_ok()); + let expected_flag_info_list = + aconfig_storage_file::test_utils::create_test_flag_info_list(); + assert_eq!(flag_info_list.unwrap(), expected_flag_info_list); + } +} diff --git a/tools/aconfig/aconfig/src/storage/flag_table.rs b/tools/aconfig/aconfig/src/storage/flag_table.rs index a9712119bf..6046d7ef18 100644 --- a/tools/aconfig/aconfig/src/storage/flag_table.rs +++ b/tools/aconfig/aconfig/src/storage/flag_table.rs @@ -19,13 +19,12 @@ use crate::storage::FlagPackage; use aconfig_protos::ProtoFlagPermission; use aconfig_storage_file::{ get_table_size, FlagTable, FlagTableHeader, FlagTableNode, StorageFileType, StoredFlagType, - FILE_VERSION, }; use anyhow::{anyhow, Result}; -fn new_header(container: &str, num_flags: u32) -> FlagTableHeader { +fn new_header(container: &str, num_flags: u32, version: u32) -> FlagTableHeader { FlagTableHeader { - version: FILE_VERSION, + version, container: String::from(container), file_type: StorageFileType::FlagMap as u8, file_size: 0, @@ -86,12 +85,16 @@ impl FlagTableNodeWrapper { } } -pub fn create_flag_table(container: &str, packages: &[FlagPackage]) -> Result<FlagTable> { +pub fn create_flag_table( + container: &str, + packages: &[FlagPackage], + version: u32, +) -> Result<FlagTable> { // create table let num_flags 
= packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum(); let num_buckets = get_table_size(num_flags)?; - let mut header = new_header(container, num_flags); + let mut header = new_header(container, num_flags, version); let mut buckets = vec![None; num_buckets as usize]; let mut node_wrappers = packages .iter() @@ -138,13 +141,15 @@ pub fn create_flag_table(container: &str, packages: &[FlagPackage]) -> Result<Fl #[cfg(test)] mod tests { + use aconfig_storage_file::DEFAULT_FILE_VERSION; + use super::*; use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; fn create_test_flag_table_from_source() -> Result<FlagTable> { let caches = parse_all_test_flags(); let packages = group_flags_by_package(caches.iter()); - create_flag_table("mockup", &packages) + create_flag_table("mockup", &packages, DEFAULT_FILE_VERSION) } #[test] diff --git a/tools/aconfig/aconfig/src/storage/flag_value.rs b/tools/aconfig/aconfig/src/storage/flag_value.rs index c15ba54112..6a655b9a92 100644 --- a/tools/aconfig/aconfig/src/storage/flag_value.rs +++ b/tools/aconfig/aconfig/src/storage/flag_value.rs @@ -17,12 +17,12 @@ use crate::commands::assign_flag_ids; use crate::storage::FlagPackage; use aconfig_protos::ProtoFlagState; -use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType, FILE_VERSION}; +use aconfig_storage_file::{FlagValueHeader, FlagValueList, StorageFileType}; use anyhow::{anyhow, Result}; -fn new_header(container: &str, num_flags: u32) -> FlagValueHeader { +fn new_header(container: &str, num_flags: u32, version: u32) -> FlagValueHeader { FlagValueHeader { - version: FILE_VERSION, + version, container: String::from(container), file_type: StorageFileType::FlagVal as u8, file_size: 0, @@ -31,12 +31,16 @@ fn new_header(container: &str, num_flags: u32) -> FlagValueHeader { } } -pub fn create_flag_value(container: &str, packages: &[FlagPackage]) -> Result<FlagValueList> { +pub fn create_flag_value( + container: &str, + packages: 
&[FlagPackage], + version: u32, +) -> Result<FlagValueList> { // create list let num_flags = packages.iter().map(|pkg| pkg.boolean_flags.len() as u32).sum(); let mut list = FlagValueList { - header: new_header(container, num_flags), + header: new_header(container, num_flags, version), booleans: vec![false; num_flags as usize], }; @@ -61,13 +65,15 @@ pub fn create_flag_value(container: &str, packages: &[FlagPackage]) -> Result<Fl #[cfg(test)] mod tests { + use aconfig_storage_file::DEFAULT_FILE_VERSION; + use super::*; use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; pub fn create_test_flag_value_list_from_source() -> Result<FlagValueList> { let caches = parse_all_test_flags(); let packages = group_flags_by_package(caches.iter()); - create_flag_value("mockup", &packages) + create_flag_value("mockup", &packages, DEFAULT_FILE_VERSION) } #[test] diff --git a/tools/aconfig/aconfig/src/storage/mod.rs b/tools/aconfig/aconfig/src/storage/mod.rs index 73339f24b3..9e5dad5955 100644 --- a/tools/aconfig/aconfig/src/storage/mod.rs +++ b/tools/aconfig/aconfig/src/storage/mod.rs @@ -14,15 +14,16 @@ * limitations under the License. 
*/ +pub mod flag_info; pub mod flag_table; pub mod flag_value; pub mod package_table; -use anyhow::{anyhow, Result}; +use anyhow::Result; use std::collections::{HashMap, HashSet}; use crate::storage::{ - flag_table::create_flag_table, flag_value::create_flag_value, + flag_info::create_flag_info, flag_table::create_flag_table, flag_value::create_flag_value, package_table::create_package_table, }; use aconfig_protos::{ProtoParsedFlag, ProtoParsedFlags}; @@ -87,6 +88,7 @@ pub fn generate_storage_file<'a, I>( container: &str, parsed_flags_vec_iter: I, file: &StorageFileType, + version: u32, ) -> Result<Vec<u8>> where I: Iterator<Item = &'a ProtoParsedFlags>, @@ -95,18 +97,21 @@ where match file { StorageFileType::PackageMap => { - let package_table = create_package_table(container, &packages)?; + let package_table = create_package_table(container, &packages, version)?; Ok(package_table.into_bytes()) } StorageFileType::FlagMap => { - let flag_table = create_flag_table(container, &packages)?; + let flag_table = create_flag_table(container, &packages, version)?; Ok(flag_table.into_bytes()) } StorageFileType::FlagVal => { - let flag_value = create_flag_value(container, &packages)?; + let flag_value = create_flag_value(container, &packages, version)?; Ok(flag_value.into_bytes()) } - _ => Err(anyhow!("aconfig does not support the creation of this storage file type")), + StorageFileType::FlagInfo => { + let flag_info = create_flag_info(container, &packages, version)?; + Ok(flag_info.into_bytes()) + } } } diff --git a/tools/aconfig/aconfig/src/storage/package_table.rs b/tools/aconfig/aconfig/src/storage/package_table.rs index c53602f9cb..56559f8daa 100644 --- a/tools/aconfig/aconfig/src/storage/package_table.rs +++ b/tools/aconfig/aconfig/src/storage/package_table.rs @@ -18,14 +18,13 @@ use anyhow::Result; use aconfig_storage_file::{ get_table_size, PackageTable, PackageTableHeader, PackageTableNode, StorageFileType, - FILE_VERSION, }; use crate::storage::FlagPackage; -fn 
new_header(container: &str, num_packages: u32) -> PackageTableHeader { +fn new_header(container: &str, num_packages: u32, version: u32) -> PackageTableHeader { PackageTableHeader { - version: FILE_VERSION, + version, container: String::from(container), file_type: StorageFileType::PackageMap as u8, file_size: 0, @@ -56,20 +55,26 @@ impl PackageTableNodeWrapper { } } -pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result<PackageTable> { +pub fn create_package_table( + container: &str, + packages: &[FlagPackage], + version: u32, +) -> Result<PackageTable> { // create table let num_packages = packages.len() as u32; let num_buckets = get_table_size(num_packages)?; - let mut header = new_header(container, num_packages); + let mut header = new_header(container, num_packages, version); let mut buckets = vec![None; num_buckets as usize]; - let mut node_wrappers: Vec<_> = - packages.iter().map(|pkg| PackageTableNodeWrapper::new(pkg, num_buckets)).collect(); + let mut node_wrappers: Vec<_> = packages + .iter() + .map(|pkg: &FlagPackage<'_>| PackageTableNodeWrapper::new(pkg, num_buckets)) + .collect(); // initialize all header fields header.bucket_offset = header.into_bytes().len() as u32; header.node_offset = header.bucket_offset + num_buckets * 4; header.file_size = header.node_offset - + node_wrappers.iter().map(|x| x.node.into_bytes().len()).sum::<usize>() as u32; + + node_wrappers.iter().map(|x| x.node.into_bytes(version).len()).sum::<usize>() as u32; // sort node_wrappers by bucket index for efficiency node_wrappers.sort_by(|a, b| a.bucket_index.cmp(&b.bucket_index)); @@ -87,7 +92,7 @@ pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result if buckets[node_bucket_idx as usize].is_none() { buckets[node_bucket_idx as usize] = Some(offset); } - offset += node_wrappers[i].node.into_bytes().len() as u32; + offset += node_wrappers[i].node.into_bytes(version).len() as u32; if let Some(index) = next_node_bucket_idx { if index 
== node_bucket_idx { @@ -106,13 +111,15 @@ pub fn create_package_table(container: &str, packages: &[FlagPackage]) -> Result #[cfg(test)] mod tests { + use aconfig_storage_file::DEFAULT_FILE_VERSION; + use super::*; use crate::storage::{group_flags_by_package, tests::parse_all_test_flags}; pub fn create_test_package_table_from_source() -> Result<PackageTable> { let caches = parse_all_test_flags(); let packages = group_flags_by_package(caches.iter()); - create_package_table("mockup", &packages) + create_package_table("mockup", &packages, DEFAULT_FILE_VERSION) } #[test] diff --git a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template index cd2e3db2d0..d1cf191e29 100644 --- a/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template +++ b/tools/aconfig/aconfig/templates/FeatureFlagsImpl.java.template @@ -9,68 +9,58 @@ import android.compat.annotation.UnsupportedAppUsage; import android.provider.DeviceConfig; import android.provider.DeviceConfig.Properties; - -{{ -if allow_instrumentation }} +{{ -if not library_exported }} import android.aconfig.storage.StorageInternalReader; -import android.util.Log; - -import java.io.File; +import java.nio.file.Files; +import java.nio.file.Paths; {{ -endif }} {{ -endif }} /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags \{ {{ -if runtime_lookup_required }} +{{ -if not library_exported }} + private static final boolean isReadFromNew = Files.exists(Paths.get("/metadata/aconfig/boot/enable_only_new_storage")); + private static volatile boolean isCached = false; +{{ -endif }} {{ -for namespace_with_flags in namespace_flags }} - private static boolean {namespace_with_flags.namespace}_is_cached = false; + private static volatile boolean {namespace_with_flags.namespace}_is_cached = false; {{ -endfor- }} {{ for flag in flag_elements }} -{{- if flag.is_read_write }} +{{ -if flag.is_read_write }} private static boolean {flag.method_name} = 
{flag.default_value}; {{ -endif }} {{ -endfor }} -{{ -if allow_instrumentation }} - StorageInternalReader reader; - boolean readFromNewStorage; - - private final static String TAG = "AconfigJavaCodegen"; - public FeatureFlagsImpl() \{ - File file = new File("/metadata/aconfig_test_missions/mission_1"); - if (file.exists()) \{ - readFromNewStorage = true; +{{ if not library_exported }} + private void init() \{ + StorageInternalReader reader = null; + try \{ reader = new StorageInternalReader("{container}", "{package_name}"); +{{ for namespace_with_flags in namespace_flags }} +{{ -for flag in namespace_with_flags.flags }} +{{ if flag.is_read_write }} + {flag.method_name} = reader.getBooleanFlagValue({flag.flag_offset}); +{{ endif }} +{{ -endfor }} +{{ -endfor }} + } catch (Exception e) \{ + throw new RuntimeException("Cannot read flag in codegen", e); } + isCached = true; } -{{ -endif }} +{{ endif }} + + {{ for namespace_with_flags in namespace_flags }} private void load_overrides_{namespace_with_flags.namespace}() \{ try \{ -{{ -if allow_instrumentation }} - boolean val; -{{ -endif }} Properties properties = DeviceConfig.getProperties("{namespace_with_flags.namespace}"); {{ -for flag in namespace_with_flags.flags }} {{ -if flag.is_read_write }} {flag.method_name} = properties.getBoolean(Flags.FLAG_{flag.flag_name_constant_suffix}, {flag.default_value}); -{{ -if allow_instrumentation }} - if (readFromNewStorage) \{ - try \{ - val = reader.getBooleanFlagValue({flag.flag_offset}); - if (val == {flag.method_name}) \{ - Log.i(TAG, "success: {flag.method_name} value matches"); - } else \{ - Log.i(TAG, String.format( - "error: {flag.method_name} value mismatch, new storage value is %s, old storage value is %s", - val, {flag.method_name})); - } - } catch (Exception e) \{ - Log.e(TAG,"error: failed to read flag value of {flag.method_name}"); - } - } -{{ -endif }} {{ -endif }} {{ -endfor }} } catch (NullPointerException e) \{ @@ -86,6 +76,7 @@ public final class 
FeatureFlagsImpl implements FeatureFlags \{ {namespace_with_flags.namespace}_is_cached = true; } {{ endfor- }} + {{ -endif }}{#- end of runtime_lookup_required #} {{ -for flag in flag_elements }} @Override @@ -94,19 +85,31 @@ public final class FeatureFlagsImpl implements FeatureFlags \{ @UnsupportedAppUsage {{ -endif }} public boolean {flag.method_name}() \{ +{{ -if not library_exported }} {{ -if flag.is_read_write }} - if (!{flag.device_config_namespace}_is_cached) \{ - load_overrides_{flag.device_config_namespace}(); + if (isReadFromNew) \{ + if (!isCached) \{ + init(); + } + } else \{ + if (!{flag.device_config_namespace}_is_cached) \{ + load_overrides_{flag.device_config_namespace}(); + } } return {flag.method_name}; {{ -else }} return {flag.default_value}; {{ -endif }} +{{ else }} + if (!{flag.device_config_namespace}_is_cached) \{ + load_overrides_{flag.device_config_namespace}(); + } + return {flag.method_name}; +{{ -endif }} } {{ endfor }} } -{{ else }} -{#- Generate only stub if in test mode #} +{{ else }} {#- Generate only stub if in test mode #} /** @hide */ public final class FeatureFlagsImpl implements FeatureFlags \{ {{ for flag in flag_elements }} diff --git a/tools/aconfig/aconfig/templates/cpp_exported_header.template b/tools/aconfig/aconfig/templates/cpp_exported_header.template index 0f7853e405..4643c9775c 100644 --- a/tools/aconfig/aconfig/templates/cpp_exported_header.template +++ b/tools/aconfig/aconfig/templates/cpp_exported_header.template @@ -27,12 +27,13 @@ public: {{ -for item in class_elements}} virtual bool {item.flag_name}() = 0; + {{ -endfor }} + {{ -if is_test_mode }} + {{ -for item in class_elements}} virtual void {item.flag_name}(bool val) = 0; - {{ -endif }} {{ -endfor }} - {{ -if is_test_mode }} virtual void reset_flags() \{} {{ -endif }} }; diff --git a/tools/aconfig/aconfig/templates/cpp_source_file.template b/tools/aconfig/aconfig/templates/cpp_source_file.template index 38dda7df31..eaaf86f527 100644 --- 
a/tools/aconfig/aconfig/templates/cpp_source_file.template +++ b/tools/aconfig/aconfig/templates/cpp_source_file.template @@ -1,13 +1,13 @@ #include "{header}.h" {{ if allow_instrumentation }} -#include <sys/stat.h> +{{ if readwrite- }} +#include <unistd.h> #include "aconfig_storage/aconfig_storage_read_api.hpp" #include <android/log.h> - -#define ALOGI(msg, ...) \ - __android_log_print(ANDROID_LOG_INFO, "AconfigTestMission1", (msg), __VA_ARGS__) - +#define LOG_TAG "aconfig_cpp_codegen" +#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__) +{{ -endif }} {{ endif }} {{ if readwrite- }} @@ -66,15 +66,97 @@ namespace {cpp_namespace} \{ class flag_provider : public flag_provider_interface \{ public: - {{ -for item in class_elements }} + {{ if allow_instrumentation- }} + {{ if readwrite- }} + flag_provider() + {{ if readwrite- }} + : cache_({readwrite_count}, -1) + , boolean_start_index_() + {{ -else- }} + : boolean_start_index_() + {{ -endif }} + , flag_value_file_(nullptr) + , read_from_new_storage_(false) + , package_exists_in_storage_(true) \{ + + if (access("/metadata/aconfig/boot/enable_only_new_storage", F_OK) == 0) \{ + read_from_new_storage_ = true; + } + + if (!read_from_new_storage_) \{ + return; + } + + auto package_map_file = aconfig_storage::get_mapped_file( + "{container}", + aconfig_storage::StorageFileType::package_map); + if (!package_map_file.ok()) \{ + ALOGE("error: failed to get package map file: %s", package_map_file.error().c_str()); + } + + auto context = aconfig_storage::get_package_read_context( + **package_map_file, "{package}"); + if (!context.ok()) \{ + ALOGE("error: failed to get package read context: %s", context.error().c_str()); + } + + if (!(context->package_exists)) \{ + package_exists_in_storage_ = false; + return; + } + + // cache package boolean flag start index + boolean_start_index_ = context->boolean_start_index; + // unmap package map file and free memory + delete *package_map_file; + + auto 
flag_value_file = aconfig_storage::get_mapped_file( + "{container}", + aconfig_storage::StorageFileType::flag_val); + if (!flag_value_file.ok()) \{ + ALOGE("error: failed to get flag value file: %s", flag_value_file.error().c_str()); + } + + // cache flag value file + flag_value_file_ = std::unique_ptr<aconfig_storage::MappedStorageFile>( + *flag_value_file); + + } + {{ -endif }} + {{ -endif }} + + {{ -for item in class_elements }} virtual bool {item.flag_name}() override \{ {{ -if item.readwrite }} if (cache_[{item.readwrite_idx}] == -1) \{ + {{ if allow_instrumentation- }} + if (read_from_new_storage_) \{ + if (!package_exists_in_storage_) \{ + return {item.default_value}; + } + + auto value = aconfig_storage::get_boolean_flag_value( + *flag_value_file_, + boolean_start_index_ + {item.flag_offset}); + + if (!value.ok()) \{ + ALOGE("error: failed to read flag value: %s", value.error().c_str()); + } + + cache_[{item.readwrite_idx}] = *value; + } else \{ + cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag( + "aconfig_flags.{item.device_config_namespace}", + "{item.device_config_flag}", + "{item.default_value}") == "true"; + } + {{ -else- }} cache_[{item.readwrite_idx}] = server_configurable_flags::GetServerConfigurableFlag( "aconfig_flags.{item.device_config_namespace}", "{item.device_config_flag}", "{item.default_value}") == "true"; + {{ -endif }} } return cache_[{item.readwrite_idx}]; {{ -else }} @@ -86,12 +168,22 @@ namespace {cpp_namespace} \{ {{ -endif }} } {{ -endfor }} + {{ if readwrite- }} private: std::vector<int8_t> cache_ = std::vector<int8_t>({readwrite_count}, -1); + {{ if allow_instrumentation- }} + uint32_t boolean_start_index_; + + std::unique_ptr<aconfig_storage::MappedStorageFile> flag_value_file_; + + bool read_from_new_storage_; + + bool package_exists_in_storage_; + {{ -endif }} {{ -endif }} - }; + }; {{ -endif }} @@ -107,62 +199,6 @@ bool {header}_{item.flag_name}() \{ {{ -if item.readwrite }} return 
{cpp_namespace}::{item.flag_name}(); {{ -else }} - {{ if allow_instrumentation }} - auto result = - {{ if item.is_fixed_read_only }} - {package_macro}_{item.flag_macro} - {{ else }} - {item.default_value} - {{ endif }}; - - struct stat buffer; - if (stat("/metadata/aconfig_test_missions/mission_1", &buffer) != 0) \{ - return result; - } - - auto package_map_file = aconfig_storage::get_mapped_file( - "{item.container}", - aconfig_storage::StorageFileType::package_map); - if (!package_map_file.ok()) \{ - ALOGI("error: failed to get package map file: %s", package_map_file.error().c_str()); - return result; - } - - auto package_read_context = aconfig_storage::get_package_read_context( - **package_map_file, "{package}"); - if (!package_read_context.ok()) \{ - ALOGI("error: failed to get package read context: %s", package_map_file.error().c_str()); - return result; - } - - delete *package_map_file; - - auto flag_val_map = aconfig_storage::get_mapped_file( - "{item.container}", - aconfig_storage::StorageFileType::flag_val); - if (!flag_val_map.ok()) \{ - ALOGI("error: failed to get flag val map: %s", package_map_file.error().c_str()); - return result; - } - - auto value = aconfig_storage::get_boolean_flag_value( - **flag_val_map, - package_read_context->boolean_start_index + {item.flag_offset}); - if (!value.ok()) \{ - ALOGI("error: failed to get flag val: %s", package_map_file.error().c_str()); - return result; - } - - delete *flag_val_map; - - if (*value != result) \{ - ALOGI("error: new storage value '%d' does not match current value '%d'", *value, result); - } else \{ - ALOGI("success: new storage value was '%d, legacy storage was '%d'", *value, result); - } - - return result; - {{ else }} {{ -if item.is_fixed_read_only }} return {package_macro}_{item.flag_macro}; {{ -else }} @@ -170,7 +206,6 @@ bool {header}_{item.flag_name}() \{ {{ -endif }} {{ -endif }} {{ -endif }} - {{ -endif }} } {{ -if is_test_mode }} @@ -185,5 +220,3 @@ void {header}_reset_flags() \{ 
{cpp_namespace}::reset_flags(); } {{ -endif }} - - diff --git a/tools/aconfig/aconfig/templates/rust.template b/tools/aconfig/aconfig/templates/rust.template index cfd9d6aec8..6456360734 100644 --- a/tools/aconfig/aconfig/templates/rust.template +++ b/tools/aconfig/aconfig/templates/rust.template @@ -2,88 +2,88 @@ use aconfig_storage_read_api::\{Mmap, AconfigStorageError, StorageFileType, PackageReadContext, get_mapped_storage_file, get_boolean_flag_value, get_package_read_context}; use std::path::Path; use std::io::Write; +use std::sync::LazyLock; use log::\{log, LevelFilter, Level}; -static STORAGE_MIGRATION_MARKER_FILE: &str = - "/metadata/aconfig_test_missions/mission_1"; -static MIGRATION_LOG_TAG: &str = "AconfigTestMission1"; - /// flag provider pub struct FlagProvider; {{ if has_readwrite- }} -lazy_static::lazy_static! \{ - {{ if allow_instrumentation }} - static ref PACKAGE_OFFSET: Result<Option<u32>, AconfigStorageError> = unsafe \{ - get_mapped_storage_file("{container}", StorageFileType::PackageMap) - .and_then(|package_map| get_package_read_context(&package_map, "{package}")) - .map(|context| context.map(|c| c.boolean_start_index)) - }; - - static ref FLAG_VAL_MAP: Result<Mmap, AconfigStorageError> = unsafe \{ - get_mapped_storage_file("{container}", StorageFileType::FlagVal) - }; - {{ -endif }} - +{{ if allow_instrumentation }} +static READ_FROM_NEW_STORAGE: LazyLock<bool> = LazyLock::new(|| unsafe \{ + Path::new("/metadata/aconfig/boot/enable_only_new_storage").exists() +}); + +static PACKAGE_OFFSET: LazyLock<Result<Option<u32>, AconfigStorageError>> = LazyLock::new(|| unsafe \{ + get_mapped_storage_file("{container}", StorageFileType::PackageMap) + .and_then(|package_map| get_package_read_context(&package_map, "{package}")) + .map(|context| context.map(|c| c.boolean_start_index)) +}); + +static FLAG_VAL_MAP: LazyLock<Result<Mmap, AconfigStorageError>> = LazyLock::new(|| unsafe \{ + get_mapped_storage_file("{container}", StorageFileType::FlagVal) +}); 
+{{ -endif }} {{ -for flag in template_flags }} - {{ -if flag.readwrite }} - /// flag value cache for {flag.name} - {{ if allow_instrumentation }} - static ref CACHED_{flag.name}: bool = \{ - let result = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.{flag.device_config_namespace}", - "{flag.device_config_flag}", - "{flag.default_value}") == "true"; - - if Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{ - // This will be called multiple times. Subsequent calls after the first are noops. - logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info)); - let aconfig_storage_result = FLAG_VAL_MAP - .as_ref() - .map_err(|err| format!("failed to get flag val map: \{err}")) - .and_then(|flag_val_map| \{ - PACKAGE_OFFSET - .as_ref() - .map_err(|err| format!("failed to get package read offset: \{err}")) - .and_then(|package_offset| \{ - match package_offset \{ - Some(offset) => \{ - get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset}) - .map_err(|err| format!("failed to get flag: \{err}")) - }, - None => Err("no context found for package '{package}'".to_string()) +{{ -if flag.readwrite }} +/// flag value cache for {flag.name} +{{ if allow_instrumentation }} +static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| \{ + + if *READ_FROM_NEW_STORAGE \{ + // This will be called multiple times. Subsequent calls after the first are noops. 
+ logger::init( + logger::Config::default() + .with_tag_on_device("aconfig_rust_codegen") + .with_max_level(LevelFilter::Info)); + + let flag_value_result = FLAG_VAL_MAP + .as_ref() + .map_err(|err| format!("failed to get flag val map: \{err}")) + .and_then(|flag_val_map| \{ + PACKAGE_OFFSET + .as_ref() + .map_err(|err| format!("failed to get package read offset: \{err}")) + .and_then(|package_offset| \{ + match package_offset \{ + Some(offset) => \{ + get_boolean_flag_value(&flag_val_map, offset + {flag.flag_offset}) + .map_err(|err| format!("failed to get flag: \{err}")) + }, + None => \{ + log!(Level::Error, "no context found for package {package}"); + Ok({flag.default_value}) } - }) - }); + } + }) + }); - match aconfig_storage_result \{ - Ok(storage_result) if storage_result == result => \{ - log!(Level::Info, "AconfigTestMission1: success! flag '{flag.name}' contained correct value. Legacy storage was \{result}, new storage was \{storage_result}"); - }, - Ok(storage_result) => \{ - log!(Level::Error, "AconfigTestMission1: error: mismatch for flag '{flag.name}'. 
Legacy storage was \{result}, new storage was \{storage_result}"); - }, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: \{err}") - } + match flag_value_result \{ + Ok(flag_value) => \{ + return flag_value; + }, + Err(err) => \{ + log!(Level::Error, "aconfig_rust_codegen: error: \{err}"); + panic!("failed to read flag value: \{err}"); } } + } else \{ + flags_rust::GetServerConfigurableFlag( + "aconfig_flags.{flag.device_config_namespace}", + "{flag.device_config_flag}", + "{flag.default_value}") == "true" + } - result - }; - {{ else }} - static ref CACHED_{flag.name}: bool = flags_rust::GetServerConfigurableFlag( - "aconfig_flags.{flag.device_config_namespace}", - "{flag.device_config_flag}", - "{flag.default_value}") == "true"; - {{ endif }} - {{ -endif }} +}); +{{ else }} +static CACHED_{flag.name}: LazyLock<bool> = LazyLock::new(|| flags_rust::GetServerConfigurableFlag( + "aconfig_flags.{flag.device_config_namespace}", + "{flag.device_config_flag}", + "{flag.default_value}") == "true"); +{{ endif }} +{{ -endif }} {{ -endfor }} -} {{ -endif }} impl FlagProvider \{ @@ -107,73 +107,11 @@ pub static PROVIDER: FlagProvider = FlagProvider; {{ for flag in template_flags }} /// query flag {flag.name} #[inline(always)] -{{ -if flag.readwrite }} pub fn {flag.name}() -> bool \{ +{{ -if flag.readwrite }} PROVIDER.{flag.name}() {{ -else }} -pub fn {flag.name}() -> bool \{ - {{ if not allow_instrumentation }} {flag.default_value} - {{ else }} - - let result = {flag.default_value}; - if !Path::new(STORAGE_MIGRATION_MARKER_FILE).exists() \{ - return result; - } - - // This will be called multiple times. Subsequent calls after the first - // are noops. 
- logger::init( - logger::Config::default() - .with_tag_on_device(MIGRATION_LOG_TAG) - .with_max_level(LevelFilter::Info), - ); - - unsafe \{ - let package_map = match get_mapped_storage_file("{flag.container}", StorageFileType::PackageMap) \{ - Ok(file) => file, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - - let package_read_context = match get_package_read_context(&package_map, "{package}") \{ - Ok(Some(context)) => context, - Ok(None) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': did not get context"); - return result; - }, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - let flag_val_map = match get_mapped_storage_file("{flag.container}", StorageFileType::FlagVal) \{ - Ok(val_map) => val_map, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - let value = match get_boolean_flag_value(&flag_val_map, {flag.flag_offset} + package_read_context.boolean_start_index) \{ - Ok(val) => val, - Err(err) => \{ - log!(Level::Error, "AconfigTestMission1: error: failed to read flag '{flag.name}': \{err}"); - return result; - } - }; - - if result != value \{ - log!(Level::Error, "AconfigTestMission1: error: flag mismatch for '{flag.name}'. Legacy storage was \{result}, new storage was \{value}"); - } else \{ - let default_value = {flag.default_value}; - log!(Level::Info, "AconfigTestMission1: success! flag '{flag.name}' contained correct value. 
Legacy storage was \{default_value}, new storage was \{value}"); - } - } - - result - {{ endif }} {{ -endif }} } {{ endfor }} diff --git a/tools/aconfig/aconfig_device_paths/Android.bp b/tools/aconfig/aconfig_device_paths/Android.bp index 2d943de672..bdf96ed896 100644 --- a/tools/aconfig/aconfig_device_paths/Android.bp +++ b/tools/aconfig/aconfig_device_paths/Android.bp @@ -39,8 +39,8 @@ rust_library { genrule { name: "libaconfig_java_device_paths_src", - srcs: ["src/DevicePathsTemplate.java"], - out: ["DevicePaths.java"], + srcs: ["src/DeviceProtosTemplate.java"], + out: ["DeviceProtos.java"], tool_files: ["partition_aconfig_flags_paths.txt"], cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out)", } @@ -48,5 +48,28 @@ genrule { java_library { name: "aconfig_device_paths_java", srcs: [":libaconfig_java_device_paths_src"], - sdk_version: "core_current", + static_libs: [ + "libaconfig_java_proto_nano", + ], + sdk_version: "core_platform", + apex_available: [ + "//apex_available:platform", + ], +} + +genrule { + name: "libaconfig_java_host_device_paths_src", + srcs: ["src/HostDeviceProtosTemplate.java"], + out: ["HostDeviceProtos.java"], + tool_files: [ + "partition_aconfig_flags_paths.txt", + "mainline_aconfig_flags_paths.txt", + ], + cmd: "sed -e '/TEMPLATE/{r$(location partition_aconfig_flags_paths.txt)' -e 'd}' $(in) > $(out).tmp && " + + "sed -e '/MAINLINE_T/{r$(location mainline_aconfig_flags_paths.txt)' -e 'd}' $(out).tmp > $(out)", +} + +java_library_host { + name: "aconfig_host_device_paths_java", + srcs: [":libaconfig_java_host_device_paths_src"], } diff --git a/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt new file mode 100644 index 0000000000..af73a842b9 --- /dev/null +++ b/tools/aconfig/aconfig_device_paths/mainline_aconfig_flags_paths.txt @@ -0,0 +1,20 @@ +"/apex/com.android.adservices/etc/aconfig_flags.pb", 
+"/apex/com.android.appsearch/etc/aconfig_flags.pb", +"/apex/com.android.art/etc/aconfig_flags.pb", +"/apex/com.android.btservices/etc/aconfig_flags.pb", +"/apex/com.android.cellbroadcast/etc/aconfig_flags.pb", +"/apex/com.android.configinfrastructure/etc/aconfig_flags.pb", +"/apex/com.android.conscrypt/etc/aconfig_flags.pb", +"/apex/com.android.devicelock/etc/aconfig_flags.pb", +"/apex/com.android.healthfitness/etc/aconfig_flags.pb", +"/apex/com.android.ipsec/etc/aconfig_flags.pb", +"/apex/com.android.media/etc/aconfig_flags.pb", +"/apex/com.android.mediaprovider/etc/aconfig_flags.pb", +"/apex/com.android.ondevicepersonalization/etc/aconfig_flags.pb", +"/apex/com.android.os.statsd/etc/aconfig_flags.pb", +"/apex/com.android.permission/etc/aconfig_flags.pb", +"/apex/com.android.profiling/etc/aconfig_flags.pb", +"/apex/com.android.tethering/etc/aconfig_flags.pb", +"/apex/com.android.uwb/etc/aconfig_flags.pb", +"/apex/com.android.virt/etc/aconfig_flags.pb", +"/apex/com.android.wifi/etc/aconfig_flags.pb", diff --git a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt index 140cd21ac8..e997e3ddfa 100644 --- a/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt +++ b/tools/aconfig/aconfig_device_paths/partition_aconfig_flags_paths.txt @@ -1,4 +1,3 @@ "/system/etc/aconfig_flags.pb", -"/system_ext/etc/aconfig_flags.pb", "/product/etc/aconfig_flags.pb", "/vendor/etc/aconfig_flags.pb", diff --git a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java index 16355a33f2..4d4119947f 100644 --- a/tools/aconfig/aconfig_device_paths/src/DevicePathsTemplate.java +++ b/tools/aconfig/aconfig_device_paths/src/DeviceProtosTemplate.java @@ -15,7 +15,12 @@ */ package android.aconfig; +import android.aconfig.nano.Aconfig.parsed_flag; +import android.aconfig.nano.Aconfig.parsed_flags; + import java.io.File; 
+import java.io.FileInputStream; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -23,14 +28,38 @@ import java.util.List; /** * @hide */ -public class DevicePaths { - static final String[] PATHS = { +public class DeviceProtos { + public static final String[] PATHS = { TEMPLATE }; private static final String APEX_DIR = "/apex"; private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb"; + /** + * Returns a list of all on-device aconfig protos. + * + * May throw an exception if the protos can't be read at the call site. For + * example, some of the protos are in the apex/ partition, which is mounted + * somewhat late in the boot process. + * + * @throws IOException if we can't read one of the protos yet + * @return a list of all on-device aconfig protos + */ + public static List<parsed_flag> loadAndParseFlagProtos() throws IOException { + ArrayList<parsed_flag> result = new ArrayList(); + + for (String path : parsedFlagsProtoPaths()) { + try (FileInputStream inputStream = new FileInputStream(path)) { + parsed_flags parsedFlags = parsed_flags.parseFrom(inputStream.readAllBytes()); + for (parsed_flag flag : parsedFlags.parsedFlag) { + result.add(flag); + } + } + } + + return result; + } /** * Returns the list of all on-device aconfig protos paths. diff --git a/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java new file mode 100644 index 0000000000..e7d0a76a8a --- /dev/null +++ b/tools/aconfig/aconfig_device_paths/src/HostDeviceProtosTemplate.java @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package android.aconfig; + +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * A host lib that can read all aconfig proto file paths on a given device. + * This lib is only available on device with root access (userdebug/eng). + */ +public class HostDeviceProtos { + /** + * An interface that executes ADB command and return the result. + */ + public static interface AdbCommandExecutor { + /** Executes the ADB command. */ + String executeAdbCommand(String command); + } + + static final String[] PATHS = { + TEMPLATE + }; + + static final String[] MAINLINE_PATHS = { + MAINLINE_T + }; + + private static final String APEX_DIR = "/apex"; + private static final String RECURSIVELY_LIST_APEX_DIR_COMMAND = + "shell su 0 find /apex | grep aconfig_flags"; + private static final String APEX_ACONFIG_PATH_SUFFIX = "/etc/aconfig_flags.pb"; + + + /** + * Returns the list of all on-device aconfig proto paths from host side. 
+ */ + public static List<String> parsedFlagsProtoPaths(AdbCommandExecutor adbCommandExecutor) { + ArrayList<String> paths = new ArrayList(Arrays.asList(PATHS)); + + String adbCommandOutput = adbCommandExecutor.executeAdbCommand( + RECURSIVELY_LIST_APEX_DIR_COMMAND); + + if (adbCommandOutput == null || adbCommandOutput.isEmpty()) { + paths.addAll(Arrays.asList(MAINLINE_PATHS)); + return paths; + } + + Set<String> allFiles = new HashSet<>(Arrays.asList(adbCommandOutput.split("\n"))); + + Set<String> subdirs = allFiles.stream().map(file -> { + String[] filePaths = file.split("/"); + // The first element is "", the second element is "apex". + return filePaths.length > 2 ? filePaths[2] : ""; + }).collect(Collectors.toSet()); + + for (String prefix : subdirs) { + // For each mainline modules, there are two directories, one <modulepackage>/, + // and one <modulepackage>@<versioncode>/. Just read the former. + if (prefix.contains("@")) { + continue; + } + + String protoPath = APEX_DIR + "/" + prefix + APEX_ACONFIG_PATH_SUFFIX; + if (allFiles.contains(protoPath)) { + paths.add(protoPath); + } + } + return paths; + } +} diff --git a/tools/aconfig/aconfig_device_paths/src/lib.rs b/tools/aconfig/aconfig_device_paths/src/lib.rs index 9ab9cea267..8871b4f8ac 100644 --- a/tools/aconfig/aconfig_device_paths/src/lib.rs +++ b/tools/aconfig/aconfig_device_paths/src/lib.rs @@ -62,13 +62,12 @@ mod tests { #[test] fn test_read_partition_paths() { - assert_eq!(read_partition_paths().len(), 4); + assert_eq!(read_partition_paths().len(), 3); assert_eq!( read_partition_paths(), vec![ PathBuf::from("/system/etc/aconfig_flags.pb"), - PathBuf::from("/system_ext/etc/aconfig_flags.pb"), PathBuf::from("/product/etc/aconfig_flags.pb"), PathBuf::from("/vendor/etc/aconfig_flags.pb") ] diff --git a/tools/aconfig/aconfig_flags/Android.bp b/tools/aconfig/aconfig_flags/Android.bp new file mode 100644 index 0000000000..4c1fd4efcf --- /dev/null +++ b/tools/aconfig/aconfig_flags/Android.bp @@ -0,0 +1,51 @@ 
+/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +rust_library { + name: "libaconfig_flags", + crate_name: "aconfig_flags", + srcs: [ + "src/lib.rs", + ], + rustlibs: [ + "libaconfig_flags_rust", + ], + host_supported: true, +} + +aconfig_declarations { + name: "aconfig_flags", + package: "com.android.aconfig.flags", + container: "system", + srcs: ["flags.aconfig"], +} + +rust_aconfig_library { + name: "libaconfig_flags_rust", + crate_name: "aconfig_flags_rust", + aconfig_declarations: "aconfig_flags", + host_supported: true, +} + +cc_aconfig_library { + name: "libaconfig_flags_cc", + aconfig_declarations: "aconfig_flags", +} + +java_aconfig_library { + name: "aconfig_flags_java", + aconfig_declarations: "aconfig_flags", +} diff --git a/tools/aconfig/aconfig_flags/Cargo.toml b/tools/aconfig/aconfig_flags/Cargo.toml new file mode 100644 index 0000000000..6eb9f14058 --- /dev/null +++ b/tools/aconfig/aconfig_flags/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "aconfig_flags" +version = "0.1.0" +edition = "2021" + +[features] +default = ["cargo"] +cargo = [] + +[dependencies]
\ No newline at end of file diff --git a/tools/aconfig/aconfig_flags/flags.aconfig b/tools/aconfig/aconfig_flags/flags.aconfig new file mode 100644 index 0000000000..0a004ca4e1 --- /dev/null +++ b/tools/aconfig/aconfig_flags/flags.aconfig @@ -0,0 +1,16 @@ +package: "com.android.aconfig.flags" +container: "system" + +flag { + name: "enable_only_new_storage" + namespace: "core_experiments_team_internal" + bug: "312235596" + description: "When enabled, aconfig flags are read from the new aconfig storage only." +} + +flag { + name: "enable_aconfigd_from_mainline" + namespace: "core_experiments_team_internal" + bug: "369808805" + description: "When enabled, launch aconfigd from config infra module." +} diff --git a/tools/aconfig/aconfig_flags/src/lib.rs b/tools/aconfig/aconfig_flags/src/lib.rs new file mode 100644 index 0000000000..2e891273ed --- /dev/null +++ b/tools/aconfig/aconfig_flags/src/lib.rs @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//! `aconfig_flags` is a crate for reading aconfig flags from Rust +// When building with the Android tool-chain +// +// - the flag functions will read from aconfig_flags_inner +// - the feature "cargo" will be disabled +// +// When building with cargo +// +// - the flag functions will all return some trivial value, like true +// - the feature "cargo" will be enabled +// +// This module hides these differences from the rest of aconfig. 
+ +/// Module used when building with the Android tool-chain +#[cfg(not(feature = "cargo"))] +pub mod auto_generated { + /// Returns the value for the enable_only_new_storage flag. + pub fn enable_only_new_storage() -> bool { + aconfig_flags_rust::enable_only_new_storage() + } + + /// Returns the value for the enable_aconfigd_from_mainline flag. + pub fn enable_aconfigd_from_mainline() -> bool { + aconfig_flags_rust::enable_only_new_storage() + } +} + +/// Module used when building with cargo +#[cfg(feature = "cargo")] +pub mod auto_generated { + /// Returns a placeholder value for the enable_only_new_storage flag. + pub fn enable_only_new_storage() -> bool { + // Used only to enable typechecking and testing with cargo + true + } + + /// Returns a placeholder value for the enable_aconfigd_from_mainline flag. + pub fn enable_aconfigd_from_mainline() -> bool { + // Used only to enable typechecking and testing with cargo + true + } +} diff --git a/tools/aconfig/aconfig_storage_file/Android.bp b/tools/aconfig/aconfig_storage_file/Android.bp index 38591947c1..e875c7be6a 100644 --- a/tools/aconfig/aconfig_storage_file/Android.bp +++ b/tools/aconfig/aconfig_storage_file/Android.bp @@ -14,6 +14,7 @@ rust_defaults { "libclap", "libcxx", "libaconfig_storage_protos", + "libserde", ], } @@ -36,7 +37,10 @@ rust_binary_host { name: "aconfig-storage", defaults: ["aconfig_storage_file.defaults"], srcs: ["src/main.rs"], - rustlibs: ["libaconfig_storage_file"], + rustlibs: [ + "libaconfig_storage_file", + "libserde_json", + ], } rust_test_host { @@ -138,11 +142,28 @@ cc_library { double_loadable: true, } -// storage file parse api java cc_library +// storage file parse api java library java_library { name: "aconfig_storage_file_java", srcs: [ "srcs/**/*.java", ], sdk_version: "core_current", -}
\ No newline at end of file + min_sdk_version: "29", + host_supported: true, + apex_available: [ + "//apex_available:platform", + "//apex_available:anyapex", + ], +} + +// storage file parse api java library for core library +java_library { + name: "aconfig_storage_file_java_none", + srcs: [ + "srcs/**/*.java", + ], + sdk_version: "none", + system_modules: "core-all-system-modules", + host_supported: true, +} diff --git a/tools/aconfig/aconfig_storage_file/Cargo.toml b/tools/aconfig/aconfig_storage_file/Cargo.toml index 192dfad40a..a40557803f 100644 --- a/tools/aconfig/aconfig_storage_file/Cargo.toml +++ b/tools/aconfig/aconfig_storage_file/Cargo.toml @@ -14,6 +14,8 @@ tempfile = "3.9.0" thiserror = "1.0.56" clap = { version = "4.1.8", features = ["derive"] } cxx = "1.0" +serde = { version = "1.0.152", features = ["derive"] } +serde_json = "1.0.93" [[bin]] name = "aconfig-storage" diff --git a/tools/aconfig/aconfig_storage_file/src/flag_info.rs b/tools/aconfig/aconfig_storage_file/src/flag_info.rs index beac38d156..f090396901 100644 --- a/tools/aconfig/aconfig_storage_file/src/flag_info.rs +++ b/tools/aconfig/aconfig_storage_file/src/flag_info.rs @@ -20,10 +20,11 @@ use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes}; use crate::{AconfigStorageError, StorageFileType}; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::fmt; /// Flag info header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagInfoHeader { pub version: u32, pub container: String, @@ -89,7 +90,7 @@ impl FlagInfoHeader { } /// bit field for flag info -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum FlagInfoBit { HasServerOverride = 1 << 0, IsReadWrite = 1 << 1, @@ -97,7 +98,7 @@ pub enum FlagInfoBit { } /// Flag info node struct -#[derive(PartialEq, Clone)] +#[derive(PartialEq, Clone, Serialize, Deserialize)] pub struct FlagInfoNode { pub attributes: u8, } @@ 
-138,7 +139,7 @@ impl FlagInfoNode { } /// Flag info list struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagInfoList { pub header: FlagInfoHeader, pub nodes: Vec<FlagInfoNode>, diff --git a/tools/aconfig/aconfig_storage_file/src/flag_table.rs b/tools/aconfig/aconfig_storage_file/src/flag_table.rs index 64b90eabfa..0588fe5039 100644 --- a/tools/aconfig/aconfig_storage_file/src/flag_table.rs +++ b/tools/aconfig/aconfig_storage_file/src/flag_table.rs @@ -23,10 +23,11 @@ use crate::{ }; use crate::{AconfigStorageError, StorageFileType, StoredFlagType}; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::fmt; /// Flag table header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagTableHeader { pub version: u32, pub container: String, @@ -95,7 +96,7 @@ impl FlagTableHeader { } /// Flag table node struct -#[derive(PartialEq, Clone)] +#[derive(PartialEq, Clone, Serialize, Deserialize)] pub struct FlagTableNode { pub package_id: u32, pub flag_name: String, @@ -150,11 +151,11 @@ impl FlagTableNode { /// Calculate node bucket index pub fn find_bucket_index(package_id: u32, flag_name: &str, num_buckets: u32) -> u32 { let full_flag_name = package_id.to_string() + "/" + flag_name; - get_bucket_index(&full_flag_name, num_buckets) + get_bucket_index(full_flag_name.as_bytes(), num_buckets) } } -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagTable { pub header: FlagTableHeader, pub buckets: Vec<Option<u32>>, diff --git a/tools/aconfig/aconfig_storage_file/src/flag_value.rs b/tools/aconfig/aconfig_storage_file/src/flag_value.rs index 506924b339..b64c10ecdd 100644 --- a/tools/aconfig/aconfig_storage_file/src/flag_value.rs +++ b/tools/aconfig/aconfig_storage_file/src/flag_value.rs @@ -20,10 +20,11 @@ use crate::{read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes}; use crate::{AconfigStorageError, StorageFileType}; use anyhow::anyhow; +use 
serde::{Deserialize, Serialize}; use std::fmt; /// Flag value header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagValueHeader { pub version: u32, pub container: String, @@ -89,7 +90,7 @@ impl FlagValueHeader { } /// Flag value list struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct FlagValueList { pub header: FlagValueHeader, pub booleans: Vec<bool>, diff --git a/tools/aconfig/aconfig_storage_file/src/lib.rs b/tools/aconfig/aconfig_storage_file/src/lib.rs index 26e9c1a3be..1d92ba49ab 100644 --- a/tools/aconfig/aconfig_storage_file/src/lib.rs +++ b/tools/aconfig/aconfig_storage_file/src/lib.rs @@ -37,26 +37,33 @@ pub mod flag_table; pub mod flag_value; pub mod package_table; pub mod protos; +pub mod sip_hasher13; pub mod test_utils; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::cmp::Ordering; -use std::collections::hash_map::DefaultHasher; use std::fs::File; -use std::hash::{Hash, Hasher}; +use std::hash::Hasher; use std::io::Read; pub use crate::flag_info::{FlagInfoBit, FlagInfoHeader, FlagInfoList, FlagInfoNode}; pub use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode}; pub use crate::flag_value::{FlagValueHeader, FlagValueList}; pub use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode}; +pub use crate::sip_hasher13::SipHasher13; use crate::AconfigStorageError::{ BytesParseFail, HashTableSizeLimit, InvalidFlagValueType, InvalidStoredFlagType, }; -/// Storage file version -pub const FILE_VERSION: u32 = 1; +/// The max storage file version from which we can safely read/write. May be +/// experimental. +pub const MAX_SUPPORTED_FILE_VERSION: u32 = 2; + +/// The newest fully-released version. Unless otherwise specified, this is the +/// version we will write. 
+pub const DEFAULT_FILE_VERSION: u32 = 1; /// Good hash table prime number pub(crate) const HASH_PRIMES: [u32; 29] = [ @@ -106,7 +113,7 @@ impl TryFrom<u8> for StorageFileType { /// Flag type enum as stored by storage file /// ONLY APPEND, NEVER REMOVE FOR BACKWARD COMPATIBILITY. THE MAX IS U16. -#[derive(Clone, Copy, Debug, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum StoredFlagType { ReadWriteBoolean = 0, ReadOnlyBoolean = 1, @@ -211,10 +218,12 @@ pub fn get_table_size(entries: u32) -> Result<u32, AconfigStorageError> { } /// Get the corresponding bucket index given the key and number of buckets -pub(crate) fn get_bucket_index<T: Hash>(val: &T, num_buckets: u32) -> u32 { - let mut s = DefaultHasher::new(); - val.hash(&mut s); - (s.finish() % num_buckets as u64) as u32 +pub(crate) fn get_bucket_index(val: &[u8], num_buckets: u32) -> u32 { + let mut s = SipHasher13::new(); + s.write(val); + s.write_u8(0xff); + let ret = (s.finish() % num_buckets as u64) as u32; + ret } /// Read and parse bytes as u8 @@ -240,6 +249,11 @@ pub(crate) fn read_u16_from_bytes( Ok(val) } +/// Read and parse the first 4 bytes of buf as u32. 
+pub fn read_u32_from_start_of_bytes(buf: &[u8]) -> Result<u32, AconfigStorageError> { + read_u32_from_bytes(buf, &mut 0) +} + /// Read and parse bytes as u32 pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32, AconfigStorageError> { let val = @@ -250,6 +264,16 @@ pub fn read_u32_from_bytes(buf: &[u8], head: &mut usize) -> Result<u32, AconfigS Ok(val) } +// Read and parse bytes as u64 +pub fn read_u64_from_bytes(buf: &[u8], head: &mut usize) -> Result<u64, AconfigStorageError> { + let val = + u64::from_le_bytes(buf[*head..*head + 8].try_into().map_err(|errmsg| { + BytesParseFail(anyhow!("fail to parse u64 from bytes: {}", errmsg)) + })?); + *head += 8; + Ok(val) +} + /// Read and parse bytes as string pub(crate) fn read_str_from_bytes( buf: &[u8], diff --git a/tools/aconfig/aconfig_storage_file/src/main.rs b/tools/aconfig/aconfig_storage_file/src/main.rs index 8b9e38da02..a9cfd19066 100644 --- a/tools/aconfig/aconfig_storage_file/src/main.rs +++ b/tools/aconfig/aconfig_storage_file/src/main.rs @@ -20,9 +20,29 @@ use aconfig_storage_file::{ list_flags, list_flags_with_info, read_file_to_bytes, AconfigStorageError, FlagInfoList, FlagTable, FlagValueList, PackageTable, StorageFileType, }; - use clap::{builder::ArgAction, Arg, Command}; +use serde::Serialize; +use serde_json; +use std::fmt; +use std::fs; +use std::fs::File; +use std::io::Write; +/** + * Usage Examples + * + * Print file: + * $ aconfig-storage print --file=path/to/flag.map --type=flag_map + * + * List flags: + * $ aconfig-storage list --flag-map=path/to/flag.map \ + * --flag-val=path/to/flag.val --package-map=path/to/package.map + * + * Write binary file for testing: + * $ aconfig-storage print --file=path/to/flag.map --type=flag_map --format=json > flag_map.json + * $ vim flag_map.json // Manually make updates + * $ aconfig-storage write-bytes --input-file=flag_map.json --output-file=path/to/flag.map --type=flag_map + */ fn cli() -> Command { Command::new("aconfig-storage") 
.subcommand_required(true) @@ -34,7 +54,8 @@ fn cli() -> Command { .long("type") .required(true) .value_parser(|s: &str| StorageFileType::try_from(s)), - ), + ) + .arg(Arg::new("format").long("format").required(false).action(ArgAction::Set)), ) .subcommand( Command::new("list") @@ -50,41 +71,75 @@ fn cli() -> Command { Arg::new("flag-info").long("flag-info").required(false).action(ArgAction::Set), ), ) + .subcommand( + Command::new("write-bytes") + // Where to write the output bytes. Suggest to use the StorageFileType names (e.g. flag.map). + .arg( + Arg::new("output-file") + .long("output-file") + .required(true) + .action(ArgAction::Set), + ) + // Input file should be json. + .arg( + Arg::new("input-file").long("input-file").required(true).action(ArgAction::Set), + ) + .arg( + Arg::new("type") + .long("type") + .required(true) + .value_parser(|s: &str| StorageFileType::try_from(s)), + ), + ) } fn print_storage_file( file_path: &str, file_type: &StorageFileType, + as_json: bool, ) -> Result<(), AconfigStorageError> { let bytes = read_file_to_bytes(file_path)?; match file_type { StorageFileType::PackageMap => { let package_table = PackageTable::from_bytes(&bytes)?; - println!("{:?}", package_table); + println!("{}", to_print_format(package_table, as_json)); } StorageFileType::FlagMap => { let flag_table = FlagTable::from_bytes(&bytes)?; - println!("{:?}", flag_table); + println!("{}", to_print_format(flag_table, as_json)); } StorageFileType::FlagVal => { let flag_value = FlagValueList::from_bytes(&bytes)?; - println!("{:?}", flag_value); + println!("{}", to_print_format(flag_value, as_json)); } StorageFileType::FlagInfo => { let flag_info = FlagInfoList::from_bytes(&bytes)?; - println!("{:?}", flag_info); + println!("{}", to_print_format(flag_info, as_json)); } } Ok(()) } +fn to_print_format<T>(file_contents: T, as_json: bool) -> String +where + T: Serialize + fmt::Debug, +{ + if as_json { + serde_json::to_string(&file_contents).unwrap() + } else { + 
format!("{:?}", file_contents) + } +} + fn main() -> Result<(), AconfigStorageError> { let matches = cli().get_matches(); match matches.subcommand() { Some(("print", sub_matches)) => { let file_path = sub_matches.get_one::<String>("file").unwrap(); let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap(); - print_storage_file(file_path, file_type)? + let format = sub_matches.get_one::<String>("format"); + let as_json: bool = format == Some(&"json".to_string()); + print_storage_file(file_path, file_type, as_json)? } Some(("list", sub_matches)) => { let package_map = sub_matches.get_one::<String>("package-map").unwrap(); @@ -96,10 +151,10 @@ fn main() -> Result<(), AconfigStorageError> { let flags = list_flags_with_info(package_map, flag_map, flag_val, info_file)?; for flag in flags.iter() { println!( - "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}", - flag.package_name, flag.flag_name, flag.flag_value, flag.value_type, - flag.is_readwrite, flag.has_server_override, flag.has_local_override, - ); + "{} {} {} {:?} IsReadWrite: {}, HasServerOverride: {}, HasLocalOverride: {}", + flag.package_name, flag.flag_name, flag.flag_value, flag.value_type, + flag.is_readwrite, flag.has_server_override, flag.has_local_override, + ); } } None => { @@ -113,6 +168,40 @@ fn main() -> Result<(), AconfigStorageError> { } } } + // Converts JSON of the file into raw bytes (as is used on-device). + // Intended to generate/easily update these files for testing. 
+ Some(("write-bytes", sub_matches)) => { + let input_file_path = sub_matches.get_one::<String>("input-file").unwrap(); + let input_json = fs::read_to_string(input_file_path).unwrap(); + + let file_type = sub_matches.get_one::<StorageFileType>("type").unwrap(); + let output_bytes: Vec<u8>; + match file_type { + StorageFileType::FlagVal => { + let list: FlagValueList = serde_json::from_str(&input_json).unwrap(); + output_bytes = list.into_bytes(); + } + StorageFileType::FlagInfo => { + let list: FlagInfoList = serde_json::from_str(&input_json).unwrap(); + output_bytes = list.into_bytes(); + } + StorageFileType::FlagMap => { + let table: FlagTable = serde_json::from_str(&input_json).unwrap(); + output_bytes = table.into_bytes(); + } + StorageFileType::PackageMap => { + let table: PackageTable = serde_json::from_str(&input_json).unwrap(); + output_bytes = table.into_bytes(); + } + } + + let output_file_path = sub_matches.get_one::<String>("output-file").unwrap(); + let file = File::create(output_file_path); + if file.is_err() { + panic!("can't make file"); + } + let _ = file.unwrap().write_all(&output_bytes); + } _ => unreachable!(), } Ok(()) diff --git a/tools/aconfig/aconfig_storage_file/src/package_table.rs b/tools/aconfig/aconfig_storage_file/src/package_table.rs index b734972f33..af39fbc783 100644 --- a/tools/aconfig/aconfig_storage_file/src/package_table.rs +++ b/tools/aconfig/aconfig_storage_file/src/package_table.rs @@ -17,13 +17,17 @@ //! package table module defines the package table file format and methods for serialization //! 
and deserialization -use crate::{get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u8_from_bytes}; +use crate::{ + get_bucket_index, read_str_from_bytes, read_u32_from_bytes, read_u64_from_bytes, + read_u8_from_bytes, +}; use crate::{AconfigStorageError, StorageFileType}; use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use std::fmt; /// Package table header struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct PackageTableHeader { pub version: u32, pub container: String, @@ -92,7 +96,7 @@ impl PackageTableHeader { } /// Package table node struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct PackageTableNode { pub package_name: String, pub package_id: u32, @@ -116,7 +120,16 @@ impl fmt::Debug for PackageTableNode { impl PackageTableNode { /// Serialize to bytes - pub fn into_bytes(&self) -> Vec<u8> { + pub fn into_bytes(&self, version: u32) -> Vec<u8> { + match version { + 1 => Self::into_bytes_v1(self), + 2 => Self::into_bytes_v2(self), + // TODO(b/316357686): into_bytes should return a Result. + _ => Self::into_bytes_v2(&self), + } + } + + fn into_bytes_v1(&self) -> Vec<u8> { let mut result = Vec::new(); let name_bytes = self.package_name.as_bytes(); result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes()); @@ -127,18 +140,64 @@ impl PackageTableNode { result } - /// Deserialize from bytes - pub fn from_bytes(bytes: &[u8]) -> Result<Self, AconfigStorageError> { + fn into_bytes_v2(&self) -> Vec<u8> { + let mut result = Vec::new(); + let name_bytes = self.package_name.as_bytes(); + result.extend_from_slice(&(name_bytes.len() as u32).to_le_bytes()); + result.extend_from_slice(name_bytes); + result.extend_from_slice(&self.package_id.to_le_bytes()); + // V2 storage files have a fingerprint. Current struct (v1) does not, so + // we write 0. 
+ result.extend_from_slice(&0u64.to_le_bytes()); + result.extend_from_slice(&self.boolean_start_index.to_le_bytes()); + result.extend_from_slice(&self.next_offset.unwrap_or(0).to_le_bytes()); + result + } + + /// Deserialize from bytes based on file version. + pub fn from_bytes(bytes: &[u8], version: u32) -> Result<Self, AconfigStorageError> { + match version { + 1 => Self::from_bytes_v1(bytes), + 2 => Self::from_bytes_v2(bytes), + _ => { + return Err(AconfigStorageError::BytesParseFail(anyhow!( + "Binary file is an unsupported version: {}", + version + ))) + } + } + } + + fn from_bytes_v1(bytes: &[u8]) -> Result<Self, AconfigStorageError> { let mut head = 0; - let node = Self { - package_name: read_str_from_bytes(bytes, &mut head)?, - package_id: read_u32_from_bytes(bytes, &mut head)?, - boolean_start_index: read_u32_from_bytes(bytes, &mut head)?, - next_offset: match read_u32_from_bytes(bytes, &mut head)? { - 0 => None, - val => Some(val), - }, + let package_name = read_str_from_bytes(bytes, &mut head)?; + let package_id = read_u32_from_bytes(bytes, &mut head)?; + let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?; + let next_offset = match read_u32_from_bytes(bytes, &mut head)? { + 0 => None, + val => Some(val), + }; + + let node = Self { package_name, package_id, boolean_start_index, next_offset }; + Ok(node) + } + + fn from_bytes_v2(bytes: &[u8]) -> Result<Self, AconfigStorageError> { + let mut head = 0; + let package_name = read_str_from_bytes(bytes, &mut head)?; + let package_id = read_u32_from_bytes(bytes, &mut head)?; + + // Fingerprint is unused in the current struct (v1), but we need to read + // the bytes if the storage file type is v2 or else the subsequent + // fields will be inaccurate. + let _fingerprint = read_u64_from_bytes(bytes, &mut head)?; + let boolean_start_index = read_u32_from_bytes(bytes, &mut head)?; + let next_offset = match read_u32_from_bytes(bytes, &mut head)? 
{ + 0 => None, + val => Some(val), }; + + let node = Self { package_name, package_id, boolean_start_index, next_offset }; Ok(node) } @@ -146,12 +205,12 @@ impl PackageTableNode { /// construction side (aconfig binary) and consumption side (flag read lib) /// use the same method of hashing pub fn find_bucket_index(package: &str, num_buckets: u32) -> u32 { - get_bucket_index(&package, num_buckets) + get_bucket_index(package.as_bytes(), num_buckets) } } /// Package table struct -#[derive(PartialEq)] +#[derive(PartialEq, Serialize, Deserialize)] pub struct PackageTable { pub header: PackageTableHeader, pub buckets: Vec<Option<u32>>, @@ -179,7 +238,11 @@ impl PackageTable { [ self.header.into_bytes(), self.buckets.iter().map(|v| v.unwrap_or(0).to_le_bytes()).collect::<Vec<_>>().concat(), - self.nodes.iter().map(|v| v.into_bytes()).collect::<Vec<_>>().concat(), + self.nodes + .iter() + .map(|v| v.into_bytes(self.header.version)) + .collect::<Vec<_>>() + .concat(), ] .concat() } @@ -198,8 +261,8 @@ impl PackageTable { .collect(); let nodes = (0..num_packages) .map(|_| { - let node = PackageTableNode::from_bytes(&bytes[head..])?; - head += node.into_bytes().len(); + let node = PackageTableNode::from_bytes(&bytes[head..], header.version)?; + head += node.into_bytes(header.version).len(); Ok(node) }) .collect::<Result<Vec<_>, AconfigStorageError>>() @@ -218,7 +281,8 @@ impl PackageTable { #[cfg(test)] mod tests { use super::*; - use crate::test_utils::create_test_package_table; + use crate::read_u32_from_start_of_bytes; + use crate::{test_utils::create_test_package_table, DEFAULT_FILE_VERSION}; #[test] // this test point locks down the table serialization @@ -231,7 +295,9 @@ mod tests { let nodes: &Vec<PackageTableNode> = &package_table.nodes; for node in nodes.iter() { - let reinterpreted_node = PackageTableNode::from_bytes(&node.into_bytes()).unwrap(); + let reinterpreted_node = + PackageTableNode::from_bytes(&node.into_bytes(header.version), header.version) + .unwrap(); 
assert_eq!(node, &reinterpreted_node); } @@ -248,9 +314,36 @@ mod tests { fn test_version_number() { let package_table = create_test_package_table(); let bytes = &package_table.into_bytes(); - let mut head = 0; - let version = read_u32_from_bytes(bytes, &mut head).unwrap(); - assert_eq!(version, 1); + let version = read_u32_from_start_of_bytes(bytes).unwrap(); + assert_eq!(version, DEFAULT_FILE_VERSION); + } + + #[test] + fn test_round_trip_v1() { + let table_v1: PackageTable = create_test_package_table(); + let table_bytes_v1 = table_v1.into_bytes(); + + // Will automatically read from version 2 as the version code is encoded + // into the bytes. + let reinterpreted_table = PackageTable::from_bytes(&table_bytes_v1).unwrap(); + + assert_eq!(table_v1, reinterpreted_table); + } + + #[test] + fn test_round_trip_v2() { + // Have to fake v2 because though we will set the version to v2 + // and write the bytes as v2, we don't have the ability to actually set + // the fingerprint yet. + let mut fake_table_v2 = create_test_package_table(); + fake_table_v2.header.version = 2; + let table_bytes_v2 = fake_table_v2.into_bytes(); + + // Will automatically read from version 2 as the version code is encoded + // into the bytes. + let reinterpreted_table = PackageTable::from_bytes(&table_bytes_v2).unwrap(); + + assert_eq!(fake_table_v2, reinterpreted_table); } #[test] diff --git a/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs new file mode 100644 index 0000000000..9be3175e18 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/src/sip_hasher13.rs @@ -0,0 +1,327 @@ +/* + * Copyright (C) 2023 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//! An implementation of SipHash13 + +use std::cmp; +use std::mem; +use std::ptr; +use std::slice; + +use std::hash::Hasher; + +/// An implementation of SipHash 2-4. +/// +#[derive(Debug, Clone, Default)] +pub struct SipHasher13 { + k0: u64, + k1: u64, + length: usize, // how many bytes we've processed + state: State, // hash State + tail: u64, // unprocessed bytes le + ntail: usize, // how many bytes in tail are valid +} + +#[derive(Debug, Clone, Copy, Default)] +#[repr(C)] +struct State { + // v0, v2 and v1, v3 show up in pairs in the algorithm, + // and simd implementations of SipHash will use vectors + // of v02 and v13. By placing them in this order in the struct, + // the compiler can pick up on just a few simd optimizations by itself. + v0: u64, + v2: u64, + v1: u64, + v3: u64, +} + +macro_rules! compress { + ($state:expr) => {{ + compress!($state.v0, $state.v1, $state.v2, $state.v3) + }}; + ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => {{ + $v0 = $v0.wrapping_add($v1); + $v1 = $v1.rotate_left(13); + $v1 ^= $v0; + $v0 = $v0.rotate_left(32); + $v2 = $v2.wrapping_add($v3); + $v3 = $v3.rotate_left(16); + $v3 ^= $v2; + $v0 = $v0.wrapping_add($v3); + $v3 = $v3.rotate_left(21); + $v3 ^= $v0; + $v2 = $v2.wrapping_add($v1); + $v1 = $v1.rotate_left(17); + $v1 ^= $v2; + $v2 = $v2.rotate_left(32); + }}; +} + +/// Load an integer of the desired type from a byte stream, in LE order. Uses +/// `copy_nonoverlapping` to let the compiler generate the most efficient way +/// to load it from a possibly unaligned address. 
+/// +/// Unsafe because: unchecked indexing at i..i+size_of(int_ty) +macro_rules! load_int_le { + ($buf:expr, $i:expr, $int_ty:ident) => {{ + debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len()); + let mut data = 0 as $int_ty; + ptr::copy_nonoverlapping( + $buf.get_unchecked($i), + &mut data as *mut _ as *mut u8, + mem::size_of::<$int_ty>(), + ); + data.to_le() + }}; +} + +/// Load an u64 using up to 7 bytes of a byte slice. +/// +/// Unsafe because: unchecked indexing at start..start+len +#[inline] +unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { + debug_assert!(len < 8); + let mut i = 0; // current byte index (from LSB) in the output u64 + let mut out = 0; + if i + 3 < len { + out = load_int_le!(buf, start + i, u32) as u64; + i += 4; + } + if i + 1 < len { + out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8); + i += 2 + } + if i < len { + out |= (*buf.get_unchecked(start + i) as u64) << (i * 8); + i += 1; + } + debug_assert_eq!(i, len); + out +} + +impl SipHasher13 { + /// Creates a new `SipHasher13` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher13 { + SipHasher13::new_with_keys(0, 0) + } + + /// Creates a `SipHasher13` that is keyed off the provided keys. 
+ #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 { + let mut sip_hasher = SipHasher13 { + k0: key0, + k1: key1, + length: 0, + state: State { v0: 0, v1: 0, v2: 0, v3: 0 }, + tail: 0, + ntail: 0, + }; + sip_hasher.reset(); + sip_hasher + } + + #[inline] + fn c_rounds(state: &mut State) { + compress!(state); + } + + #[inline] + fn d_rounds(state: &mut State) { + compress!(state); + compress!(state); + compress!(state); + } + + #[inline] + fn reset(&mut self) { + self.length = 0; + self.state.v0 = self.k0 ^ 0x736f6d6570736575; + self.state.v1 = self.k1 ^ 0x646f72616e646f6d; + self.state.v2 = self.k0 ^ 0x6c7967656e657261; + self.state.v3 = self.k1 ^ 0x7465646279746573; + self.ntail = 0; + } + + // Specialized write function that is only valid for buffers with len <= 8. + // It's used to force inlining of write_u8 and write_usize, those would normally be inlined + // except for composite types (that includes slices and str hashing because of delimiter). + // Without this extra push the compiler is very reluctant to inline delimiter writes, + // degrading performance substantially for the most common use cases. + #[inline] + fn short_write(&mut self, msg: &[u8]) { + debug_assert!(msg.len() <= 8); + let length = msg.len(); + self.length += length; + + let needed = 8 - self.ntail; + let fill = cmp::min(length, needed); + if fill == 8 { + // safe to call since msg hasn't been loaded + self.tail = unsafe { load_int_le!(msg, 0, u64) }; + } else { + // safe to call since msg hasn't been loaded, and fill <= msg.len() + self.tail |= unsafe { u8to64_le(msg, 0, fill) } << (8 * self.ntail); + if length < needed { + self.ntail += length; + return; + } + } + self.state.v3 ^= self.tail; + Self::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + + // Buffered tail is now flushed, process new input. 
+ self.ntail = length - needed; + // safe to call since number of `needed` bytes has been loaded + // and self.ntail + needed == msg.len() + self.tail = unsafe { u8to64_le(msg, needed, self.ntail) }; + } +} + +impl Hasher for SipHasher13 { + // see short_write comment for explanation + #[inline] + fn write_usize(&mut self, i: usize) { + // safe to call, since convert the pointer to u8 + let bytes = unsafe { + slice::from_raw_parts(&i as *const usize as *const u8, mem::size_of::<usize>()) + }; + self.short_write(bytes); + } + + // see short_write comment for explanation + #[inline] + fn write_u8(&mut self, i: u8) { + self.short_write(&[i]); + } + + #[inline] + fn write(&mut self, msg: &[u8]) { + let length = msg.len(); + self.length += length; + + let mut needed = 0; + + // loading unprocessed byte from last write + if self.ntail != 0 { + needed = 8 - self.ntail; + // safe to call, since msg hasn't been processed + // and cmp::min(length, needed) < 8 + self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail; + if length < needed { + self.ntail += length; + return; + } else { + self.state.v3 ^= self.tail; + Self::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + self.ntail = 0; + } + } + + // Buffered tail is now flushed, process new input. 
+ let len = length - needed; + let left = len & 0x7; + + let mut i = needed; + while i < len - left { + // safe to call since if i < len - left, it means msg has at least 1 byte to load + let mi = unsafe { load_int_le!(msg, i, u64) }; + + self.state.v3 ^= mi; + Self::c_rounds(&mut self.state); + self.state.v0 ^= mi; + + i += 8; + } + + // safe to call since if left == 0, since this call will load nothing + // if left > 0, it means there are number of `left` bytes in msg + self.tail = unsafe { u8to64_le(msg, i, left) }; + self.ntail = left; + } + + #[inline] + fn finish(&self) -> u64 { + let mut state = self.state; + + let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail; + + state.v3 ^= b; + Self::c_rounds(&mut state); + state.v0 ^= b; + + state.v2 ^= 0xff; + Self::d_rounds(&mut state); + + state.v0 ^ state.v1 ^ state.v2 ^ state.v3 + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::hash::{Hash, Hasher}; + use std::string::String; + + #[test] + // this test point locks down the value list serialization + fn test_sip_hash13_string_hash() { + let mut sip_hash13 = SipHasher13::new(); + let test_str1 = String::from("com.google.android.test"); + test_str1.hash(&mut sip_hash13); + assert_eq!(17898838669067067585, sip_hash13.finish()); + + let test_str2 = String::from("adfadfadf adfafadadf 1231241241"); + test_str2.hash(&mut sip_hash13); + assert_eq!(13543518987672889310, sip_hash13.finish()); + } + + #[test] + fn test_sip_hash13_write() { + let mut sip_hash13 = SipHasher13::new(); + let test_str1 = String::from("com.google.android.test"); + sip_hash13.write(test_str1.as_bytes()); + sip_hash13.write_u8(0xff); + assert_eq!(17898838669067067585, sip_hash13.finish()); + + let mut sip_hash132 = SipHasher13::new(); + let test_str1 = String::from("com.google.android.test"); + sip_hash132.write(test_str1.as_bytes()); + assert_eq!(9685440969685209025, sip_hash132.finish()); + sip_hash132.write(test_str1.as_bytes()); + assert_eq!(6719694176662736568, 
sip_hash132.finish()); + + let mut sip_hash133 = SipHasher13::new(); + let test_str2 = String::from("abcdefg"); + test_str2.hash(&mut sip_hash133); + assert_eq!(2492161047327640297, sip_hash133.finish()); + + let mut sip_hash134 = SipHasher13::new(); + let test_str3 = String::from("abcdefgh"); + test_str3.hash(&mut sip_hash134); + assert_eq!(6689927370435554326, sip_hash134.finish()); + } + + #[test] + fn test_sip_hash13_write_short() { + let mut sip_hash13 = SipHasher13::new(); + sip_hash13.write_u8(0x61); + assert_eq!(4644417185603328019, sip_hash13.finish()); + } +} diff --git a/tools/aconfig/aconfig_storage_file/src/test_utils.rs b/tools/aconfig/aconfig_storage_file/src/test_utils.rs index 106666c47f..5c364f6bc2 100644 --- a/tools/aconfig/aconfig_storage_file/src/test_utils.rs +++ b/tools/aconfig/aconfig_storage_file/src/test_utils.rs @@ -18,7 +18,7 @@ use crate::flag_info::{FlagInfoHeader, FlagInfoList, FlagInfoNode}; use crate::flag_table::{FlagTable, FlagTableHeader, FlagTableNode}; use crate::flag_value::{FlagValueHeader, FlagValueList}; use crate::package_table::{PackageTable, PackageTableHeader, PackageTableNode}; -use crate::{AconfigStorageError, StorageFileType, StoredFlagType}; +use crate::{AconfigStorageError, StorageFileType, StoredFlagType, DEFAULT_FILE_VERSION}; use anyhow::anyhow; use std::io::Write; @@ -26,7 +26,7 @@ use tempfile::NamedTempFile; pub fn create_test_package_table() -> PackageTable { let header = PackageTableHeader { - version: 1, + version: DEFAULT_FILE_VERSION, container: String::from("mockup"), file_type: StorageFileType::PackageMap as u8, file_size: 209, @@ -78,7 +78,7 @@ impl FlagTableNode { pub fn create_test_flag_table() -> FlagTable { let header = FlagTableHeader { - version: 1, + version: DEFAULT_FILE_VERSION, container: String::from("mockup"), file_type: StorageFileType::FlagMap as u8, file_size: 321, @@ -120,7 +120,7 @@ pub fn create_test_flag_table() -> FlagTable { pub fn create_test_flag_value_list() -> FlagValueList { 
let header = FlagValueHeader { - version: 1, + version: DEFAULT_FILE_VERSION, container: String::from("mockup"), file_type: StorageFileType::FlagVal as u8, file_size: 35, @@ -133,7 +133,7 @@ pub fn create_test_flag_value_list() -> FlagValueList { pub fn create_test_flag_info_list() -> FlagInfoList { let header = FlagInfoHeader { - version: 1, + version: DEFAULT_FILE_VERSION, container: String::from("mockup"), file_type: StorageFileType::FlagInfo as u8, file_size: 35, diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java index 1c72364e6b..4bea0836f0 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/ByteBufferReader.java @@ -48,6 +48,10 @@ public class ByteBufferReader { return new String(bytes, StandardCharsets.UTF_8); } + public int readByte(int i) { + return Byte.toUnsignedInt(mByteBuffer.get(i)); + } + public void position(int newPosition) { mByteBuffer.position(newPosition); } diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java index e85fdee20f..757844a603 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagTable.java @@ -16,41 +16,57 @@ package android.aconfig.storage; +import static java.nio.charset.StandardCharsets.UTF_8; + import java.nio.ByteBuffer; -import java.util.HashMap; -import java.util.Map; import java.util.Objects; public class FlagTable { private Header mHeader; - private Map<String, Node> mNodeMap; + private ByteBufferReader mReader; public static FlagTable fromBytes(ByteBuffer bytes) { FlagTable flagTable = new FlagTable(); - ByteBufferReader reader = new 
ByteBufferReader(bytes); - Header header = Header.fromBytes(reader); - flagTable.mHeader = header; - flagTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumFlags)); - reader.position(header.mNodeOffset); - for (int i = 0; i < header.mNumFlags; i++) { - Node node = Node.fromBytes(reader); - flagTable.mNodeMap.put(makeKey(node.mPackageId, node.mFlagName), node); - } + flagTable.mReader = new ByteBufferReader(bytes); + flagTable.mHeader = Header.fromBytes(flagTable.mReader); + return flagTable; } public Node get(int packageId, String flagName) { - return mNodeMap.get(makeKey(packageId, flagName)); + int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4; + int bucketIndex = TableUtils.getBucketIndex(makeKey(packageId, flagName), numBuckets); + int newPosition = mHeader.mBucketOffset + bucketIndex * 4; + if (newPosition >= mHeader.mNodeOffset) { + return null; + } + + mReader.position(newPosition); + int nodeIndex = mReader.readInt(); + if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) { + return null; + } + + while (nodeIndex != -1) { + mReader.position(nodeIndex); + Node node = Node.fromBytes(mReader); + if (Objects.equals(flagName, node.mFlagName) && packageId == node.mPackageId) { + return node; + } + nodeIndex = node.mNextOffset; + } + + return null; } public Header getHeader() { return mHeader; } - private static String makeKey(int packageId, String flagName) { + private static byte[] makeKey(int packageId, String flagName) { StringBuilder ret = new StringBuilder(); - return ret.append(packageId).append('/').append(flagName).toString(); + return ret.append(packageId).append('/').append(flagName).toString().getBytes(UTF_8); } public static class Header { diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java index 0ddc147e82..493436d2a2 100644 --- 
a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/FlagValueList.java @@ -17,33 +17,21 @@ package android.aconfig.storage; import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.List; public class FlagValueList { private Header mHeader; - private List<Boolean> mList; - - private int mSize; + private ByteBufferReader mReader; public static FlagValueList fromBytes(ByteBuffer bytes) { FlagValueList flagValueList = new FlagValueList(); - ByteBufferReader reader = new ByteBufferReader(bytes); - Header header = Header.fromBytes(reader); - flagValueList.mHeader = header; - flagValueList.mList = new ArrayList(header.mNumFlags); - reader.position(header.mBooleanValueOffset); - for (int i = 0; i < header.mNumFlags; i++) { - boolean val = reader.readByte() == 1; - flagValueList.mList.add(val); - } - flagValueList.mSize = flagValueList.mList.size(); + flagValueList.mReader = new ByteBufferReader(bytes); + flagValueList.mHeader = Header.fromBytes(flagValueList.mReader); return flagValueList; } - public boolean get(int index) { - return mList.get(index); + public boolean getBoolean(int index) { + return mReader.readByte(mHeader.mBooleanValueOffset + index) == 1; } public Header getHeader() { @@ -51,7 +39,7 @@ public class FlagValueList { } public int size() { - return mSize; + return mHeader.mNumFlags; } public static class Header { diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java index d04e1ac391..39b7e59d7e 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/PackageTable.java @@ -16,32 +16,48 @@ package android.aconfig.storage; +import static java.nio.charset.StandardCharsets.UTF_8; + import java.nio.ByteBuffer; 
-import java.util.HashMap; -import java.util.Map; import java.util.Objects; public class PackageTable { private Header mHeader; - private Map<String, Node> mNodeMap; + private ByteBufferReader mReader; public static PackageTable fromBytes(ByteBuffer bytes) { PackageTable packageTable = new PackageTable(); - ByteBufferReader reader = new ByteBufferReader(bytes); - Header header = Header.fromBytes(reader); - packageTable.mHeader = header; - packageTable.mNodeMap = new HashMap(TableUtils.getTableSize(header.mNumPackages)); - reader.position(header.mNodeOffset); - for (int i = 0; i < header.mNumPackages; i++) { - Node node = Node.fromBytes(reader); - packageTable.mNodeMap.put(node.mPackageName, node); - } + packageTable.mReader = new ByteBufferReader(bytes); + packageTable.mHeader = Header.fromBytes(packageTable.mReader); + return packageTable; } public Node get(String packageName) { - return mNodeMap.get(packageName); + int numBuckets = (mHeader.mNodeOffset - mHeader.mBucketOffset) / 4; + int bucketIndex = TableUtils.getBucketIndex(packageName.getBytes(UTF_8), numBuckets); + int newPosition = mHeader.mBucketOffset + bucketIndex * 4; + if (newPosition >= mHeader.mNodeOffset) { + return null; + } + mReader.position(newPosition); + int nodeIndex = mReader.readInt(); + + if (nodeIndex < mHeader.mNodeOffset || nodeIndex >= mHeader.mFileSize) { + return null; + } + + while (nodeIndex != -1) { + mReader.position(nodeIndex); + Node node = Node.fromBytes(mReader, mHeader.mVersion); + if (Objects.equals(packageName, node.mPackageName)) { + return node; + } + nodeIndex = node.mNextOffset; + } + + return null; } public Header getHeader() { @@ -58,7 +74,7 @@ public class PackageTable { private int mBucketOffset; private int mNodeOffset; - public static Header fromBytes(ByteBufferReader reader) { + private static Header fromBytes(ByteBufferReader reader) { Header header = new Header(); header.mVersion = reader.readInt(); header.mContainer = reader.readString(); @@ -111,7 +127,29 @@ 
public class PackageTable { private int mBooleanStartIndex; private int mNextOffset; - public static Node fromBytes(ByteBufferReader reader) { + private static Node fromBytes(ByteBufferReader reader, int version) { + switch (version) { + case 1: + return fromBytesV1(reader); + case 2: + return fromBytesV2(reader); + default: + // Do we want to throw here? + return new Node(); + } + } + + private static Node fromBytesV1(ByteBufferReader reader) { + Node node = new Node(); + node.mPackageName = reader.readString(); + node.mPackageId = reader.readInt(); + node.mBooleanStartIndex = reader.readInt(); + node.mNextOffset = reader.readInt(); + node.mNextOffset = node.mNextOffset == 0 ? -1 : node.mNextOffset; + return node; + } + + private static Node fromBytesV2(ByteBufferReader reader) { Node node = new Node(); node.mPackageName = reader.readString(); node.mPackageId = reader.readInt(); diff --git a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java new file mode 100644 index 0000000000..64714ee5f8 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/SipHasher13.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.aconfig.storage; + +public class SipHasher13 { + static class State { + private long v0; + private long v2; + private long v1; + private long v3; + + public State(long k0, long k1) { + v0 = k0 ^ 0x736f6d6570736575L; + v1 = k1 ^ 0x646f72616e646f6dL; + v2 = k0 ^ 0x6c7967656e657261L; + v3 = k1 ^ 0x7465646279746573L; + } + + public void compress(long m) { + v3 ^= m; + cRounds(); + v0 ^= m; + } + + public long finish() { + v2 ^= 0xff; + dRounds(); + return v0 ^ v1 ^ v2 ^ v3; + } + + private void cRounds() { + v0 += v1; + v1 = Long.rotateLeft(v1, 13); + v1 ^= v0; + v0 = Long.rotateLeft(v0, 32); + v2 += v3; + v3 = Long.rotateLeft(v3, 16); + v3 ^= v2; + v0 += v3; + v3 = Long.rotateLeft(v3, 21); + v3 ^= v0; + v2 += v1; + v1 = Long.rotateLeft(v1, 17); + v1 ^= v2; + v2 = Long.rotateLeft(v2, 32); + } + + private void dRounds() { + for (int i = 0; i < 3; i++) { + v0 += v1; + v1 = Long.rotateLeft(v1, 13); + v1 ^= v0; + v0 = Long.rotateLeft(v0, 32); + v2 += v3; + v3 = Long.rotateLeft(v3, 16); + v3 ^= v2; + v0 += v3; + v3 = Long.rotateLeft(v3, 21); + v3 ^= v0; + v2 += v1; + v1 = Long.rotateLeft(v1, 17); + v1 ^= v2; + v2 = Long.rotateLeft(v2, 32); + } + } + } + + public static long hash(byte[] data) { + State state = new State(0, 0); + int len = data.length; + int left = len & 0x7; + int index = 0; + + while (index < len - left) { + long mi = loadLe(data, index, 8); + index += 8; + state.compress(mi); + } + + // padding the end with 0xff to be consistent with rust + long m = (0xffL << (left * 8)) | loadLe(data, index, left); + if (left == 0x7) { + // compress the m w-2 + state.compress(m); + m = 0L; + } + // len adds 1 since padded 0xff + m |= (((len + 1) & 0xffL) << 56); + state.compress(m); + + return state.finish(); + } + + private static long loadLe(byte[] data, int offset, int size) { + long m = 0; + for (int i = 0; i < size; i++) { + m |= (data[i + offset] & 0xffL) << (i * 8); + } + return m; + } +} diff --git 
a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java index 714b53bf31..81168f538e 100644 --- a/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java +++ b/tools/aconfig/aconfig_storage_file/srcs/android/aconfig/storage/TableUtils.java @@ -58,4 +58,9 @@ public class TableUtils { } throw new AconfigStorageException("Number of items in a hash table exceeds limit"); } + + public static int getBucketIndex(byte[] val, int numBuckets) { + long hashVal = SipHasher13.hash(val); + return (int) Long.remainderUnsigned(hashVal, numBuckets); + } } diff --git a/tools/aconfig/aconfig_storage_file/tests/Android.bp b/tools/aconfig/aconfig_storage_file/tests/Android.bp index c33127fc86..13d321408b 100644 --- a/tools/aconfig/aconfig_storage_file/tests/Android.bp +++ b/tools/aconfig/aconfig_storage_file/tests/Android.bp @@ -10,10 +10,10 @@ cc_test { "libbase", ], data: [ - "package.map", - "flag.map", - "flag.val", - "flag.info", + "data/v1/package.map", + "data/v1/flag.map", + "data/v1/flag.val", + "data/v1/flag.info", ], test_suites: [ "device-tests", @@ -28,19 +28,20 @@ android_test { "srcs/**/*.java", ], static_libs: [ - "aconfig_storage_file_java", "androidx.test.runner", "junit", + "aconfig_storage_file_java", ], - sdk_version: "test_current", test_config: "AndroidStorageJaveTest.xml", + sdk_version: "test_current", data: [ - "package.map", - "flag.map", - "flag.val", - "flag.info", + "data/v1/package.map", + "data/v1/flag.map", + "data/v1/flag.val", + "data/v1/flag.info", ], test_suites: [ "general-tests", ], + jarjar_rules: "jarjar.txt", } diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.info b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.info Binary files differindex 6223edf369..6223edf369 100644 --- a/tools/aconfig/aconfig_storage_file/tests/flag.info +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.info diff 
--git a/tools/aconfig/aconfig_storage_file/tests/flag.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.map Binary files differindex e868f53d7e..e868f53d7e 100644 --- a/tools/aconfig/aconfig_storage_file/tests/flag.map +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.map diff --git a/tools/aconfig/aconfig_storage_file/tests/flag.val b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.val Binary files differindex ed203d4d13..ed203d4d13 100644 --- a/tools/aconfig/aconfig_storage_file/tests/flag.val +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/flag.val diff --git a/tools/aconfig/aconfig_storage_file/tests/package.map b/tools/aconfig/aconfig_storage_file/tests/data/v1/package.map Binary files differindex 6c46a0339c..6c46a0339c 100644 --- a/tools/aconfig/aconfig_storage_file/tests/package.map +++ b/tools/aconfig/aconfig_storage_file/tests/data/v1/package.map diff --git a/tools/aconfig/aconfig_storage_file/tests/jarjar.txt b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt new file mode 100644 index 0000000000..a6c17fa476 --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/jarjar.txt @@ -0,0 +1,15 @@ +rule android.aconfig.storage.AconfigStorageException android.aconfig.storage.test.AconfigStorageException +rule android.aconfig.storage.FlagTable android.aconfig.storage.test.FlagTable +rule android.aconfig.storage.PackageTable android.aconfig.storage.test.PackageTable +rule android.aconfig.storage.ByteBufferReader android.aconfig.storage.test.ByteBufferReader +rule android.aconfig.storage.FlagType android.aconfig.storage.test.FlagType +rule android.aconfig.storage.SipHasher13 android.aconfig.storage.test.SipHasher13 +rule android.aconfig.storage.FileType android.aconfig.storage.test.FileType +rule android.aconfig.storage.FlagValueList android.aconfig.storage.test.FlagValueList +rule android.aconfig.storage.TableUtils android.aconfig.storage.test.TableUtils + + +rule android.aconfig.storage.FlagTable$* 
android.aconfig.storage.test.FlagTable$@1 +rule android.aconfig.storage.PackageTable$* android.aconfig.storage.test.PackageTable$@1 +rule android.aconfig.storage.FlagValueList$* android.aconfig.storage.test.FlagValueList@1 +rule android.aconfig.storage.SipHasher13$* android.aconfig.storage.test.SipHasher13@1 diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java index c18590accc..1b0de630c7 100644 --- a/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/FlagValueListTest.java @@ -53,7 +53,7 @@ public class FlagValueListTest { assertEquals(expected.length, flagValueList.size()); for (int i = 0; i < flagValueList.size(); i++) { - assertEquals(expected[i], flagValueList.get(i)); + assertEquals(expected[i], flagValueList.getBoolean(i)); } } @@ -68,10 +68,10 @@ public class FlagValueListTest { PackageTable.Node pNode = packageTable.get("com.android.aconfig.storage.test_1"); FlagTable.Node fNode = flagTable.get(pNode.getPackageId(), "enabled_rw"); - assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); + assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); pNode = packageTable.get("com.android.aconfig.storage.test_4"); fNode = flagTable.get(pNode.getPackageId(), "enabled_fixed_ro"); - assertTrue(flagValueList.get(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); + assertTrue(flagValueList.getBoolean(pNode.getBooleanStartIndex() + fNode.getFlagIndex())); } } diff --git a/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java new file mode 100644 index 0000000000..10620d272b --- /dev/null +++ b/tools/aconfig/aconfig_storage_file/tests/srcs/SipHasher13Test.java @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under 
the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.aconfig.storage.test; + +import static org.junit.Assert.assertEquals; +import static java.nio.charset.StandardCharsets.UTF_8; + +import android.aconfig.storage.SipHasher13; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +@RunWith(JUnit4.class) +public class SipHasher13Test { + @Test + public void testSipHash_hashString() throws Exception { + String testStr = "com.google.android.test"; + long result = SipHasher13.hash(testStr.getBytes(UTF_8)); + assertEquals(0xF86572EFF9C4A0C1L, result); + + testStr = "abcdefg"; + result = SipHasher13.hash(testStr.getBytes(UTF_8)); + assertEquals(0x2295EF44BD078AE9L, result); + + testStr = "abcdefgh"; + result = SipHasher13.hash(testStr.getBytes(UTF_8)); + assertEquals(0x5CD7657FA7F96C16L, result); + } +} diff --git a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp index ebd1dd89bd..3626f72e6c 100644 --- a/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp +++ b/tools/aconfig/aconfig_storage_file/tests/storage_file_test.cpp @@ -53,7 +53,8 @@ void verify_value_info(const FlagValueAndInfoSummary& flag, } TEST(AconfigStorageFileTest, test_list_flag) { - auto const test_dir = GetExecutableDirectory(); + auto const test_base_dir = GetExecutableDirectory(); + auto const test_dir = test_base_dir + "/data/v1"; auto const package_map = test_dir + 
"/package.map"; auto const flag_map = test_dir + "/flag.map"; auto const flag_val = test_dir + "/flag.val"; @@ -82,7 +83,8 @@ TEST(AconfigStorageFileTest, test_list_flag) { } TEST(AconfigStorageFileTest, test_list_flag_with_info) { - auto const test_dir = GetExecutableDirectory(); + auto const base_test_dir = GetExecutableDirectory(); + auto const test_dir = base_test_dir + "/data/v1"; auto const package_map = test_dir + "/package.map"; auto const flag_map = test_dir + "/flag.map"; auto const flag_val = test_dir + "/flag.val"; diff --git a/tools/aconfig/aconfig_storage_read_api/Android.bp b/tools/aconfig/aconfig_storage_read_api/Android.bp index c2f4c1803f..80b8ece1be 100644 --- a/tools/aconfig/aconfig_storage_read_api/Android.bp +++ b/tools/aconfig/aconfig_storage_read_api/Android.bp @@ -36,10 +36,10 @@ rust_test_host { "librand", ], data: [ - "tests/package.map", - "tests/flag.map", - "tests/flag.val", - "tests/flag.info", + "tests/data/v1/package.map", + "tests/data/v1/flag.map", + "tests/data/v1/flag.val", + "tests/data/v1/flag.info", ], } @@ -87,6 +87,9 @@ cc_library { generated_sources: ["libcxx_aconfig_storage_read_api_bridge_code"], whole_static_libs: ["libaconfig_storage_read_api_cxx_bridge"], export_include_dirs: ["include"], + static_libs: [ + "libbase", + ], host_supported: true, vendor_available: true, product_available: true, @@ -104,31 +107,12 @@ cc_library { afdo: true, } -soong_config_module_type { - name: "aconfig_lib_cc_shared_link_defaults", - module_type: "cc_defaults", - config_namespace: "Aconfig", - bool_variables: [ - "read_from_new_storage", - ], - properties: [ - "shared_libs", - ], -} - -soong_config_bool_variable { - name: "read_from_new_storage", -} - -aconfig_lib_cc_shared_link_defaults { +cc_defaults { name: "aconfig_lib_cc_shared_link.defaults", - soong_config_variables: { - read_from_new_storage: { - shared_libs: [ - "libaconfig_storage_read_api_cc", - ], - }, - }, + shared_libs: 
select(release_flag("RELEASE_READ_FROM_NEW_STORAGE"), { + true: ["libaconfig_storage_read_api_cc"], + default: [], + }), } cc_defaults { @@ -144,6 +128,7 @@ rust_ffi_shared { crate_name: "aconfig_storage_read_api_rust_jni", srcs: ["srcs/lib.rs"], rustlibs: [ + "libaconfig_storage_file", "libaconfig_storage_read_api", "libanyhow", "libjni", @@ -171,8 +156,35 @@ java_library { srcs: [ "srcs/android/aconfig/storage/StorageInternalReader.java", ], + libs: [ + "unsupportedappusage", + "strict_mode_stub", + ], static_libs: [ "aconfig_storage_file_java", ], sdk_version: "core_current", + host_supported: true, + min_sdk_version: "29", + apex_available: [ + "//apex_available:platform", + "//apex_available:anyapex", + ], +} + +java_library { + name: "aconfig_storage_reader_java_none", + srcs: [ + "srcs/android/aconfig/storage/StorageInternalReader.java", + ], + libs: [ + "unsupportedappusage-sdk-none", + "fake_device_config", + ], + static_libs: [ + "aconfig_storage_file_java_none", + ], + sdk_version: "none", + system_modules: "core-all-system-modules", + host_supported: true, } diff --git a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp index 97ada3a33e..8e0c4e1a12 100644 --- a/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp +++ b/tools/aconfig/aconfig_storage_read_api/aconfig_storage_read_api.cpp @@ -1,3 +1,4 @@ +#include <android-base/unique_fd.h> #include <sys/mman.h> #include <sys/stat.h> #include <fcntl.h> @@ -59,22 +60,22 @@ Result<MappedStorageFile*> get_mapped_file_impl( /// Map a storage file Result<MappedStorageFile*> map_storage_file(std::string const& file) { - int fd = open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY); - if (fd == -1) { + android::base::unique_fd ufd(open(file.c_str(), O_CLOEXEC | O_NOFOLLOW | O_RDONLY)); + if (ufd.get() == -1) { auto result = Result<MappedStorageFile*>(); result.errmsg = std::string("failed to open ") + file + ": 
" + strerror(errno); return result; }; struct stat fd_stat; - if (fstat(fd, &fd_stat) < 0) { + if (fstat(ufd.get(), &fd_stat) < 0) { auto result = Result<MappedStorageFile*>(); result.errmsg = std::string("fstat failed: ") + strerror(errno); return result; } size_t file_size = fd_stat.st_size; - void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, fd, 0); + void* const map_result = mmap(nullptr, file_size, PROT_READ, MAP_SHARED, ufd.get(), 0); if (map_result == MAP_FAILED) { auto result = Result<MappedStorageFile*>(); result.errmsg = std::string("mmap failed: ") + strerror(errno); diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs index 6d03377683..fe57a6dd78 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/flag_info_query.rs @@ -16,8 +16,10 @@ //! flag value query module defines the flag value file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; -use aconfig_storage_file::{flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType}; +use crate::AconfigStorageError; +use aconfig_storage_file::{ + flag_info::FlagInfoHeader, read_u8_from_bytes, FlagValueType, MAX_SUPPORTED_FILE_VERSION, +}; use anyhow::anyhow; /// Get flag attribute bitfield @@ -27,11 +29,11 @@ pub fn find_flag_attribute( flag_index: u32, ) -> Result<u8, AconfigStorageError> { let interpreted_header = FlagInfoHeader::from_bytes(buf)?; - if interpreted_header.version > crate::FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -108,15 +110,15 @@ mod tests { // this test point locks down query error when file has a higher version fn 
test_higher_version_storage_file() { let mut info_list = create_test_flag_info_list(); - info_list.header.version = crate::FILE_VERSION + 1; + info_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let flag_info = info_list.into_bytes(); let error = find_flag_attribute(&flag_info[..], FlagValueType::Boolean, 4).unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs index a1a4793bc2..e9bc6041cf 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/flag_table_query.rs @@ -16,9 +16,10 @@ //! flag table query module defines the flag table file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; +use crate::AconfigStorageError; use aconfig_storage_file::{ flag_table::FlagTableHeader, flag_table::FlagTableNode, read_u32_from_bytes, StoredFlagType, + MAX_SUPPORTED_FILE_VERSION, }; use anyhow::anyhow; @@ -36,11 +37,11 @@ pub fn find_flag_read_context( flag: &str, ) -> Result<Option<FlagReadContext>, AconfigStorageError> { let interpreted_header = FlagTableHeader::from_bytes(buf)?; - if interpreted_header.version > crate::FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -111,15 +112,15 @@ mod tests { // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { let mut table = create_test_flag_table(); - 
table.header.version = crate::FILE_VERSION + 1; + table.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let flag_table = table.into_bytes(); let error = find_flag_read_context(&flag_table[..], 0, "enabled_ro").unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs index 9d32a16ac8..12c1e83628 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/flag_value_query.rs @@ -16,18 +16,20 @@ //! flag value query module defines the flag value file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; -use aconfig_storage_file::{flag_value::FlagValueHeader, read_u8_from_bytes}; +use crate::AconfigStorageError; +use aconfig_storage_file::{ + flag_value::FlagValueHeader, read_u8_from_bytes, MAX_SUPPORTED_FILE_VERSION, +}; use anyhow::anyhow; /// Query flag value pub fn find_boolean_flag_value(buf: &[u8], flag_index: u32) -> Result<bool, AconfigStorageError> { let interpreted_header = FlagValueHeader::from_bytes(buf)?; - if interpreted_header.version > crate::FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -74,15 +76,15 @@ mod tests { // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { let mut value_list = create_test_flag_value_list(); - value_list.header.version = crate::FILE_VERSION + 1; + value_list.header.version = 
MAX_SUPPORTED_FILE_VERSION + 1; let flag_value = value_list.into_bytes(); let error = find_boolean_flag_value(&flag_value[..], 4).unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/src/lib.rs b/tools/aconfig/aconfig_storage_read_api/src/lib.rs index d76cf3fe4e..884f148de9 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/lib.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/lib.rs @@ -46,7 +46,7 @@ pub use aconfig_storage_file::{AconfigStorageError, FlagValueType, StorageFileTy pub use flag_table_query::FlagReadContext; pub use package_table_query::PackageReadContext; -use aconfig_storage_file::{read_u32_from_bytes, FILE_VERSION}; +use aconfig_storage_file::read_u32_from_bytes; use flag_info_query::find_flag_attribute; use flag_table_query::find_flag_read_context; use flag_value_query::find_boolean_flag_value; @@ -412,10 +412,10 @@ mod tests { let flag_map = storage_dir.clone() + "/maps/mockup.flag.map"; let flag_val = storage_dir.clone() + "/boot/mockup.val"; let flag_info = storage_dir.clone() + "/boot/mockup.info"; - fs::copy("./tests/package.map", &package_map).unwrap(); - fs::copy("./tests/flag.map", &flag_map).unwrap(); - fs::copy("./tests/flag.val", &flag_val).unwrap(); - fs::copy("./tests/flag.info", &flag_info).unwrap(); + fs::copy("./tests/data/v1/package.map", &package_map).unwrap(); + fs::copy("./tests/data/v1/flag.map", &flag_map).unwrap(); + fs::copy("./tests/data/v1/flag.val", &flag_val).unwrap(); + fs::copy("./tests/data/v1/flag.info", &flag_info).unwrap(); return storage_dir; } @@ -507,9 +507,9 @@ mod tests { #[test] // this test point locks down flag storage file version number query api fn test_storage_version_query() { - 
assert_eq!(get_storage_file_version("./tests/package.map").unwrap(), 1); - assert_eq!(get_storage_file_version("./tests/flag.map").unwrap(), 1); - assert_eq!(get_storage_file_version("./tests/flag.val").unwrap(), 1); - assert_eq!(get_storage_file_version("./tests/flag.info").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/package.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/flag.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/flag.val").unwrap(), 1); + assert_eq!(get_storage_file_version("./tests/data/v1/flag.info").unwrap(), 1); } } diff --git a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs index 5a1664535f..32dbed88e3 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/mapped_file.rs @@ -97,10 +97,10 @@ mod tests { let flag_map = storage_dir.clone() + "/maps/mockup.flag.map"; let flag_val = storage_dir.clone() + "/boot/mockup.val"; let flag_info = storage_dir.clone() + "/boot/mockup.info"; - fs::copy("./tests/package.map", &package_map).unwrap(); - fs::copy("./tests/flag.map", &flag_map).unwrap(); - fs::copy("./tests/flag.val", &flag_val).unwrap(); - fs::copy("./tests/flag.info", &flag_info).unwrap(); + fs::copy("./tests/data/v1/package.map", &package_map).unwrap(); + fs::copy("./tests/data/v1/flag.map", &flag_map).unwrap(); + fs::copy("./tests/data/v1/flag.val", &flag_val).unwrap(); + fs::copy("./tests/data/v1/flag.info", &flag_info).unwrap(); return storage_dir; } @@ -108,9 +108,9 @@ mod tests { #[test] fn test_mapped_file_contents() { let storage_dir = create_test_storage_files(); - map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/package.map"); - map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/flag.map"); - map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/flag.val"); - map_and_verify(&storage_dir, 
StorageFileType::FlagInfo, "./tests/flag.info"); + map_and_verify(&storage_dir, StorageFileType::PackageMap, "./tests/data/v1/package.map"); + map_and_verify(&storage_dir, StorageFileType::FlagMap, "./tests/data/v1/flag.map"); + map_and_verify(&storage_dir, StorageFileType::FlagVal, "./tests/data/v1/flag.val"); + map_and_verify(&storage_dir, StorageFileType::FlagInfo, "./tests/data/v1/flag.info"); } } diff --git a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs index 2cb854b1b1..acb60f6b3a 100644 --- a/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs +++ b/tools/aconfig/aconfig_storage_read_api/src/package_table_query.rs @@ -16,9 +16,10 @@ //! package table query module defines the package table file read from mapped bytes -use crate::{AconfigStorageError, FILE_VERSION}; +use crate::AconfigStorageError; use aconfig_storage_file::{ package_table::PackageTableHeader, package_table::PackageTableNode, read_u32_from_bytes, + MAX_SUPPORTED_FILE_VERSION, }; use anyhow::anyhow; @@ -35,11 +36,11 @@ pub fn find_package_read_context( package: &str, ) -> Result<Option<PackageReadContext>, AconfigStorageError> { let interpreted_header = PackageTableHeader::from_bytes(buf)?; - if interpreted_header.version > FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot read storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -55,7 +56,8 @@ pub fn find_package_read_context( } loop { - let interpreted_node = PackageTableNode::from_bytes(&buf[package_node_offset..])?; + let interpreted_node = + PackageTableNode::from_bytes(&buf[package_node_offset..], interpreted_header.version)?; if interpreted_node.package_name == package { return Ok(Some(PackageReadContext { package_id: interpreted_node.package_id, 
@@ -118,7 +120,7 @@ mod tests { // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { let mut table = create_test_package_table(); - table.header.version = crate::FILE_VERSION + 1; + table.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let package_table = table.into_bytes(); let error = find_package_read_context(&package_table[..], "com.android.aconfig.storage.test_1") @@ -127,8 +129,8 @@ mod tests { format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot read storage file with a higher version of {} with lib version {})", - crate::FILE_VERSION + 1, - crate::FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java index 406ff24dd3..850c2b8146 100644 --- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java +++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/AconfigStorageReadAPI.java @@ -16,18 +16,14 @@ package android.aconfig.storage; +import dalvik.annotation.optimization.FastNative; + import java.io.FileInputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; -import java.nio.channels.FileChannel.MapMode; - -import android.aconfig.storage.PackageReadContext; -import android.aconfig.storage.FlagReadContext; - -import dalvik.annotation.optimization.FastNative; public class AconfigStorageReadAPI { @@ -50,9 +46,8 @@ public class AconfigStorageReadAPI { } // Map a storage file given container and file type - public static MappedByteBuffer getMappedFile( - String container, - StorageFileType type) throws IOException{ + public static MappedByteBuffer getMappedFile(String container, StorageFileType 
type) + throws IOException { switch (type) { case PACKAGE_MAP: return mapStorageFile(STORAGEDIR + "/maps/" + container + ".package.map"); @@ -73,14 +68,14 @@ public class AconfigStorageReadAPI { // @throws IOException if the passed in file is not a valid package map file @FastNative private static native ByteBuffer getPackageReadContextImpl( - ByteBuffer mappedFile, String packageName) throws IOException; + ByteBuffer mappedFile, String packageName) throws IOException; // API to get package read context // @param mappedFile: memory mapped package map file // @param packageName: package name // @throws IOException if the passed in file is not a valid package map file - static public PackageReadContext getPackageReadContext ( - ByteBuffer mappedFile, String packageName) throws IOException { + public static PackageReadContext getPackageReadContext( + ByteBuffer mappedFile, String packageName) throws IOException { ByteBuffer buffer = getPackageReadContextImpl(mappedFile, packageName); buffer.order(ByteOrder.LITTLE_ENDIAN); return new PackageReadContext(buffer.getInt(), buffer.getInt(4)); @@ -94,7 +89,7 @@ public class AconfigStorageReadAPI { // @throws IOException if the passed in file is not a valid flag map file @FastNative private static native ByteBuffer getFlagReadContextImpl( - ByteBuffer mappedFile, int packageId, String flagName) throws IOException; + ByteBuffer mappedFile, int packageId, String flagName) throws IOException; // API to get flag read context // @param mappedFile: memory mapped flag map file @@ -103,7 +98,7 @@ public class AconfigStorageReadAPI { // @param flagName: flag name // @throws IOException if the passed in file is not a valid flag map file public static FlagReadContext getFlagReadContext( - ByteBuffer mappedFile, int packageId, String flagName) throws IOException { + ByteBuffer mappedFile, int packageId, String flagName) throws IOException { ByteBuffer buffer = getFlagReadContextImpl(mappedFile, packageId, flagName); 
buffer.order(ByteOrder.LITTLE_ENDIAN); return new FlagReadContext(buffer.getInt(), buffer.getInt(4)); @@ -115,8 +110,11 @@ public class AconfigStorageReadAPI { // @throws IOException if the passed in file is not a valid flag value file or the // flag index went over the file boundary. @FastNative - public static native boolean getBooleanFlagValue( - ByteBuffer mappedFile, int flagIndex) throws IOException; + public static native boolean getBooleanFlagValue(ByteBuffer mappedFile, int flagIndex) + throws IOException; + + @FastNative + public static native long hash(String packageName) throws IOException; static { System.loadLibrary("aconfig_storage_read_api_rust_jni"); diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java index 5f31017d21..6fbcdb354a 100644 --- a/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java +++ b/tools/aconfig/aconfig_storage_read_api/srcs/android/aconfig/storage/StorageInternalReader.java @@ -16,10 +16,16 @@ package android.aconfig.storage; -import java.io.FileInputStream; +import android.compat.annotation.UnsupportedAppUsage; +import android.os.StrictMode; + +import java.io.Closeable; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +/** @hide */ public class StorageInternalReader { private static final String MAP_PATH = "/metadata/aconfig/maps/"; @@ -30,22 +36,24 @@ public class StorageInternalReader { private int mPackageBooleanStartOffset; + @UnsupportedAppUsage public StorageInternalReader(String container, String packageName) { this(packageName, MAP_PATH + container + ".package.map", BOOT_PATH + container + ".val"); } + @UnsupportedAppUsage public StorageInternalReader(String packageName, String packageMapFile, String flagValueFile) { + 
StrictMode.ThreadPolicy oldPolicy = StrictMode.allowThreadDiskReads(); mPackageTable = PackageTable.fromBytes(mapStorageFile(packageMapFile)); mFlagValueList = FlagValueList.fromBytes(mapStorageFile(flagValueFile)); + StrictMode.setThreadPolicy(oldPolicy); mPackageBooleanStartOffset = getPackageBooleanStartOffset(packageName); } + @UnsupportedAppUsage public boolean getBooleanFlagValue(int index) { index += mPackageBooleanStartOffset; - if (index >= mFlagValueList.size()) { - throw new AconfigStorageException("Fail to get boolean flag value"); - } - return mFlagValueList.get(index); + return mFlagValueList.getBoolean(index); } private int getPackageBooleanStartOffset(String packageName) { @@ -62,13 +70,25 @@ public class StorageInternalReader { // Map a storage file given file path private static MappedByteBuffer mapStorageFile(String file) { + FileChannel channel = null; try { - FileInputStream stream = new FileInputStream(file); - FileChannel channel = stream.getChannel(); + channel = FileChannel.open(Paths.get(file), StandardOpenOption.READ); return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size()); } catch (Exception e) { throw new AconfigStorageException( String.format("Fail to mmap storage file %s", file), e); + } finally { + quietlyDispose(channel); + } + } + + private static void quietlyDispose(Closeable closable) { + try { + if (closable != null) { + closable.close(); + } + } catch (Exception e) { + // no need to care, at least as of now } } } diff --git a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs index 304a059c90..f5f12bb1fa 100644 --- a/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs +++ b/tools/aconfig/aconfig_storage_read_api/srcs/lib.rs @@ -1,5 +1,6 @@ //! 
aconfig storage read api java rust interlop +use aconfig_storage_file::SipHasher13; use aconfig_storage_read_api::flag_table_query::find_flag_read_context; use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value; use aconfig_storage_read_api::package_table_query::find_package_read_context; @@ -7,8 +8,9 @@ use aconfig_storage_read_api::{FlagReadContext, PackageReadContext}; use anyhow::Result; use jni::objects::{JByteBuffer, JClass, JString}; -use jni::sys::{jboolean, jint}; +use jni::sys::{jboolean, jint, jlong}; use jni::JNIEnv; +use std::hash::Hasher; /// Call rust find package read context fn get_package_read_context_java( @@ -158,3 +160,30 @@ pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_getBoo } } } + +/// Get flag value JNI +#[no_mangle] +#[allow(unused)] +pub extern "system" fn Java_android_aconfig_storage_AconfigStorageReadAPI_hash<'local>( + mut env: JNIEnv<'local>, + class: JClass<'local>, + package_name: JString<'local>, +) -> jlong { + match siphasher13_hash(&mut env, package_name) { + Ok(value) => value as jlong, + Err(errmsg) => { + env.throw(("java/io/IOException", errmsg.to_string())).expect("failed to throw"); + 0i64 + } + } +} + +fn siphasher13_hash(env: &mut JNIEnv, package_name: JString) -> Result<u64> { + // SAFETY: + // The safety here is ensured as the flag name is guaranteed to be a java string + let flag_name: String = unsafe { env.get_string_unchecked(&package_name)?.into() }; + let mut s = SipHasher13::new(); + s.write(flag_name.as_bytes()); + s.write_u8(0xff); + Ok(s.finish()) +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp index ed0c728215..b8e510d68c 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/Android.bp +++ b/tools/aconfig/aconfig_storage_read_api/tests/Android.bp @@ -1,9 +1,10 @@ filegroup { name: "read_api_test_storage_files", - srcs: ["package.map", - "flag.map", - "flag.val", - "flag.info" + 
srcs: [ + "data/v1/package.map", + "data/v1/flag.map", + "data/v1/flag.val", + "data/v1/flag.info", ], } diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.info b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.info Binary files differindex 6223edf369..6223edf369 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/flag.info +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.info diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.map Binary files differindex e868f53d7e..e868f53d7e 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/flag.map +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.map diff --git a/tools/aconfig/aconfig_storage_read_api/tests/flag.val b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.val Binary files differindex ed203d4d13..ed203d4d13 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/flag.val +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/flag.val diff --git a/tools/aconfig/aconfig_storage_read_api/tests/package.map b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/package.map Binary files differindex 6c46a0339c..6c46a0339c 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/package.map +++ b/tools/aconfig/aconfig_storage_read_api/tests/data/v1/package.map diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java index a26b25707d..191741ef51 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java +++ b/tools/aconfig/aconfig_storage_read_api/tests/java/AconfigStorageReadAPITest.java @@ -16,28 +16,29 @@ package android.aconfig.storage.test; -import java.io.IOException; -import java.nio.MappedByteBuffer; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.ArrayList; -import java.util.List; 
-import java.util.Random; - import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.JUnit4; +import android.aconfig.DeviceProtos; +import android.aconfig.nano.Aconfig.parsed_flag; import android.aconfig.storage.AconfigStorageReadAPI; -import android.aconfig.storage.PackageReadContext; import android.aconfig.storage.FlagReadContext; import android.aconfig.storage.FlagReadContext.StoredFlagType; +import android.aconfig.storage.PackageReadContext; +import android.aconfig.storage.SipHasher13; +import android.aconfig.storage.StorageInternalReader; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.IOException; +import java.nio.MappedByteBuffer; +import java.util.ArrayList; +import java.util.List; @RunWith(JUnit4.class) -public class AconfigStorageReadAPITest{ +public class AconfigStorageReadAPITest { private String mStorageDir = "/data/local/tmp/aconfig_java_api_test"; @@ -45,26 +46,29 @@ public class AconfigStorageReadAPITest{ public void testPackageContextQuery() { MappedByteBuffer packageMap = null; try { - packageMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.package.map"); - } catch(IOException ex){ + packageMap = + AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } assertTrue(packageMap != null); try { - PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, "com.android.aconfig.storage.test_1"); + PackageReadContext context = + AconfigStorageReadAPI.getPackageReadContext( + packageMap, "com.android.aconfig.storage.test_1"); assertEquals(context.mPackageId, 0); assertEquals(context.mBooleanStartIndex, 0); - context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, 
"com.android.aconfig.storage.test_2"); + context = + AconfigStorageReadAPI.getPackageReadContext( + packageMap, "com.android.aconfig.storage.test_2"); assertEquals(context.mPackageId, 1); assertEquals(context.mBooleanStartIndex, 3); - context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, "com.android.aconfig.storage.test_4"); + context = + AconfigStorageReadAPI.getPackageReadContext( + packageMap, "com.android.aconfig.storage.test_4"); assertEquals(context.mPackageId, 2); assertEquals(context.mBooleanStartIndex, 6); } catch (IOException ex) { @@ -76,19 +80,19 @@ public class AconfigStorageReadAPITest{ public void testNonExistPackageContextQuery() { MappedByteBuffer packageMap = null; try { - packageMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.package.map"); - } catch(IOException ex){ + packageMap = + AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.package.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } assertTrue(packageMap != null); try { - PackageReadContext context = AconfigStorageReadAPI.getPackageReadContext( - packageMap, "unknown"); + PackageReadContext context = + AconfigStorageReadAPI.getPackageReadContext(packageMap, "unknown"); assertEquals(context.mPackageId, -1); assertEquals(context.mBooleanStartIndex, -1); - } catch(IOException ex){ + } catch (IOException ex) { assertTrue(ex.toString(), false); } } @@ -97,12 +101,11 @@ public class AconfigStorageReadAPITest{ public void testFlagContextQuery() { MappedByteBuffer flagMap = null; try { - flagMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.flag.map"); - } catch(IOException ex){ + flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagMap!= null); + assertTrue(flagMap != null); class Baseline { public int mPackageId; @@ -110,10 +113,8 @@ public class AconfigStorageReadAPITest{ public 
StoredFlagType mFlagType; public int mFlagIndex; - public Baseline(int packageId, - String flagName, - StoredFlagType flagType, - int flagIndex) { + public Baseline( + int packageId, String flagName, StoredFlagType flagType, int flagIndex) { mPackageId = packageId; mFlagName = flagName; mFlagType = flagType; @@ -133,8 +134,9 @@ public class AconfigStorageReadAPITest{ try { for (Baseline baseline : baselines) { - FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext( - flagMap, baseline.mPackageId, baseline.mFlagName); + FlagReadContext context = + AconfigStorageReadAPI.getFlagReadContext( + flagMap, baseline.mPackageId, baseline.mFlagName); assertEquals(context.mFlagType, baseline.mFlagType); assertEquals(context.mFlagIndex, baseline.mFlagIndex); } @@ -147,21 +149,19 @@ public class AconfigStorageReadAPITest{ public void testNonExistFlagContextQuery() { MappedByteBuffer flagMap = null; try { - flagMap = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/maps/mockup.flag.map"); - } catch(IOException ex){ + flagMap = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/maps/mockup.flag.map"); + } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagMap!= null); + assertTrue(flagMap != null); try { - FlagReadContext context = AconfigStorageReadAPI.getFlagReadContext( - flagMap, 0, "unknown"); + FlagReadContext context = + AconfigStorageReadAPI.getFlagReadContext(flagMap, 0, "unknown"); assertEquals(context.mFlagType, null); assertEquals(context.mFlagIndex, -1); - context = AconfigStorageReadAPI.getFlagReadContext( - flagMap, 3, "enabled_ro"); + context = AconfigStorageReadAPI.getFlagReadContext(flagMap, 3, "enabled_ro"); assertEquals(context.mFlagType, null); assertEquals(context.mFlagIndex, -1); } catch (IOException ex) { @@ -173,12 +173,11 @@ public class AconfigStorageReadAPITest{ public void testBooleanFlagValueQuery() { MappedByteBuffer flagVal = null; try { - flagVal = AconfigStorageReadAPI.mapStorageFile( - mStorageDir 
+ "/boot/mockup.val"); + flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val"); } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagVal!= null); + assertTrue(flagVal != null); boolean[] baselines = {false, true, true, false, true, true, true, true}; for (int i = 0; i < 8; ++i) { @@ -195,12 +194,11 @@ public class AconfigStorageReadAPITest{ public void testInvalidBooleanFlagValueQuery() { MappedByteBuffer flagVal = null; try { - flagVal = AconfigStorageReadAPI.mapStorageFile( - mStorageDir + "/boot/mockup.val"); + flagVal = AconfigStorageReadAPI.mapStorageFile(mStorageDir + "/boot/mockup.val"); } catch (IOException ex) { assertTrue(ex.toString(), false); } - assertTrue(flagVal!= null); + assertTrue(flagVal != null); try { Boolean value = AconfigStorageReadAPI.getBooleanFlagValue(flagVal, 9); @@ -210,4 +208,63 @@ public class AconfigStorageReadAPITest{ assertTrue(ex.toString(), ex.toString().contains(expectedErrmsg)); } } - } + + @Test + public void testRustJavaEqualHash() throws IOException { + List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos(); + for (parsed_flag flag : flags) { + String packageName = flag.package_; + String flagName = flag.name; + long rHash = AconfigStorageReadAPI.hash(packageName); + long jHash = SipHasher13.hash(packageName.getBytes()); + assertEquals(rHash, jHash); + + String fullFlagName = packageName + "/" + flagName; + rHash = AconfigStorageReadAPI.hash(fullFlagName); + jHash = SipHasher13.hash(fullFlagName.getBytes()); + assertEquals(rHash, jHash); + } + } + + @Test + public void testRustJavaEqualFlag() throws IOException { + List<parsed_flag> flags = DeviceProtos.loadAndParseFlagProtos(); + + String mapPath = "/metadata/aconfig/maps/"; + String flagsPath = "/metadata/aconfig/boot/"; + + for (parsed_flag flag : flags) { + + String container = flag.container; + String packageName = flag.package_; + String flagName = flag.name; + String fullFlagName = packageName + "/" + 
flagName; + + MappedByteBuffer packageMap = + AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".package.map"); + MappedByteBuffer flagMap = + AconfigStorageReadAPI.mapStorageFile(mapPath + container + ".flag.map"); + MappedByteBuffer flagValList = + AconfigStorageReadAPI.mapStorageFile(flagsPath + container + ".val"); + + PackageReadContext packageContext = + AconfigStorageReadAPI.getPackageReadContext(packageMap, packageName); + + FlagReadContext flagContext = + AconfigStorageReadAPI.getFlagReadContext( + flagMap, packageContext.mPackageId, flagName); + + boolean rVal = + AconfigStorageReadAPI.getBooleanFlagValue( + flagValList, + packageContext.mBooleanStartIndex + flagContext.mFlagIndex); + + StorageInternalReader reader = new StorageInternalReader(container, packageName); + boolean jVal = reader.getBooleanFlagValue(flagContext.mFlagIndex); + + long rHash = AconfigStorageReadAPI.hash(packageName); + long jHash = SipHasher13.hash(packageName.getBytes()); + assertEquals(rVal, jVal); + } + } +} diff --git a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp index 11b3824e82..3d4e9ad218 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp +++ b/tools/aconfig/aconfig_storage_read_api/tests/java/Android.bp @@ -2,6 +2,8 @@ android_test { name: "aconfig_storage_read_api.test.java", srcs: ["./**/*.java"], static_libs: [ + "aconfig_device_paths_java", + "aconfig_storage_file_java", "aconfig_storage_reader_java", "androidx.test.rules", "libaconfig_storage_read_api_java", diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp index 6d29045efe..753764369d 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp +++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.cpp @@ -45,7 +45,8 @@ class AconfigStorageTest : public 
::testing::Test { } void SetUp() override { - auto const test_dir = android::base::GetExecutableDirectory(); + auto const test_base_dir = android::base::GetExecutableDirectory(); + auto const test_dir = test_base_dir + "/data/v1"; storage_dir = std::string(root_dir.path); auto maps_dir = storage_dir + "/maps"; auto boot_dir = storage_dir + "/boot"; diff --git a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs index afc44d4d70..0d943f8981 100644 --- a/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs +++ b/tools/aconfig/aconfig_storage_read_api/tests/storage_read_api_test.rs @@ -26,10 +26,10 @@ mod aconfig_storage_rust_test { let flag_map = storage_dir.clone() + "/maps/mockup.flag.map"; let flag_val = storage_dir.clone() + "/boot/mockup.val"; let flag_info = storage_dir.clone() + "/boot/mockup.info"; - fs::copy("./package.map", package_map).unwrap(); - fs::copy("./flag.map", flag_map).unwrap(); - fs::copy("./flag.val", flag_val).unwrap(); - fs::copy("./flag.info", flag_info).unwrap(); + fs::copy("./data/v1/package.map", package_map).unwrap(); + fs::copy("./data/v1/flag.map", flag_map).unwrap(); + fs::copy("./data/v1/flag.val", flag_val).unwrap(); + fs::copy("./data/v1/flag.info", flag_info).unwrap(); storage_dir } @@ -200,9 +200,9 @@ mod aconfig_storage_rust_test { #[test] fn test_storage_version_query() { - assert_eq!(get_storage_file_version("./package.map").unwrap(), 1); - assert_eq!(get_storage_file_version("./flag.map").unwrap(), 1); - assert_eq!(get_storage_file_version("./flag.val").unwrap(), 1); - assert_eq!(get_storage_file_version("./flag.info").unwrap(), 1); + assert_eq!(get_storage_file_version("./data/v1/package.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./data/v1/flag.map").unwrap(), 1); + assert_eq!(get_storage_file_version("./data/v1/flag.val").unwrap(), 1); + 
assert_eq!(get_storage_file_version("./data/v1/flag.info").unwrap(), 1); } } diff --git a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp index 7b435746da..03a8fa284a 100644 --- a/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp +++ b/tools/aconfig/aconfig_storage_write_api/aconfig_storage_write_api.cpp @@ -100,18 +100,4 @@ android::base::Result<void> set_flag_has_local_override( return {}; } -android::base::Result<void> create_flag_info( - std::string const& package_map, - std::string const& flag_map, - std::string const& flag_info_out) { - auto creation_cxx = create_flag_info_cxx( - rust::Str(package_map.c_str()), - rust::Str(flag_map.c_str()), - rust::Str(flag_info_out.c_str())); - if (creation_cxx.success) { - return {}; - } else { - return android::base::Error() << creation_cxx.error_message; - } -} } // namespace aconfig_storage diff --git a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp index 0bba7ffcfc..50a51889b1 100644 --- a/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp +++ b/tools/aconfig/aconfig_storage_write_api/include/aconfig_storage/aconfig_storage_write_api.hpp @@ -36,13 +36,4 @@ android::base::Result<void> set_flag_has_local_override( uint32_t offset, bool value); -/// Create flag info file based on package and flag map -/// \input package_map: package map file -/// \input flag_map: flag map file -/// \input flag_info_out: flag info file to be created -android::base::Result<void> create_flag_info( - std::string const& package_map, - std::string const& flag_map, - std::string const& flag_info_out); - } // namespace aconfig_storage diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs 
b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs index 7e6071340c..5640922f57 100644 --- a/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs +++ b/tools/aconfig/aconfig_storage_write_api/src/flag_info_update.rs @@ -18,7 +18,7 @@ use aconfig_storage_file::{ read_u8_from_bytes, AconfigStorageError, FlagInfoBit, FlagInfoHeader, FlagValueType, - FILE_VERSION, + MAX_SUPPORTED_FILE_VERSION, }; use anyhow::anyhow; @@ -28,11 +28,11 @@ fn get_flag_info_offset( flag_index: u32, ) -> Result<usize, AconfigStorageError> { let interpreted_header = FlagInfoHeader::from_bytes(buf)?; - if interpreted_header.version > FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot write to storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } diff --git a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs index dd15c996a6..06a9b15241 100644 --- a/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs +++ b/tools/aconfig/aconfig_storage_write_api/src/flag_value_update.rs @@ -16,7 +16,7 @@ //! 
flag value update module defines the flag value file write to mapped bytes -use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, FILE_VERSION}; +use aconfig_storage_file::{AconfigStorageError, FlagValueHeader, MAX_SUPPORTED_FILE_VERSION}; use anyhow::anyhow; /// Set flag value @@ -26,11 +26,11 @@ pub fn update_boolean_flag_value( flag_value: bool, ) -> Result<usize, AconfigStorageError> { let interpreted_header = FlagValueHeader::from_bytes(buf)?; - if interpreted_header.version > FILE_VERSION { + if interpreted_header.version > MAX_SUPPORTED_FILE_VERSION { return Err(AconfigStorageError::HigherStorageFileVersion(anyhow!( "Cannot write to storage file with a higher version of {} with lib version {}", interpreted_header.version, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION ))); } @@ -84,15 +84,15 @@ mod tests { // this test point locks down query error when file has a higher version fn test_higher_version_storage_file() { let mut value_list = create_test_flag_value_list(); - value_list.header.version = FILE_VERSION + 1; + value_list.header.version = MAX_SUPPORTED_FILE_VERSION + 1; let mut flag_value = value_list.into_bytes(); let error = update_boolean_flag_value(&mut flag_value[..], 4, true).unwrap_err(); assert_eq!( format!("{:?}", error), format!( "HigherStorageFileVersion(Cannot write to storage file with a higher version of {} with lib version {})", - FILE_VERSION + 1, - FILE_VERSION + MAX_SUPPORTED_FILE_VERSION + 1, + MAX_SUPPORTED_FILE_VERSION ) ); } diff --git a/tools/aconfig/aconfig_storage_write_api/src/lib.rs b/tools/aconfig/aconfig_storage_write_api/src/lib.rs index 0396a63d4e..09bb41f54f 100644 --- a/tools/aconfig/aconfig_storage_write_api/src/lib.rs +++ b/tools/aconfig/aconfig_storage_write_api/src/lib.rs @@ -24,15 +24,10 @@ pub mod mapped_file; #[cfg(test)] mod test_utils; -use aconfig_storage_file::{ - AconfigStorageError, FlagInfoHeader, FlagInfoList, FlagInfoNode, FlagTable, FlagValueType, - PackageTable, StorageFileType, StoredFlagType, 
FILE_VERSION, -}; +use aconfig_storage_file::{AconfigStorageError, FlagValueType}; use anyhow::anyhow; use memmap2::MmapMut; -use std::fs::File; -use std::io::{Read, Write}; /// Get read write mapped storage files. /// @@ -104,86 +99,6 @@ pub fn set_flag_has_local_override( }) } -/// Read in storage file as bytes -fn read_file_to_bytes(file_path: &str) -> Result<Vec<u8>, AconfigStorageError> { - let mut file = File::open(file_path).map_err(|errmsg| { - AconfigStorageError::FileReadFail(anyhow!("Failed to open file {}: {}", file_path, errmsg)) - })?; - let mut buffer = Vec::new(); - file.read_to_end(&mut buffer).map_err(|errmsg| { - AconfigStorageError::FileReadFail(anyhow!( - "Failed to read bytes from file {}: {}", - file_path, - errmsg - )) - })?; - Ok(buffer) -} - -/// Create flag info file given package map file and flag map file -/// \input package_map: package map file -/// \input flag_map: flag map file -/// \output flag_info_out: created flag info file -pub fn create_flag_info( - package_map: &str, - flag_map: &str, - flag_info_out: &str, -) -> Result<(), AconfigStorageError> { - let package_table = PackageTable::from_bytes(&read_file_to_bytes(package_map)?)?; - let flag_table = FlagTable::from_bytes(&read_file_to_bytes(flag_map)?)?; - - if package_table.header.container != flag_table.header.container { - return Err(AconfigStorageError::FileCreationFail(anyhow!( - "container for package map {} and flag map {} does not match", - package_table.header.container, - flag_table.header.container, - ))); - } - - let mut package_start_index = vec![0; package_table.header.num_packages as usize]; - for node in package_table.nodes.iter() { - package_start_index[node.package_id as usize] = node.boolean_start_index; - } - - let mut is_flag_rw = vec![false; flag_table.header.num_flags as usize]; - for node in flag_table.nodes.iter() { - let flag_index = package_start_index[node.package_id as usize] + node.flag_index as u32; - is_flag_rw[flag_index as usize] = 
node.flag_type == StoredFlagType::ReadWriteBoolean; - } - - let mut list = FlagInfoList { - header: FlagInfoHeader { - version: FILE_VERSION, - container: flag_table.header.container, - file_type: StorageFileType::FlagInfo as u8, - file_size: 0, - num_flags: flag_table.header.num_flags, - boolean_flag_offset: 0, - }, - nodes: is_flag_rw.iter().map(|&rw| FlagInfoNode::create(rw)).collect(), - }; - - list.header.boolean_flag_offset = list.header.into_bytes().len() as u32; - list.header.file_size = list.into_bytes().len() as u32; - - let mut file = File::create(flag_info_out).map_err(|errmsg| { - AconfigStorageError::FileCreationFail(anyhow!( - "fail to create file {}: {}", - flag_info_out, - errmsg - )) - })?; - file.write_all(&list.into_bytes()).map_err(|errmsg| { - AconfigStorageError::FileCreationFail(anyhow!( - "fail to write to file {}: {}", - flag_info_out, - errmsg - )) - })?; - - Ok(()) -} - // *************************************** // // CC INTERLOP // *************************************** // @@ -212,12 +127,6 @@ mod ffi { pub error_message: String, } - // Flag info file creation return for cc interlop - pub struct FlagInfoCreationCXX { - pub success: bool, - pub error_message: String, - } - // Rust export to c++ extern "Rust" { pub fn update_boolean_flag_value_cxx( @@ -239,12 +148,6 @@ mod ffi { offset: u32, value: bool, ) -> FlagHasLocalOverrideUpdateCXX; - - pub fn create_flag_info_cxx( - package_map: &str, - flag_map: &str, - flag_info_out: &str, - ) -> FlagInfoCreationCXX; } } @@ -329,34 +232,15 @@ pub(crate) fn update_flag_has_local_override_cxx( } } -/// Create flag info file cc interlop -pub(crate) fn create_flag_info_cxx( - package_map: &str, - flag_map: &str, - flag_info_out: &str, -) -> ffi::FlagInfoCreationCXX { - match create_flag_info(package_map, flag_map, flag_info_out) { - Ok(()) => ffi::FlagInfoCreationCXX { success: true, error_message: String::from("") }, - Err(errmsg) => { - ffi::FlagInfoCreationCXX { success: false, error_message: 
format!("{:?}", errmsg) } - } - } -} - #[cfg(test)] mod tests { use super::*; use crate::test_utils::copy_to_temp_file; - use aconfig_storage_file::test_utils::{ - create_test_flag_info_list, create_test_flag_table, create_test_package_table, - write_bytes_to_temp_file, - }; use aconfig_storage_file::FlagInfoBit; use aconfig_storage_read_api::flag_info_query::find_flag_attribute; use aconfig_storage_read_api::flag_value_query::find_boolean_flag_value; use std::fs::File; use std::io::Read; - use tempfile::NamedTempFile; fn get_boolean_flag_value_at_offset(file: &str, offset: u32) -> bool { let mut f = File::open(&file).unwrap(); @@ -439,31 +323,4 @@ mod tests { } } } - - fn create_empty_temp_file() -> Result<NamedTempFile, AconfigStorageError> { - let file = NamedTempFile::new().map_err(|_| { - AconfigStorageError::FileCreationFail(anyhow!("Failed to create temp file")) - })?; - Ok(file) - } - - #[test] - // this test point locks down the flag info creation - fn test_create_flag_info() { - let package_table = - write_bytes_to_temp_file(&create_test_package_table().into_bytes()).unwrap(); - let flag_table = write_bytes_to_temp_file(&create_test_flag_table().into_bytes()).unwrap(); - let flag_info = create_empty_temp_file().unwrap(); - - let package_table_path = package_table.path().display().to_string(); - let flag_table_path = flag_table.path().display().to_string(); - let flag_info_path = flag_info.path().display().to_string(); - - assert!(create_flag_info(&package_table_path, &flag_table_path, &flag_info_path).is_ok()); - - let flag_info = - FlagInfoList::from_bytes(&read_file_to_bytes(&flag_info_path).unwrap()).unwrap(); - let expected_flag_info = create_test_flag_info_list(); - assert_eq!(flag_info, expected_flag_info); - } } diff --git a/tools/aconfig/aflags/Android.bp b/tools/aconfig/aflags/Android.bp index 2a023792b6..a7aceeebad 100644 --- a/tools/aconfig/aflags/Android.bp +++ b/tools/aconfig/aflags/Android.bp @@ -10,7 +10,9 @@ rust_defaults { srcs: 
["src/main.rs"], rustlibs: [ "libaconfig_device_paths", + "libaconfig_flags", "libaconfig_protos", + "libaconfigd_protos_rust", "libaconfig_storage_read_api", "libaconfig_storage_file", "libanyhow", @@ -18,11 +20,16 @@ rust_defaults { "libnix", "libprotobuf", "libregex", + // TODO: b/371021174 remove this fake dependency once we find a proper strategy to + // deal with test aconfig libs are not present in storage because they are never used + // by the actual build + "libaconfig_test_rust_library", ], } rust_binary { name: "aflags", + host_supported: true, defaults: ["aflags.defaults"], } diff --git a/tools/aconfig/aflags/Cargo.toml b/tools/aconfig/aflags/Cargo.toml index eeae295316..d31e232975 100644 --- a/tools/aconfig/aflags/Cargo.toml +++ b/tools/aconfig/aflags/Cargo.toml @@ -9,8 +9,10 @@ paste = "1.0.11" protobuf = "3.2.0" regex = "1.10.3" aconfig_protos = { path = "../aconfig_protos" } +aconfigd_protos = { version = "0.1.0", path = "../../../../../packages/modules/ConfigInfrastructure/aconfigd/proto"} nix = { version = "0.28.0", features = ["user"] } aconfig_storage_file = { version = "0.1.0", path = "../aconfig_storage_file" } aconfig_storage_read_api = { version = "0.1.0", path = "../aconfig_storage_read_api" } clap = {version = "4.5.2" } aconfig_device_paths = { version = "0.1.0", path = "../aconfig_device_paths" } +aconfig_flags = { version = "0.1.0", path = "../aconfig_flags" } diff --git a/tools/aconfig/aflags/src/aconfig_storage_source.rs b/tools/aconfig/aflags/src/aconfig_storage_source.rs index 04140c7fa3..68edf7d3ac 100644 --- a/tools/aconfig/aflags/src/aconfig_storage_source.rs +++ b/tools/aconfig/aflags/src/aconfig_storage_source.rs @@ -1,52 +1,141 @@ -use crate::{Flag, FlagPermission, FlagSource, FlagValue, ValuePickedFrom}; -use anyhow::{anyhow, Result}; - -use std::fs::File; -use std::io::Read; +use crate::load_protos; +use crate::{Flag, FlagSource}; +use crate::{FlagPermission, FlagValue, ValuePickedFrom}; +use aconfigd_protos::{ + 
ProtoFlagQueryReturnMessage, ProtoListStorageMessage, ProtoListStorageMessageMsg, + ProtoStorageRequestMessage, ProtoStorageRequestMessageMsg, ProtoStorageRequestMessages, + ProtoStorageReturnMessage, ProtoStorageReturnMessageMsg, ProtoStorageReturnMessages, +}; +use anyhow::anyhow; +use anyhow::Result; +use protobuf::Message; +use protobuf::SpecialFields; +use std::collections::HashMap; +use std::io::{Read, Write}; +use std::net::Shutdown; +use std::os::unix::net::UnixStream; pub struct AconfigStorageSource {} -use aconfig_storage_file::protos::ProtoStorageFiles; +fn load_flag_to_container() -> Result<HashMap<String, String>> { + Ok(load_protos::load()?.into_iter().map(|p| (p.qualified_name(), p.container)).collect()) +} -static STORAGE_INFO_FILE_PATH: &str = "/metadata/aconfig/persistent_storage_file_records.pb"; +fn convert(msg: ProtoFlagQueryReturnMessage, containers: &HashMap<String, String>) -> Result<Flag> { + let (value, value_picked_from) = match ( + &msg.boot_flag_value, + msg.default_flag_value, + msg.local_flag_value, + msg.has_local_override, + ) { + (_, _, Some(local), Some(has_local)) if has_local => { + (FlagValue::try_from(local.as_str())?, ValuePickedFrom::Local) + } + (Some(boot), Some(default), _, _) => { + let value = FlagValue::try_from(boot.as_str())?; + if *boot == default { + (value, ValuePickedFrom::Default) + } else { + (value, ValuePickedFrom::Server) + } + } + _ => return Err(anyhow!("missing override")), + }; -impl FlagSource for AconfigStorageSource { - fn list_flags() -> Result<Vec<Flag>> { - let mut result = Vec::new(); - - let mut file = File::open(STORAGE_INFO_FILE_PATH)?; - let mut bytes = Vec::new(); - file.read_to_end(&mut bytes)?; - let storage_file_info: ProtoStorageFiles = protobuf::Message::parse_from_bytes(&bytes)?; - - for file_info in storage_file_info.files { - let package_map = - file_info.package_map.ok_or(anyhow!("storage file is missing package map"))?; - let flag_map = file_info.flag_map.ok_or(anyhow!("storage file 
is missing flag map"))?; - let flag_val = file_info.flag_val.ok_or(anyhow!("storage file is missing flag val"))?; - let container = - file_info.container.ok_or(anyhow!("storage file is missing container"))?; - - for listed_flag in aconfig_storage_file::list_flags(&package_map, &flag_map, &flag_val)? - { - result.push(Flag { - name: listed_flag.flag_name, - package: listed_flag.package_name, - value: FlagValue::try_from(listed_flag.flag_value.as_str())?, - container: container.to_string(), - - // TODO(b/324436145): delete namespace field once DeviceConfig isn't in CLI. - namespace: "-".to_string(), - - // TODO(b/324436145): Populate with real values once API is available. - staged_value: None, - permission: FlagPermission::ReadOnly, - value_picked_from: ValuePickedFrom::Default, - }); + let staged_value = match (msg.boot_flag_value, msg.server_flag_value, msg.has_server_override) { + (Some(boot), Some(server), _) if boot == server => None, + (Some(boot), Some(server), Some(has_server)) if boot != server && has_server => { + Some(FlagValue::try_from(server.as_str())?) + } + _ => None, + }; + + let permission = match msg.is_readwrite { + Some(is_readwrite) => { + if is_readwrite { + FlagPermission::ReadWrite + } else { + FlagPermission::ReadOnly } } + None => return Err(anyhow!("missing permission")), + }; + + let name = msg.flag_name.ok_or(anyhow!("missing flag name"))?; + let package = msg.package_name.ok_or(anyhow!("missing package name"))?; + let qualified_name = format!("{package}.{name}"); + Ok(Flag { + name, + package, + value, + permission, + value_picked_from, + staged_value, + container: containers + .get(&qualified_name) + .cloned() + .unwrap_or_else(|| "<no container>".to_string()) + .to_string(), + // TODO: remove once DeviceConfig is not in the CLI. 
+ namespace: "-".to_string(), + }) +} - Ok(result) +fn read_from_socket() -> Result<Vec<ProtoFlagQueryReturnMessage>> { + let messages = ProtoStorageRequestMessages { + msgs: vec![ProtoStorageRequestMessage { + msg: Some(ProtoStorageRequestMessageMsg::ListStorageMessage(ProtoListStorageMessage { + msg: Some(ProtoListStorageMessageMsg::All(true)), + special_fields: SpecialFields::new(), + })), + special_fields: SpecialFields::new(), + }], + special_fields: SpecialFields::new(), + }; + + let mut socket = UnixStream::connect("/dev/socket/aconfigd")?; + + let message_buffer = messages.write_to_bytes()?; + let mut message_length_buffer: [u8; 4] = [0; 4]; + let message_size = &message_buffer.len(); + message_length_buffer[0] = (message_size >> 24) as u8; + message_length_buffer[1] = (message_size >> 16) as u8; + message_length_buffer[2] = (message_size >> 8) as u8; + message_length_buffer[3] = *message_size as u8; + socket.write_all(&message_length_buffer)?; + socket.write_all(&message_buffer)?; + socket.shutdown(Shutdown::Write)?; + + let mut response_length_buffer: [u8; 4] = [0; 4]; + socket.read_exact(&mut response_length_buffer)?; + let response_length = u32::from_be_bytes(response_length_buffer) as usize; + let mut response_buffer = vec![0; response_length]; + socket.read_exact(&mut response_buffer)?; + + let response: ProtoStorageReturnMessages = + protobuf::Message::parse_from_bytes(&response_buffer)?; + + match response.msgs.as_slice() { + [ProtoStorageReturnMessage { + msg: Some(ProtoStorageReturnMessageMsg::ListStorageMessage(list_storage_message)), + .. + }] => Ok(list_storage_message.flags.clone()), + _ => Err(anyhow!("unexpected response from aconfigd")), + } +} + +impl FlagSource for AconfigStorageSource { + fn list_flags() -> Result<Vec<Flag>> { + let containers = load_flag_to_container()?; + read_from_socket() + .map(|query_messages| { + query_messages + .iter() + .map(|message| convert(message.clone(), &containers)) + .collect::<Vec<_>>() + })? 
+ .into_iter() + .collect() } fn override_flag(_namespace: &str, _qualified_name: &str, _value: &str) -> Result<()> { diff --git a/tools/aconfig/aflags/src/main.rs b/tools/aconfig/aflags/src/main.rs index 810f2e31e6..07b7243ab4 100644 --- a/tools/aconfig/aflags/src/main.rs +++ b/tools/aconfig/aflags/src/main.rs @@ -50,6 +50,7 @@ impl std::fmt::Display for FlagPermission { enum ValuePickedFrom { Default, Server, + Local, } impl std::fmt::Display for ValuePickedFrom { @@ -60,6 +61,7 @@ impl std::fmt::Display for ValuePickedFrom { match &self { Self::Default => "default", Self::Server => "server", + Self::Local => "local", } ) } @@ -114,9 +116,10 @@ impl Flag { } fn display_staged_value(&self) -> String { - match self.staged_value { - Some(v) => format!("(->{})", v), - None => "-".to_string(), + match (&self.permission, self.staged_value) { + (FlagPermission::ReadOnly, _) => "-".to_string(), + (FlagPermission::ReadWrite, None) => "-".to_string(), + (FlagPermission::ReadWrite, Some(v)) => format!("(->{})", v), } } } @@ -162,10 +165,6 @@ struct Cli { enum Command { /// List all aconfig flags on this device. List { - /// Read from the new flag storage. - #[clap(long)] - use_new_storage: bool, - /// Optionally filter by container name. #[clap(short = 'c', long = "container")] container: Option<String>, @@ -182,6 +181,9 @@ enum Command { /// <package>.<flag_name> qualified_name: String, }, + + /// Display which flag storage backs aconfig flags. 
+ WhichBacking, } struct PaddingInfo { @@ -280,19 +282,31 @@ fn list(source_type: FlagSourceType, container: Option<String>) -> Result<String Ok(result) } +fn display_which_backing() -> String { + if aconfig_flags::auto_generated::enable_only_new_storage() { + "aconfig_storage".to_string() + } else { + "device_config".to_string() + } +} + fn main() -> Result<()> { ensure!(nix::unistd::Uid::current().is_root(), "must be root"); let cli = Cli::parse(); let output = match cli.command { - Command::List { use_new_storage: true, container } => { - list(FlagSourceType::AconfigStorage, container).map(Some) - } - Command::List { use_new_storage: false, container } => { - list(FlagSourceType::DeviceConfig, container).map(Some) + Command::List { container } => { + if aconfig_flags::auto_generated::enable_only_new_storage() { + list(FlagSourceType::AconfigStorage, container) + .map_err(|err| anyhow!("storage may not be enabled: {err}")) + .map(Some) + } else { + list(FlagSourceType::DeviceConfig, container).map(Some) + } } Command::Enable { qualified_name } => set_flag(&qualified_name, "true").map(|_| None), Command::Disable { qualified_name } => set_flag(&qualified_name, "false").map(|_| None), + Command::WhichBacking => Ok(Some(display_which_backing())), }; match output { Ok(Some(text)) => println!("{text}"), diff --git a/tools/aconfig/fake_device_config/Android.bp b/tools/aconfig/fake_device_config/Android.bp index 4566bf985a..1f17e6b89f 100644 --- a/tools/aconfig/fake_device_config/Android.bp +++ b/tools/aconfig/fake_device_config/Android.bp @@ -13,10 +13,22 @@ // limitations under the License. 
java_library { - name: "fake_device_config", - srcs: ["src/**/*.java"], - sdk_version: "none", - system_modules: "core-all-system-modules", - host_supported: true, + name: "fake_device_config", + srcs: [ + "src/**/*.java", + ], + sdk_version: "none", + system_modules: "core-all-system-modules", + host_supported: true, + is_stubs_module: true, } +java_library { + name: "strict_mode_stub", + srcs: [ + "src/android/os/StrictMode.java", + ], + sdk_version: "core_current", + host_supported: true, + is_stubs_module: true, +} diff --git a/tools/aconfig/fake_device_config/src/android/os/StrictMode.java b/tools/aconfig/fake_device_config/src/android/os/StrictMode.java new file mode 100644 index 0000000000..641625206c --- /dev/null +++ b/tools/aconfig/fake_device_config/src/android/os/StrictMode.java @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.os; + +public class StrictMode { + public static ThreadPolicy allowThreadDiskReads() { + throw new UnsupportedOperationException("Stub!"); + } + + public static void setThreadPolicy(final ThreadPolicy policy) { + throw new UnsupportedOperationException("Stub!"); + } + + public static final class ThreadPolicy {} +} diff --git a/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java b/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java new file mode 100644 index 0000000000..2f01b8c7e6 --- /dev/null +++ b/tools/aconfig/fake_device_config/src/android/provider/AconfigPackage.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2024 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.provider; + +/* + * This class allows generated aconfig code to compile independently of the framework. + */ +public class AconfigPackage { + + /** Flag value is true */ + public static final int FLAG_BOOLEAN_VALUE_TRUE = 1; + + /** Flag value is false */ + public static final int FLAG_BOOLEAN_VALUE_FALSE = 0; + + /** Flag value doesn't exist */ + public static final int FLAG_BOOLEAN_VALUE_NOT_EXIST = 2; + + public static int getBooleanFlagValue(String packageName, String flagName) { + return 0; + } + + public AconfigPackage(String packageName) {} + + public int getBooleanFlagValue(String flagName) { + return 0; + } +}
\ No newline at end of file diff --git a/tools/aconfig/fake_device_config/src/android/util/Log.java b/tools/aconfig/fake_device_config/src/android/util/Log.java new file mode 100644 index 0000000000..79de68060e --- /dev/null +++ b/tools/aconfig/fake_device_config/src/android/util/Log.java @@ -0,0 +1,19 @@ +package android.util; + +public final class Log { + public static int i(String tag, String msg) { + return 0; + } + + public static int w(String tag, String msg) { + return 0; + } + + public static int e(String tag, String msg) { + return 0; + } + + public static int e(String tag, String msg, Throwable tr) { + return 0; + } +} diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py index 8ee599a1ec..d54c4121e4 100755 --- a/tools/auto_gen_test_config.py +++ b/tools/auto_gen_test_config.py @@ -34,6 +34,7 @@ PLACEHOLDER_MODULE = '{MODULE}' PLACEHOLDER_PACKAGE = '{PACKAGE}' PLACEHOLDER_RUNNER = '{RUNNER}' PLACEHOLDER_TEST_TYPE = '{TEST_TYPE}' +PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS = '{EXTRA_TEST_RUNNER_CONFIGS}' def main(argv): @@ -59,6 +60,7 @@ def main(argv): "instrumentation_test_config_template", help="Path to the instrumentation test config template.") parser.add_argument("--extra-configs", default="") + parser.add_argument("--extra-test-runner-configs", default="") args = parser.parse_args(argv) target_config = args.target_config @@ -66,6 +68,7 @@ def main(argv): empty_config = args.empty_config instrumentation_test_config_template = args.instrumentation_test_config_template extra_configs = '\n'.join(args.extra_configs.split('\\n')) + extra_test_runner_configs = '\n'.join(args.extra_test_runner_configs.split('\\n')) module = os.path.splitext(os.path.basename(target_config))[0] @@ -131,6 +134,7 @@ def main(argv): config = config.replace(PLACEHOLDER_PACKAGE, package) config = config.replace(PLACEHOLDER_TEST_TYPE, test_type) config = config.replace(PLACEHOLDER_EXTRA_CONFIGS, extra_configs) + config = 
config.replace(PLACEHOLDER_EXTRA_TEST_RUNNER_CONFIGS, extra_test_runner_configs) config = config.replace(PLACEHOLDER_RUNNER, runner) with open(target_config, 'w') as config_file: config_file.write(config) diff --git a/tools/edit_monitor/Android.bp b/tools/edit_monitor/Android.bp new file mode 100644 index 0000000000..34978214a9 --- /dev/null +++ b/tools/edit_monitor/Android.bp @@ -0,0 +1,66 @@ +// Copyright 2024 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Set of error prone rules to ensure code quality +// PackageLocation check requires the androidCompatible=false otherwise it does not do anything. 
+ +package { + default_applicable_licenses: ["Android-Apache-2.0"], + default_team: "trendy_team_adte", +} + +python_library_host { + name: "edit_event_proto", + srcs: [ + "proto/edit_event.proto", + ], + proto: { + canonical_path_from_root: false, + }, +} + +python_library_host { + name: "edit_monitor_lib", + pkg_path: "edit_monitor", + srcs: [ + "daemon_manager.py", + ], +} + +python_test_host { + name: "daemon_manager_test", + main: "daemon_manager_test.py", + pkg_path: "edit_monitor", + srcs: [ + "daemon_manager_test.py", + ], + libs: [ + "edit_monitor_lib", + ], + test_options: { + unit_test: true, + }, +} + +python_binary_host { + name: "edit_monitor", + pkg_path: "edit_monitor", + srcs: [ + "main.py", + ], + libs: [ + "edit_monitor_lib", + ], + main: "main.py", +} diff --git a/tools/edit_monitor/OWNERS b/tools/edit_monitor/OWNERS new file mode 100644 index 0000000000..8f0f3646dd --- /dev/null +++ b/tools/edit_monitor/OWNERS @@ -0,0 +1 @@ +include platform/tools/asuite:/OWNERS_ADTE_TEAM
# Copyright 2024, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Manages the lifecycle of the edit monitor daemon subprocess."""

import hashlib
import logging
import multiprocessing
import os
import pathlib
import signal
import subprocess
import sys
import tempfile
import time


DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS = 1
DEFAULT_MONITOR_INTERVAL_SECONDS = 5
# Memory threshold is in MB (see _get_process_memory_percent, which converts
# RSS pages to MB); CPU threshold is in percent of a single core.
DEFAULT_MEMORY_USAGE_THRESHOLD = 2000
DEFAULT_CPU_USAGE_THRESHOLD = 200
DEFAULT_REBOOT_TIMEOUT_SECONDS = 60 * 60 * 24
BLOCK_SIGN_FILE = "edit_monitor_block_sign"


def default_daemon_target():
  """Placeholder for the default daemon target."""
  print("default daemon target")


class DaemonManager:
  """Class to manage and monitor the daemon run as a subprocess."""

  def __init__(
      self,
      binary_path: str,
      daemon_target: callable = default_daemon_target,
      daemon_args: tuple = (),
  ):
    self.binary_path = binary_path
    self.daemon_target = daemon_target
    self.daemon_args = daemon_args

    self.pid = os.getpid()
    self.daemon_process = None

    self.max_memory_usage = 0
    self.max_cpu_usage = 0

    pid_file_dir = pathlib.Path(tempfile.gettempdir()).joinpath("edit_monitor")
    pid_file_dir.mkdir(parents=True, exist_ok=True)
    self.pid_file_path = self._get_pid_file_path(pid_file_dir)
    self.block_sign = pathlib.Path(tempfile.gettempdir()).joinpath(
        BLOCK_SIGN_FILE
    )

  def start(self):
    """Writes the pidfile and starts the daemon process.

    Does nothing if the block sign file exists (emergency kill switch placed
    by cleanup()).
    """
    if self.block_sign.exists():
      logging.warning("Block sign found, exiting...")
      return

    self._stop_any_existing_instance()
    self._write_pid_to_pidfile()
    self._start_daemon_process()

  def monitor_daemon(
      self,
      interval: int = DEFAULT_MONITOR_INTERVAL_SECONDS,
      memory_threshold: float = DEFAULT_MEMORY_USAGE_THRESHOLD,
      cpu_threshold: float = DEFAULT_CPU_USAGE_THRESHOLD,
      reboot_timeout: int = DEFAULT_REBOOT_TIMEOUT_SECONDS,
  ):
    """Monitors the daemon process status.

    Periodically check the CPU/Memory usage of the daemon process as long as
    the process is still running and kill the process if the resource usage
    is above given thresholds. Also reboots the whole manager process once
    reboot_timeout elapses.
    """
    if not self.daemon_process:
      return

    logging.info("start monitoring daemon process %d.", self.daemon_process.pid)
    reboot_time = time.time() + reboot_timeout
    while self.daemon_process.is_alive():
      if time.time() > reboot_time:
        self.reboot()
      try:
        memory_usage = self._get_process_memory_percent(self.daemon_process.pid)
        self.max_memory_usage = max(self.max_memory_usage, memory_usage)

        cpu_usage = self._get_process_cpu_percent(self.daemon_process.pid)
        self.max_cpu_usage = max(self.max_cpu_usage, cpu_usage)

        time.sleep(interval)
      except Exception as e:
        # Logging the error and continue: monitoring failures must never kill
        # an otherwise healthy daemon.
        logging.warning("Failed to monitor daemon process with error: %s", e)

      if (
          self.max_memory_usage >= memory_threshold
          or self.max_cpu_usage >= cpu_threshold
      ):
        # Note: no trailing comma here; a stray comma previously turned this
        # statement into a one-element tuple expression.
        logging.error(
            "Daemon process is consuming too much resource, killing..."
        )
        self._terminate_process(self.daemon_process.pid)

    logging.info(
        "Daemon process %d terminated. Max memory usage: %f, Max cpu"
        " usage: %f.",
        self.daemon_process.pid,
        self.max_memory_usage,
        self.max_cpu_usage,
    )

  def stop(self):
    """Stops the daemon process and removes the pidfile."""

    logging.debug("in daemon manager cleanup.")
    try:
      if self.daemon_process and self.daemon_process.is_alive():
        self._terminate_process(self.daemon_process.pid)
      self._remove_pidfile()
      logging.debug("Successfully stopped daemon manager.")
    except Exception as e:
      logging.exception("Failed to stop daemon manager with error %s", e)

  def reboot(self):
    """Reboots the current process.

    Stops the current daemon manager and reboots the entire process based on
    the binary file. Exits directly if the binary file no longer exists.
    """
    logging.debug("Rebooting process based on binary %s.", self.binary_path)

    # Stop the current daemon manager first.
    self.stop()

    # If the binary no longer exists, exit directly.
    if not os.path.exists(self.binary_path):
      logging.info("binary %s no longer exists, exiting.", self.binary_path)
      sys.exit(0)

    try:
      os.execv(self.binary_path, sys.argv)
    except OSError as e:
      logging.exception("Failed to reboot process with error: %s.", e)
      sys.exit(1)  # Indicate an error occurred

  def cleanup(self):
    """Wipes out all edit monitor instances in the system.

    Stops all the existing edit monitor instances and places a block sign
    to prevent any edit monitor process from starting. This method is only
    used in emergency cases when something goes wrong with the edit monitor
    that requires immediate cleanup to prevent damage to the system.
    """
    logging.debug("Start cleaning up all existing instances.")

    try:
      # First places a block sign to prevent any edit monitor process to start.
      self.block_sign.touch()
    except (FileNotFoundError, PermissionError, OSError):
      logging.exception("Failed to place the block sign")

    # Finds and kills all the existing instances of edit monitor.
    existing_instances_pids = self._find_all_instances_pids()
    for pid in existing_instances_pids:
      logging.info(
          "Found existing edit monitor instance with pid %d, killing...", pid
      )
      try:
        self._terminate_process(pid)
      except Exception:
        logging.exception("Failed to terminate process %d", pid)

  def _stop_any_existing_instance(self):
    # The pidfile is keyed on the binary path, so this only finds instances
    # started from the same binary.
    if not self.pid_file_path.exists():
      logging.debug("No existing instances.")
      return

    ex_pid = self._read_pid_from_pidfile()

    if ex_pid:
      logging.info("Found another instance with pid %d.", ex_pid)
      self._terminate_process(ex_pid)
      self._remove_pidfile()

  def _read_pid_from_pidfile(self):
    with open(self.pid_file_path, "r") as f:
      return int(f.read().strip())

  def _write_pid_to_pidfile(self):
    """Creates a pidfile and writes the current pid to the file.

    Raise FileExistsError if the pidfile already exists.
    """
    try:
      # Use the 'x' mode to open the file for exclusive creation
      with open(self.pid_file_path, "x") as f:
        f.write(f"{self.pid}")
    except FileExistsError as e:
      # This could be caused due to race condition that a user is trying
      # to start two edit monitors at the same time. Or because there is
      # already an existing edit monitor running and we can not kill it
      # for some reason.
      logging.exception("pidfile %s already exists.", self.pid_file_path)
      raise e

  def _start_daemon_process(self):
    """Starts a subprocess to run the daemon."""
    p = multiprocessing.Process(
        target=self.daemon_target, args=self.daemon_args
    )
    p.start()

    logging.info("Start subprocess with PID %d", p.pid)
    self.daemon_process = p

  def _terminate_process(
      self, pid: int, timeout: int = DEFAULT_PROCESS_TERMINATION_TIMEOUT_SECONDS
  ):
    """Terminates a process with given pid.

    It first sends a SIGTERM to the process to allow it for proper
    termination with a timeout. If the process is not terminated within
    the timeout, kills it forcefully.
    """
    try:
      os.kill(pid, signal.SIGTERM)
      if not self._wait_for_process_terminate(pid, timeout):
        logging.warning(
            "Process %d not terminated within timeout, try force kill", pid
        )
        os.kill(pid, signal.SIGKILL)
    except ProcessLookupError:
      logging.info("Process with PID %d not found (already terminated)", pid)

  def _wait_for_process_terminate(self, pid: int, timeout: int) -> bool:
    start_time = time.time()

    while time.time() < start_time + timeout:
      if not self._is_process_alive(pid):
        return True
      time.sleep(1)

    logging.error("Process %d not terminated within %d seconds.", pid, timeout)
    return False

  def _is_process_alive(self, pid: int) -> bool:
    try:
      output = subprocess.check_output(
          ["ps", "-p", str(pid), "-o", "state="], text=True
      ).strip()
      state = output.split()[0]
      return state != "Z"  # Check if the state is not 'Z' (zombie)
    except subprocess.CalledProcessError:
      # Process not found (already dead).
      return False
    except (FileNotFoundError, OSError, ValueError) as e:
      logging.warning(
          "Unable to check the status for process %d with error: %s.", pid, e
      )
      return True

  def _remove_pidfile(self):
    try:
      os.remove(self.pid_file_path)
    except FileNotFoundError:
      logging.info("pid file %s already removed.", self.pid_file_path)

  def _get_pid_file_path(self, pid_file_dir: pathlib.Path) -> pathlib.Path:
    """Generates the path to store the pidfile.

    The file path should have the format of "/tmp/edit_monitor/xxxx.lock"
    where xxxx is a hashed value based on the binary path that starts the
    process.
    """
    hash_object = hashlib.sha256()
    hash_object.update(self.binary_path.encode("utf-8"))
    pid_file_path = pid_file_dir.joinpath(hash_object.hexdigest() + ".lock")
    logging.info("pid_file_path: %s", pid_file_path)

    return pid_file_path

  def _get_process_memory_percent(self, pid: int) -> float:
    """Returns the RSS of the process in MB (despite the method name)."""
    try:
      with open(f"/proc/{pid}/stat", "r") as f:
        stat_data = f.readline().split()
        # RSS is the 24th field in /proc/[pid]/stat
        # NOTE(review): assumes 4 KB pages — TODO confirm with
        # os.sysconf("SC_PAGE_SIZE") on non-standard kernels.
        rss_pages = int(stat_data[23])
        return rss_pages * 4 / 1024  # Convert to MB
    except (FileNotFoundError, IndexError, ValueError, IOError) as e:
      logging.exception("Failed to get memory usage.")
      raise e

  def _get_process_cpu_percent(self, pid: int, interval: int = 1) -> float:
    """Samples CPU usage of pid over `interval` seconds, in percent."""
    try:
      total_start_time = self._get_total_cpu_time(pid)
      with open("/proc/uptime", "r") as f:
        uptime_start = float(f.readline().split()[0])

      time.sleep(interval)

      total_end_time = self._get_total_cpu_time(pid)
      with open("/proc/uptime", "r") as f:
        uptime_end = float(f.readline().split()[0])

      return (
          (total_end_time - total_start_time)
          / (uptime_end - uptime_start)
          * 100
      )
    except (FileNotFoundError, IndexError, ValueError, IOError) as e:
      logging.exception("Failed to get CPU usage.")
      raise e

  def _get_total_cpu_time(self, pid: int) -> float:
    """Returns the user+system CPU time of pid in seconds."""
    with open(f"/proc/{str(pid)}/stat", "r") as f:
      stats = f.readline().split()
      # utime is the 14th field in /proc/[pid]/stat measured in clock ticks.
      utime = int(stats[13])
      # stime is the 15th field in /proc/[pid]/stat measured in clock ticks.
      stime = int(stats[14])
      return (utime + stime) / os.sysconf(os.sysconf_names["SC_CLK_TCK"])

  def _find_all_instances_pids(self) -> list[int]:
    """Collects pids recorded in every .lock file in the pidfile directory."""
    pids = []

    for file in os.listdir(self.pid_file_path.parent):
      if file.endswith(".lock"):
        try:
          with open(self.pid_file_path.parent.joinpath(file), "r") as f:
            pids.append(int(f.read().strip()))
        except (FileNotFoundError, IOError, ValueError, TypeError):
          logging.exception("Failed to get pid from file path: %s", file)

    return pids
# Copyright 2024, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for DaemonManager."""

import logging
import multiprocessing
import os
import pathlib
import signal
import subprocess
import sys
import tempfile
import time
import unittest
from unittest import mock
from edit_monitor import daemon_manager


TEST_BINARY_FILE = '/path/to/test_binary'
# sha256('/path/to/test_binary') + '.lock' — must match
# DaemonManager._get_pid_file_path for TEST_BINARY_FILE.
TEST_PID_FILE_PATH = (
    '587239c2d1050afdf54512e2d799f3b929f86b43575eb3c7b4bab105dd9bd25e.lock'
)


def simple_daemon(output_file):
  """Daemon target that writes a marker string and exits immediately."""
  with open(output_file, 'w') as f:
    f.write('running daemon target')


def long_running_daemon():
  """Daemon target that never exits on its own."""
  while True:
    time.sleep(1)


def memory_consume_daemon_target(size_mb):
  """Daemon target that holds `size_mb` MB of memory for 10 seconds."""
  try:
    size_bytes = size_mb * 1024 * 1024
    # Keep a reference so the allocation stays resident while we sleep.
    dummy_data = bytearray(size_bytes)
    time.sleep(10)
  except MemoryError:
    print(f'Process failed to allocate {size_mb} MB of memory.')


def cpu_consume_daemon_target(target_usage_percent):
  """Daemon target that busy-loops to hit roughly the given CPU percentage."""
  while True:
    start_time = time.time()
    while time.time() - start_time < target_usage_percent / 100:
      pass  # Busy loop to consume CPU

    # Sleep to reduce CPU usage
    time.sleep(1 - target_usage_percent / 100)


class DaemonManagerTest(unittest.TestCase):

  @classmethod
  def setUpClass(cls):
    super().setUpClass()
    # Configure to print logging to stdout.
    logging.basicConfig(filename=None, level=logging.DEBUG)
    console = logging.StreamHandler(sys.stdout)
    logging.getLogger('').addHandler(console)

  def setUp(self):
    super().setUp()
    self.original_tempdir = tempfile.tempdir
    self.working_dir = tempfile.TemporaryDirectory()
    # Sets the tempdir under the working dir so any temp files created during
    # tests will be cleaned.
    tempfile.tempdir = self.working_dir.name

  def tearDown(self):
    # Cleans up any child processes left by the tests.
    self._cleanup_child_processes()
    self.working_dir.cleanup()
    # Restores tempdir.
    tempfile.tempdir = self.original_tempdir
    super().tearDown()

  def test_start_success_with_no_existing_instance(self):
    self.assert_run_simple_daemon_success()

  def test_start_success_with_existing_instance_running(self):
    # Create a running daemon subprocess; start() must kill it and take over.
    p = self._create_fake_daemon_process()

    self.assert_run_simple_daemon_success()

  def test_start_success_with_existing_instance_already_dead(self):
    # Create a pidfile with pid that does not exist.
    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
        'edit_monitor'
    )
    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
    with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
      f.write('123456')

    self.assert_run_simple_daemon_success()

  def test_start_success_with_existing_instance_from_different_binary(self):
    # First start an instance based on "some_binary_path"
    existing_dm = daemon_manager.DaemonManager(
        'some_binary_path',
        daemon_target=long_running_daemon,
    )
    existing_dm.start()

    self.assert_run_simple_daemon_success()
    existing_dm.stop()

  def test_start_return_directly_if_block_sign_exists(self):
    # Creates the block sign.
    pathlib.Path(self.working_dir.name).joinpath(
        daemon_manager.BLOCK_SIGN_FILE
    ).touch()

    dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
    dm.start()
    # Verify no daemon process is started.
    self.assertIsNone(dm.daemon_process)

  @mock.patch('os.kill')
  def test_start_failed_to_kill_existing_instance(self, mock_kill):
    mock_kill.side_effect = OSError('Unknown OSError')
    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
        'edit_monitor'
    )
    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
    with open(pid_file_path_dir.joinpath(TEST_PID_FILE_PATH), 'w') as f:
      f.write('123456')

    with self.assertRaises(OSError) as error:
      dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
      dm.start()

  def test_start_failed_to_write_pidfile(self):
    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
        'edit_monitor'
    )
    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
    # Makes the directory read-only so write pidfile will fail.
    os.chmod(pid_file_path_dir, 0o555)

    with self.assertRaises(PermissionError) as error:
      dm = daemon_manager.DaemonManager(TEST_BINARY_FILE)
      dm.start()

  def test_start_failed_to_start_daemon_process(self):
    # daemon_args=(1) is deliberately NOT a tuple, and the target is a string:
    # multiprocessing.Process should raise TypeError.
    with self.assertRaises(TypeError) as error:
      dm = daemon_manager.DaemonManager(
          TEST_BINARY_FILE, daemon_target='wrong_target', daemon_args=(1)
      )
      dm.start()

  def test_monitor_daemon_subprocess_killed_high_memory_usage(self):
    dm = daemon_manager.DaemonManager(
        TEST_BINARY_FILE,
        daemon_target=memory_consume_daemon_target,
        daemon_args=(2,),
    )
    dm.start()
    dm.monitor_daemon(interval=1, memory_threshold=2)

    self.assertTrue(dm.max_memory_usage >= 2)
    self.assert_no_subprocess_running()

  def test_monitor_daemon_subprocess_killed_high_cpu_usage(self):
    dm = daemon_manager.DaemonManager(
        TEST_BINARY_FILE,
        daemon_target=cpu_consume_daemon_target,
        daemon_args=(20,),
    )
    dm.start()
    dm.monitor_daemon(interval=1, cpu_threshold=20)

    self.assertTrue(dm.max_cpu_usage >= 20)
    self.assert_no_subprocess_running()

  @mock.patch('subprocess.check_output')
  def test_monitor_daemon_failed_does_not_matter(self, mock_output):
    mock_output.side_effect = OSError('Unknown OSError')
    self.assert_run_simple_daemon_success()

  @mock.patch('os.execv')
  def test_monitor_daemon_reboot_triggered(self, mock_execv):
    binary_file = tempfile.NamedTemporaryFile(
        dir=self.working_dir.name, delete=False
    )

    dm = daemon_manager.DaemonManager(
        binary_file.name, daemon_target=long_running_daemon
    )
    dm.start()
    dm.monitor_daemon(reboot_timeout=0.5)
    mock_execv.assert_called_once()

  def test_stop_success(self):
    dm = daemon_manager.DaemonManager(
        TEST_BINARY_FILE, daemon_target=long_running_daemon
    )
    dm.start()
    dm.stop()

    self.assert_no_subprocess_running()
    self.assertFalse(dm.pid_file_path.exists())

  @mock.patch('os.kill')
  def test_stop_failed_to_kill_daemon_process(self, mock_kill):
    mock_kill.side_effect = OSError('Unknown OSError')
    dm = daemon_manager.DaemonManager(
        TEST_BINARY_FILE, daemon_target=long_running_daemon
    )
    dm.start()
    dm.stop()

    self.assertTrue(dm.daemon_process.is_alive())
    self.assertTrue(dm.pid_file_path.exists())

  @mock.patch('os.remove')
  def test_stop_failed_to_remove_pidfile(self, mock_remove):
    mock_remove.side_effect = OSError('Unknown OSError')

    dm = daemon_manager.DaemonManager(
        TEST_BINARY_FILE, daemon_target=long_running_daemon
    )
    dm.start()
    dm.stop()

    self.assert_no_subprocess_running()
    self.assertTrue(dm.pid_file_path.exists())

  @mock.patch('os.execv')
  def test_reboot_success(self, mock_execv):
    binary_file = tempfile.NamedTemporaryFile(
        dir=self.working_dir.name, delete=False
    )

    dm = daemon_manager.DaemonManager(
        binary_file.name, daemon_target=long_running_daemon
    )
    dm.start()
    dm.reboot()

    # Verifies the old process is stopped
    self.assert_no_subprocess_running()
    self.assertFalse(dm.pid_file_path.exists())

    mock_execv.assert_called_once()

  @mock.patch('os.execv')
  def test_reboot_binary_no_longer_exists(self, mock_execv):
    dm = daemon_manager.DaemonManager(
        TEST_BINARY_FILE, daemon_target=long_running_daemon
    )
    dm.start()

    with self.assertRaises(SystemExit) as cm:
      dm.reboot()
      mock_execv.assert_not_called()
      self.assertEqual(cm.exception.code, 0)

  @mock.patch('os.execv')
  def test_reboot_failed(self, mock_execv):
    mock_execv.side_effect = OSError('Unknown OSError')
    binary_file = tempfile.NamedTemporaryFile(
        dir=self.working_dir.name, delete=False
    )

    dm = daemon_manager.DaemonManager(
        binary_file.name, daemon_target=long_running_daemon
    )
    dm.start()

    with self.assertRaises(SystemExit) as cm:
      dm.reboot()
      self.assertEqual(cm.exception.code, 1)

  def assert_run_simple_daemon_success(self):
    daemon_output_file = tempfile.NamedTemporaryFile(
        dir=self.working_dir.name, delete=False
    )
    dm = daemon_manager.DaemonManager(
        TEST_BINARY_FILE,
        daemon_target=simple_daemon,
        daemon_args=(daemon_output_file.name,),
    )
    dm.start()
    dm.monitor_daemon(interval=1)

    # Verifies the expected pid file is created.
    expected_pid_file_path = pathlib.Path(self.working_dir.name).joinpath(
        'edit_monitor', TEST_PID_FILE_PATH
    )
    self.assertTrue(expected_pid_file_path.exists())

    # Verify the daemon process is executed successfully.
    with open(daemon_output_file.name, 'r') as f:
      contents = f.read()
      self.assertEqual(contents, 'running daemon target')

  def assert_no_subprocess_running(self):
    child_pids = self._get_child_processes(os.getpid())
    for child_pid in child_pids:
      self.assertFalse(
          self._is_process_alive(child_pid), f'process {child_pid} still alive'
      )

  def _get_child_processes(self, parent_pid: int) -> list[int]:
    """Returns pids of direct children of parent_pid, via ps."""
    try:
      output = subprocess.check_output(
          ['ps', '-o', 'pid,ppid', '--no-headers'], text=True
      )

      child_processes = []
      for line in output.splitlines():
        pid, ppid = line.split()
        if int(ppid) == parent_pid:
          child_processes.append(int(pid))
      return child_processes
    except subprocess.CalledProcessError as e:
      self.fail(f'failed to get child process, error: {e}')

  def _is_process_alive(self, pid: int) -> bool:
    try:
      output = subprocess.check_output(
          ['ps', '-p', str(pid), '-o', 'state='], text=True
      ).strip()
      state = output.split()[0]
      return state != 'Z'  # Check if the state is not 'Z' (zombie)
    except subprocess.CalledProcessError:
      return False

  def _cleanup_child_processes(self):
    child_pids = self._get_child_processes(os.getpid())
    for child_pid in child_pids:
      try:
        os.kill(child_pid, signal.SIGKILL)
      except ProcessLookupError:
        # process already terminated
        pass

  def _create_fake_daemon_process(
      self, name: str = ''
  ) -> multiprocessing.Process:
    """Starts a long-running subprocess and records its pid in a pidfile."""
    # Create a long running subprocess
    p = multiprocessing.Process(target=long_running_daemon)
    p.start()

    # Create the pidfile with the subprocess pid
    pid_file_path_dir = pathlib.Path(self.working_dir.name).joinpath(
        'edit_monitor'
    )
    pid_file_path_dir.mkdir(parents=True, exist_ok=True)
    with open(pid_file_path_dir.joinpath(name + 'pid.lock'), 'w') as f:
      f.write(str(p.pid))
    return p


if __name__ == '__main__':
  unittest.main()
syntax = "proto3";

package tools.asuite.edit_monitor;

message EditEvent {
  // Kind of filesystem change observed for a single file.
  enum EditType {
    UNSUPPORTED_TYPE = 0;
    CREATE = 1;
    MODIFY = 2;
    DELETE = 3;
    MOVE = 4;
  }

  // Category of failure reported by the edit monitor itself.
  enum ErrorType {
    UNKNOWN_ERROR = 0;
    FAILED_TO_START_EDIT_MONITOR = 1;
    FAILED_TO_STOP_EDIT_MONITOR = 2;
    FAILED_TO_REBOOT_EDIT_MONITOR = 3;
    KILLED_DUE_TO_EXCEEDED_RESOURCE_USAGE = 4;
    FORCE_CLEANUP = 5;
  }

  // Event that logs a single edit
  message SingleEditEvent {
    // Full path of the file that edited.
    string file_path = 1;
    // Type of the edit.
    EditType edit_type = 2;
  }

  // Event that logs aggregated info for a set of edits.
  message AggregatedEditEvent {
    int32 num_edits = 1;
  }

  // Event that logs errors happened in the edit monitor.
  message EditMonitorErrorEvent {
    ErrorType error_type = 1;
    string error_msg = 2;
    string stack_trace = 3;
  }

  // ------------------------
  // FIELDS FOR EditEvent
  // ------------------------
  // Internal user name.
  string user_name = 1;
  // The root of Android source.
  string source_root = 2;
  // Name of the host workstation.
  string host_name = 3;

  // Exactly one payload per event.
  oneof event {
    SingleEditEvent single_edit_event = 4;
    AggregatedEditEvent aggregated_edit_event = 5;
    EditMonitorErrorEvent edit_monitor_error_event = 6;
  }
}
\ No newline at end of file +} + +prebuilt_etc_host { + name: "system_image_diff_allowlist_next", + src: "allowlist_next", +} diff --git a/tools/filelistdiff/OWNERS b/tools/filelistdiff/OWNERS new file mode 100644 index 0000000000..690fb178fc --- /dev/null +++ b/tools/filelistdiff/OWNERS @@ -0,0 +1 @@ +per-file allowlist = justinyun@google.com, jeongik@google.com, kiyoungkim@google.com, inseob@google.com diff --git a/tools/filelistdiff/allowlist b/tools/filelistdiff/allowlist index 72c12a0f54..eb785872cf 100644 --- a/tools/filelistdiff/allowlist +++ b/tools/filelistdiff/allowlist @@ -1,99 +1,5 @@ -# Known diffs only in the KATI system image -etc/NOTICE.xml.gz -etc/compatconfig/TeleService-platform-compat-config.xml -etc/compatconfig/calendar-provider-compat-config.xml -etc/compatconfig/contacts-provider-platform-compat-config.xml -etc/compatconfig/documents-ui-compat-config.xml -etc/compatconfig/framework-location-compat-config.xml -etc/compatconfig/framework-platform-compat-config.xml -etc/compatconfig/icu4j-platform-compat-config.xml -etc/compatconfig/services-platform-compat-config.xml -etc/permissions/android.software.credentials.xml -etc/permissions/android.software.preview_sdk.xml -etc/permissions/android.software.webview.xml -etc/permissions/android.software.window_magnification.xml -etc/permissions/com.android.adservices.sdksandbox.xml -etc/security/otacerts.zip -etc/vintf/compatibility_matrix.202404.xml -etc/vintf/compatibility_matrix.202504.xml -etc/vintf/compatibility_matrix.5.xml -etc/vintf/compatibility_matrix.6.xml -etc/vintf/compatibility_matrix.7.xml -etc/vintf/compatibility_matrix.8.xml -etc/vintf/compatibility_matrix.device.xml -etc/vintf/manifest.xml -framework/boot-apache-xml.vdex -framework/boot-apache-xml.vdex.fsv_meta -framework/boot-bouncycastle.vdex -framework/boot-bouncycastle.vdex.fsv_meta -framework/boot-core-icu4j.vdex -framework/boot-core-icu4j.vdex.fsv_meta -framework/boot-core-libart.vdex -framework/boot-core-libart.vdex.fsv_meta 
-framework/boot-ext.vdex -framework/boot-ext.vdex.fsv_meta -framework/boot-framework-adservices.vdex -framework/boot-framework-adservices.vdex.fsv_meta -framework/boot-framework-graphics.vdex -framework/boot-framework-graphics.vdex.fsv_meta -framework/boot-framework-location.vdex -framework/boot-framework-location.vdex.fsv_meta -framework/boot-framework.vdex -framework/boot-framework.vdex.fsv_meta -framework/boot-ims-common.vdex -framework/boot-ims-common.vdex.fsv_meta -framework/boot-okhttp.vdex -framework/boot-okhttp.vdex.fsv_meta -framework/boot-telephony-common.vdex -framework/boot-telephony-common.vdex.fsv_meta -framework/boot-voip-common.vdex -framework/boot-voip-common.vdex.fsv_meta -framework/boot.vdex -framework/boot.vdex.fsv_meta -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.odex.fsv_meta -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex -framework/oat/x86_64/apex@com.android.compos@javalib@service-compos.jar@classes.vdex.fsv_meta -lib/aaudio-aidl-cpp.so -lib/android.hardware.biometrics.fingerprint@2.1.so -lib/android.hardware.radio.config@1.0.so -lib/android.hardware.radio.deprecated@1.0.so -lib/android.hardware.radio@1.0.so -lib/android.hardware.radio@1.1.so -lib/android.hardware.radio@1.2.so -lib/android.hardware.radio@1.3.so -lib/android.hardware.radio@1.4.so -lib/android.hardware.secure_element@1.0.so -lib/com.android.media.aaudio-aconfig-cc.so -lib/heapprofd_client.so -lib/heapprofd_client_api.so -lib/libaaudio.so -lib/libaaudio_internal.so -lib/libalarm_jni.so -lib/libamidi.so -lib/libcups.so -lib/libjni_deviceAsWebcam.so -lib/libprintspooler_jni.so -lib/libvendorsupport.so -lib/libwfds.so -lib/libyuv.so - -# b/351258461 -adb_keys +# Known diffs that are installed in either system image with the configuration +# b/353429422 init.environ.rc - -# Known diffs only in the Soong system image 
-lib/libhidcommand_jni.so -lib/libuinputcommand_jni.so - -# Known diffs in internal source -bin/uprobestats -etc/aconfig/flag.map -etc/aconfig/flag.val -etc/aconfig/package.map -etc/bpf/uprobestats/BitmapAllocation.o -etc/bpf/uprobestats/GenericInstrumentation.o -etc/init/UprobeStats.rc -lib/libuprobestats_client.so -lib64/libuprobestats_client.so -priv-app/DeviceDiagnostics/DeviceDiagnostics.apk
COLOR_WARNING = '\033[93m'
COLOR_ERROR = '\033[91m'
COLOR_NORMAL = '\033[0m'


def find_unique_items(kati_installed_files, soong_installed_files, system_module_name, allowlists):
    """Diffs the KATI and Soong installed-file lists and reports mismatches.

    Args:
      kati_installed_files: path to the KATI installed-file list.
      soong_installed_files: path to the Soong installed-file list.
      system_module_name: name of the Soong system image module (used in
        the error messages only).
      allowlists: iterable of allowlist file paths, or None. Lines are
        stripped and '#' comments removed; listed paths are ignored in
        the diff.

    Exits with status 1 (via sys.exit) if any unallowed difference exists.
    """
    with open(kati_installed_files, 'r') as kati_list_file, \
        open(soong_installed_files, 'r') as soong_list_file:
        kati_files = set(kati_list_file.read().split())
        soong_files = set(soong_list_file.read().split())

    # Guard against allowlists being None (argparse yields None when the
    # --allowlists flag is omitted); previously this raised TypeError.
    allowed_files = set()
    for allowlist in allowlists or []:
        with open(allowlist, 'r') as allowlist_file:
            for line in allowlist_file:
                entry = line.split('#', 1)[0].strip()
                if entry:
                    allowed_files.add(entry)

    def is_unknown_diff(filepath):
        return filepath not in allowed_files

    unique_in_kati = set(filter(is_unknown_diff, kati_files - soong_files))
    unique_in_soong = set(filter(is_unknown_diff, soong_files - kati_files))

    if unique_in_kati:
        print('')
        print(f'{COLOR_ERROR}Missing required modules in {system_module_name} module.{COLOR_NORMAL}')
        print(f'To resolve this issue, please add the modules to the Android.bp file for the {system_module_name} to install the following KATI only installed files.')
        print(f'You can find the correct Android.bp file using the command "gomod {system_module_name}".')
        print(f'{COLOR_WARNING}KATI only installed file(s):{COLOR_NORMAL}')
        for item in sorted(unique_in_kati):
            print(' '+item)

    if unique_in_soong:
        print('')
        print(f'{COLOR_ERROR}Missing packages in base_system.mk.{COLOR_NORMAL}')
        print('Please add packages into build/make/target/product/base_system.mk or build/make/tools/filelistdiff/allowlist to install or skip the following Soong only installed files.')
        print(f'{COLOR_WARNING}Soong only installed file(s):{COLOR_NORMAL}')
        for item in sorted(unique_in_soong):
            print(' '+item)

    if unique_in_kati or unique_in_soong:
        print('')
        sys.exit(1)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument('kati_installed_file_list')
    parser.add_argument('soong_installed_file_list')
    parser.add_argument('system_module_name')
    # default=[] so find_unique_items never receives None when the flag
    # is omitted.
    parser.add_argument('--allowlists', nargs='+', default=[])

    args = parser.parse_args()

    find_unique_items(args.kati_installed_file_list, args.soong_installed_file_list, args.system_module_name, args.allowlists)
\ No newline at end of file diff --git a/tools/ide_query/ide_query.go b/tools/ide_query/ide_query.go index 23c7abd2a0..89ac78fa5f 100644 --- a/tools/ide_query/ide_query.go +++ b/tools/ide_query/ide_query.go @@ -363,6 +363,7 @@ func getJavaInputs(env Env, modulesByPath map[string]string, modules map[string] Id: name, SourceFilePaths: mod.Srcs, GeneratedFiles: genFiles(env, paths), + DependencyIds: mod.Deps, } for _, d := range mod.Deps { diff --git a/tools/ide_query/ide_query.sh b/tools/ide_query/ide_query.sh index 6f9b0c4b8b..8dfffc1cfa 100755 --- a/tools/ide_query/ide_query.sh +++ b/tools/ide_query/ide_query.sh @@ -19,7 +19,7 @@ source $(pwd)/../../shell_utils.sh require_top # Ensure cogsetup (out/ will be symlink outside the repo) -. ${TOP}/build/make/cogsetup.sh +setup_cog_env_if_needed case $(uname -s) in Linux) diff --git a/tools/ide_query/prober_scripts/jvm/Foo.java b/tools/ide_query/prober_scripts/jvm/Foo.java new file mode 100644 index 0000000000..a043f72e32 --- /dev/null +++ b/tools/ide_query/prober_scripts/jvm/Foo.java @@ -0,0 +1,37 @@ +/* + * Copyright 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package jvm; + +import java.util.ArrayList; +import java.util.HashSet; + +/** Foo class. */ +public final class Foo { + + void testCompletion() { + ArrayList<Integer> list = new ArrayList<>(); + System.out.println(list); + + // ^ + + // step + // ; Test completion on the standard types. 
+ // type("list.") + // completion.trigger() + // assert completion.items.filter(label="add.*") + } +} diff --git a/tools/ide_query/prober_scripts/jvm/suite.textpb b/tools/ide_query/prober_scripts/jvm/suite.textpb new file mode 100644 index 0000000000..460e08ca31 --- /dev/null +++ b/tools/ide_query/prober_scripts/jvm/suite.textpb @@ -0,0 +1,4 @@ +tests: { + name: "general" + scripts: "build/make/tools/ide_query/prober_scripts/jvm/Foo.java" +} diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp index 9b134f22d4..8c710449f9 100644 --- a/tools/releasetools/Android.bp +++ b/tools/releasetools/Android.bp @@ -96,6 +96,7 @@ python_defaults { ], libs: [ "apex_manifest", + "releasetools_apex_utils", "releasetools_common", ], required: [ @@ -107,7 +108,7 @@ python_defaults { python_library_host { name: "ota_metadata_proto", srcs: [ - "ota_metadata.proto", + "ota_metadata.proto", ], proto: { canonical_path_from_root: false, @@ -117,7 +118,7 @@ python_library_host { cc_library_static { name: "ota_metadata_proto_cc", srcs: [ - "ota_metadata.proto", + "ota_metadata.proto", ], host_supported: true, recovery_available: true, @@ -144,7 +145,7 @@ java_library_static { static_libs: ["libprotobuf-java-nano"], }, }, - visibility: ["//frameworks/base:__subpackages__"] + visibility: ["//frameworks/base:__subpackages__"], } python_defaults { @@ -367,6 +368,9 @@ python_binary_host { libs: [ "ota_utils_lib", ], + required: [ + "signapk", + ], } python_binary_host { @@ -436,7 +440,7 @@ python_binary_host { name: "check_target_files_vintf", defaults: [ "releasetools_binary_defaults", - "releasetools_check_target_files_vintf_defaults" + "releasetools_check_target_files_vintf_defaults", ], } @@ -546,13 +550,15 @@ python_binary_host { defaults: ["releasetools_binary_defaults"], srcs: [ "sign_target_files_apks.py", - "payload_signer.py", - "ota_signing_utils.py", + "ota_from_raw_img.py", ], libs: [ "releasetools_add_img_to_target_files", "releasetools_apex_utils", 
"releasetools_common", + "ota_metadata_proto", + "ota_utils_lib", + "update_payload", ], } @@ -632,7 +638,7 @@ python_defaults { data: [ "testdata/**/*", ":com.android.apex.compressed.v1", - ":com.android.apex.vendor.foo.with_vintf" + ":com.android.apex.vendor.foo.with_vintf", ], target: { darwin: { diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py index b39a82cf45..c25ff2718c 100644 --- a/tools/releasetools/add_img_to_target_files.py +++ b/tools/releasetools/add_img_to_target_files.py @@ -464,6 +464,7 @@ def AddDtbo(output_zip): dtbo_prebuilt_path = os.path.join( OPTIONS.input_tmp, "PREBUILT_IMAGES", "dtbo.img") assert os.path.exists(dtbo_prebuilt_path) + os.makedirs(os.path.dirname(img.name), exist_ok=True) shutil.copy(dtbo_prebuilt_path, img.name) # AVB-sign the image as needed. diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py index 3abef3bece..54df955e9f 100644 --- a/tools/releasetools/apex_utils.py +++ b/tools/releasetools/apex_utils.py @@ -36,6 +36,8 @@ APEX_PAYLOAD_IMAGE = 'apex_payload.img' APEX_PUBKEY = 'apex_pubkey' +# Partitions supporting APEXes +PARTITIONS = ['system', 'system_ext', 'product', 'vendor', 'odm'] class ApexInfoError(Exception): """An Exception raised during Apex Information command.""" @@ -550,7 +552,7 @@ def GetApexInfoFromTargetFiles(input_file): if not isinstance(input_file, str): raise RuntimeError("must pass filepath to target-files zip or directory") apex_infos = [] - for partition in ['system', 'system_ext', 'product', 'vendor']: + for partition in PARTITIONS: apex_infos.extend(GetApexInfoForPartition(input_file, partition)) return apex_infos diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py index b8dcd8465c..dc123efb46 100755 --- a/tools/releasetools/check_target_files_vintf.py +++ b/tools/releasetools/check_target_files_vintf.py @@ -30,6 +30,7 @@ import subprocess import sys import 
zipfile +import apex_utils import common from apex_manifest import ParseApexManifest @@ -229,7 +230,7 @@ def PrepareApexDirectory(inp, dirmap): apex_host = os.path.join(OPTIONS.search_path, 'bin', 'apexd_host') cmd = [apex_host, '--tool_path', OPTIONS.search_path] cmd += ['--apex_path', dirmap['/apex']] - for p in ['system', 'system_ext', 'product', 'vendor']: + for p in apex_utils.PARTITIONS: if '/' + p in dirmap: cmd += ['--' + p + '_path', dirmap['/' + p]] common.RunAndCheckOutput(cmd) diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py index 4834834bcc..f04dfb703d 100644 --- a/tools/releasetools/common.py +++ b/tools/releasetools/common.py @@ -898,7 +898,7 @@ def LoadInfoDict(input_file, repacking=False): if key.endswith("selinux_fc"): fc_basename = os.path.basename(d[key]) fc_config = os.path.join(input_file, "META", fc_basename) - assert os.path.exists(fc_config) + assert os.path.exists(fc_config), "{} does not exist".format(fc_config) d[key] = fc_config @@ -907,9 +907,10 @@ def LoadInfoDict(input_file, repacking=False): d["root_fs_config"] = os.path.join( input_file, "META", "root_filesystem_config.txt") + partitions = ["system", "vendor", "system_ext", "product", "odm", + "vendor_dlkm", "odm_dlkm", "system_dlkm"] # Redirect {partition}_base_fs_file for each of the named partitions. 
- for part_name in ["system", "vendor", "system_ext", "product", "odm", - "vendor_dlkm", "odm_dlkm", "system_dlkm"]: + for part_name in partitions: key_name = part_name + "_base_fs_file" if key_name not in d: continue @@ -922,6 +923,25 @@ def LoadInfoDict(input_file, repacking=False): "Failed to find %s base fs file: %s", part_name, base_fs_file) del d[key_name] + # Redirecting helper for optional properties like erofs_compress_hints + def redirect_file(prop, filename): + if prop not in d: + return + config_file = os.path.join(input_file, "META/" + filename) + if os.path.exists(config_file): + d[prop] = config_file + else: + logger.warning( + "Failed to find %s fro %s", filename, prop) + del d[prop] + + # Redirect erofs_[default_]compress_hints files + redirect_file("erofs_default_compress_hints", + "erofs_default_compress_hints.txt") + for part in partitions: + redirect_file(part + "_erofs_compress_hints", + part + "_erofs_compress_hints.txt") + def makeint(key): if key in d: d[key] = int(d[key], 0) @@ -2434,12 +2454,23 @@ def GetMinSdkVersion(apk_name): "Failed to obtain minSdkVersion for {}: aapt2 return code {}:\n{}\n{}".format( apk_name, proc.returncode, stdoutdata, stderrdata)) + is_split_apk = False for line in stdoutdata.split("\n"): + # See b/353837347 , split APKs do not have sdk version defined, + # so we default to 21 as split APKs are only supported since SDK + # 21. + if (re.search(r"split=[\"'].*[\"']", line)): + is_split_apk = True # Due to ag/24161708, looking for lines such as minSdkVersion:'23',minSdkVersion:'M' # or sdkVersion:'23', sdkVersion:'M'. m = re.match(r'(?:minSdkVersion|sdkVersion):\'([^\']*)\'', line) if m: return m.group(1) + if is_split_apk: + logger.info("%s is a split APK, it does not have minimum SDK version" + " defined. 
Defaulting to 21 because split APK isn't supported" + " before that.", apk_name) + return 21 raise ExternalError("No minSdkVersion returned by aapt2 for apk: {}".format(apk_name)) @@ -2977,7 +3008,7 @@ def ZipWrite(zip_file, filename, arcname=None, perms=0o644, zipfile.ZIP64_LIMIT = saved_zip64_limit -def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None, +def ZipWriteStr(zip_file: zipfile.ZipFile, zinfo_or_arcname, data, perms=None, compress_type=None): """Wrap zipfile.writestr() function to work around the zip64 limit. @@ -3207,7 +3238,9 @@ class File(object): return t def WriteToDir(self, d): - with open(os.path.join(d, self.name), "wb") as fp: + output_path = os.path.join(d, self.name) + os.makedirs(os.path.dirname(output_path), exist_ok=True) + with open(output_path, "wb") as fp: fp.write(self.data) def AddToZip(self, z, compression=None): diff --git a/tools/releasetools/ota_from_raw_img.py b/tools/releasetools/ota_from_raw_img.py index 03b44f15d6..3b9374ab13 100644 --- a/tools/releasetools/ota_from_raw_img.py +++ b/tools/releasetools/ota_from_raw_img.py @@ -105,9 +105,6 @@ def main(argv): if args.package_key: logger.info("Signing payload...") - # TODO: remove OPTIONS when no longer used as fallback in payload_signer - common.OPTIONS.payload_signer_args = None - common.OPTIONS.payload_signer_maximum_signature_size = None signer = PayloadSigner(args.package_key, args.private_key_suffix, key_passwords[args.package_key], payload_signer=args.payload_signer, diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py index 985cd56cb0..6446e1ff59 100755 --- a/tools/releasetools/ota_from_target_files.py +++ b/tools/releasetools/ota_from_target_files.py @@ -264,6 +264,10 @@ A/B OTA specific options --compression_factor Specify the maximum block size to be compressed at once during OTA. 
supported options: 4k, 8k, 16k, 32k, 64k, 128k, 256k + + --full_ota_partitions + Specify list of partitions should be updated in full OTA fashion, even if + an incremental OTA is about to be generated """ from __future__ import print_function @@ -283,7 +287,7 @@ import common import ota_utils import payload_signer from ota_utils import (VABC_COMPRESSION_PARAM_SUPPORT, FinalizeMetadata, GetPackageMetadata, - PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir) + PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir, TARGET_FILES_IMAGES_SUBDIR) from common import DoesInputFileContain, IsSparseImage import target_files_diff from non_ab_ota import GenerateNonAbOtaPackage @@ -337,6 +341,7 @@ OPTIONS.security_patch_level = None OPTIONS.max_threads = None OPTIONS.vabc_cow_version = None OPTIONS.compression_factor = None +OPTIONS.full_ota_partitions = None POSTINSTALL_CONFIG = 'META/postinstall_config.txt' @@ -892,6 +897,14 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): if source_file is not None: source_file = ExtractTargetFiles(source_file) + if OPTIONS.full_ota_partitions: + for partition in OPTIONS.full_ota_partitions: + for subdir in TARGET_FILES_IMAGES_SUBDIR: + image_path = os.path.join(source_file, subdir, partition + ".img") + if os.path.exists(image_path): + logger.info( + "Ignoring source image %s for partition %s because it is configured to use full OTA", image_path, partition) + os.remove(image_path) assert "ab_partitions" in OPTIONS.source_info_dict, \ "META/ab_partitions.txt is required for ab_update." 
assert "ab_partitions" in OPTIONS.target_info_dict, \ @@ -1193,7 +1206,7 @@ def GenerateAbOtaPackage(target_file, output_file, source_file=None): def main(argv): - def option_handler(o, a): + def option_handler(o, a: str): if o in ("-i", "--incremental_from"): OPTIONS.incremental_source = a elif o == "--full_radio": @@ -1320,6 +1333,9 @@ def main(argv): else: raise ValueError("Cannot parse value %r for option %r - only " "integers are allowed." % (a, o)) + elif o == "--full_ota_partitions": + OPTIONS.full_ota_partitions = set( + a.strip().strip("\"").strip("'").split(",")) else: return False return True @@ -1370,6 +1386,7 @@ def main(argv): "max_threads=", "vabc_cow_version=", "compression_factor=", + "full_ota_partitions=", ], extra_option_handler=[option_handler, payload_signer.signer_options]) common.InitLogging() diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py index b8f848fb2b..4ad97e0108 100755 --- a/tools/releasetools/sign_target_files_apks.py +++ b/tools/releasetools/sign_target_files_apks.py @@ -184,14 +184,17 @@ import re import shutil import stat import sys +import shlex import tempfile import zipfile from xml.etree import ElementTree import add_img_to_target_files +import ota_from_raw_img import apex_utils import common import payload_signer +import update_payload from payload_signer import SignOtaPackage, PAYLOAD_BIN @@ -221,6 +224,7 @@ OPTIONS.vendor_otatools = None OPTIONS.allow_gsi_debug_sepolicy = False OPTIONS.override_apk_keys = None OPTIONS.override_apex_keys = None +OPTIONS.input_tmp = None AVB_FOOTER_ARGS_BY_PARTITION = { @@ -579,7 +583,104 @@ def IsBuildPropFile(filename): filename.endswith("/prop.default") -def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, +def GetOtaSigningArgs(): + args = [] + if OPTIONS.package_key: + args.extend(["--package_key", OPTIONS.package_key]) + if OPTIONS.payload_signer: + args.extend(["--payload_signer=" + 
OPTIONS.payload_signer]) + if OPTIONS.payload_signer_args: + args.extend(["--payload_signer_args=" + shlex.join(OPTIONS.payload_signer_args)]) + if OPTIONS.search_path: + args.extend(["--search_path", OPTIONS.search_path]) + if OPTIONS.payload_signer_maximum_signature_size: + args.extend(["--payload_signer_maximum_signature_size", + OPTIONS.payload_signer_maximum_signature_size]) + if OPTIONS.private_key_suffix: + args.extend(["--private_key_suffix", OPTIONS.private_key_suffix]) + return args + + +def RegenerateKernelPartitions(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info): + """Re-generate boot and dtbo partitions using new signing configuration""" + files_to_unzip = [ + "PREBUILT_IMAGES/*", "BOOTABLE_IMAGES/*.img", "*/boot_16k.img", "*/dtbo_16k.img"] + if OPTIONS.input_tmp is None: + OPTIONS.input_tmp = common.UnzipTemp(input_tf_zip.filename, files_to_unzip) + else: + common.UnzipToDir(input_tf_zip.filename, OPTIONS.input_tmp, files_to_unzip) + unzip_dir = OPTIONS.input_tmp + os.makedirs(os.path.join(unzip_dir, "IMAGES"), exist_ok=True) + + boot_image = common.GetBootableImage( + "IMAGES/boot.img", "boot.img", unzip_dir, "BOOT", misc_info) + if boot_image: + boot_image.WriteToDir(unzip_dir) + boot_image = os.path.join(unzip_dir, boot_image.name) + common.ZipWrite(output_tf_zip, boot_image, "IMAGES/boot.img", + compress_type=zipfile.ZIP_STORED) + if misc_info.get("has_dtbo") == "true": + add_img_to_target_files.AddDtbo(output_tf_zip) + return unzip_dir + + +def RegenerateBootOTA(input_tf_zip: zipfile.ZipFile, filename, input_ota): + with input_tf_zip.open(filename, "r") as in_fp: + payload = update_payload.Payload(in_fp) + is_incremental = any([part.HasField('old_partition_info') + for part in payload.manifest.partitions]) + is_boot_ota = filename.startswith( + "VENDOR/boot_otas/") or filename.startswith("SYSTEM/boot_otas/") + if not is_boot_ota: + return + is_4k_boot_ota = filename in [ + "VENDOR/boot_otas/boot_ota_4k.zip", 
"SYSTEM/boot_otas/boot_ota_4k.zip"] + # Only 4K boot image is re-generated, so if 16K boot ota isn't incremental, + # we do not need to re-generate + if not is_4k_boot_ota and not is_incremental: + return + + timestamp = str(payload.manifest.max_timestamp) + partitions = [part.partition_name for part in payload.manifest.partitions] + unzip_dir = OPTIONS.input_tmp + signed_boot_image = os.path.join(unzip_dir, "IMAGES", "boot.img") + if not os.path.exists(signed_boot_image): + logger.warn("Need to re-generate boot OTA {} but failed to get signed boot image. 16K dev option will be impacted, after rolling back to 4K user would need to sideload/flash their device to continue receiving OTAs.") + return + signed_dtbo_image = os.path.join(unzip_dir, "IMAGES", "dtbo.img") + if "dtbo" in partitions and not os.path.exists(signed_dtbo_image): + raise ValueError( + "Boot OTA {} has dtbo partition, but no dtbo image found in target files.".format(filename)) + if is_incremental: + signed_16k_boot_image = os.path.join( + unzip_dir, "IMAGES", "boot_16k.img") + signed_16k_dtbo_image = os.path.join( + unzip_dir, "IMAGES", "dtbo_16k.img") + if is_4k_boot_ota: + if os.path.exists(signed_16k_boot_image): + signed_boot_image = signed_16k_boot_image + ":" + signed_boot_image + if os.path.exists(signed_16k_dtbo_image): + signed_dtbo_image = signed_16k_dtbo_image + ":" + signed_dtbo_image + else: + if os.path.exists(signed_16k_boot_image): + signed_boot_image += ":" + signed_16k_boot_image + if os.path.exists(signed_16k_dtbo_image): + signed_dtbo_image += ":" + signed_16k_dtbo_image + + args = ["ota_from_raw_img", + "--max_timestamp", timestamp, "--output", input_ota.name] + args.extend(GetOtaSigningArgs()) + if "dtbo" in partitions: + args.extend(["--partition_name", "boot,dtbo", + signed_boot_image, signed_dtbo_image]) + else: + args.extend(["--partition_name", "boot", signed_boot_image]) + logger.info( + "Re-generating boot OTA {} using cmd {}".format(filename, args)) + 
ota_from_raw_img.main(args) + + +def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip: zipfile.ZipFile, misc_info, apk_keys, apex_keys, key_passwords, platform_api_level, codename_to_api_level_map, compressed_extension): @@ -593,6 +694,16 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, # Sets this to zero for targets without APK files. maxsize = 0 + # Replace the AVB signing keys, if any. + ReplaceAvbSigningKeys(misc_info) + OPTIONS.info_dict = misc_info + + # Rewrite the props in AVB signing args. + if misc_info.get('avb_enable') == 'true': + RewriteAvbProps(misc_info) + + RegenerateKernelPartitions(input_tf_zip, output_tf_zip, misc_info) + for info in input_tf_zip.infolist(): filename = info.filename if filename.startswith("IMAGES/"): @@ -603,10 +714,10 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, if filename.startswith("OTA/") and filename.endswith(".img"): continue - data = input_tf_zip.read(filename) - out_info = copy.copy(info) (is_apk, is_compressed, should_be_skipped) = GetApkFileInfo( filename, compressed_extension, OPTIONS.skip_apks_with_path_prefix) + data = input_tf_zip.read(filename) + out_info = copy.copy(info) if is_apk and should_be_skipped: # Copy skipped APKs verbatim. 
@@ -670,9 +781,8 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, elif filename.endswith(".zip") and IsEntryOtaPackage(input_tf_zip, filename): logger.info("Re-signing OTA package {}".format(filename)) with tempfile.NamedTemporaryFile() as input_ota, tempfile.NamedTemporaryFile() as output_ota: - with input_tf_zip.open(filename, "r") as in_fp: - shutil.copyfileobj(in_fp, input_ota) - input_ota.flush() + RegenerateBootOTA(input_tf_zip, filename, input_ota) + SignOtaPackage(input_ota.name, output_ota.name) common.ZipWrite(output_tf_zip, output_ota.name, filename, compress_type=zipfile.ZIP_STORED) @@ -811,17 +921,18 @@ def ProcessTargetFiles(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info, common.ZipWrite(output_tf_zip, image.name, filename) # A non-APK file; copy it verbatim. else: - common.ZipWriteStr(output_tf_zip, out_info, data) + try: + entry = output_tf_zip.getinfo(filename) + if output_tf_zip.read(entry) != data: + logger.warn( + "Output zip contains duplicate entries for %s with different contents", filename) + continue + except KeyError: + common.ZipWriteStr(output_tf_zip, out_info, data) if OPTIONS.replace_ota_keys: ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info) - # Replace the AVB signing keys, if any. - ReplaceAvbSigningKeys(misc_info) - - # Rewrite the props in AVB signing args. - if misc_info.get('avb_enable') == 'true': - RewriteAvbProps(misc_info) # Write back misc_info with the latest values. 
ReplaceMiscInfoTxt(input_tf_zip, output_tf_zip, misc_info) @@ -1066,9 +1177,9 @@ def WriteOtacerts(output_zip, filename, keys): common.ZipWriteStr(output_zip, filename, temp_file.getvalue()) -def ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info): +def ReplaceOtaKeys(input_tf_zip: zipfile.ZipFile, output_tf_zip, misc_info): try: - keylist = input_tf_zip.read("META/otakeys.txt").split() + keylist = input_tf_zip.read("META/otakeys.txt").decode().split() except KeyError: raise common.ExternalError("can't read META/otakeys.txt from input") diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp index 2b2b573234..4f6d3b7863 100644 --- a/tools/sbom/Android.bp +++ b/tools/sbom/Android.bp @@ -34,6 +34,31 @@ python_binary_host { } python_library_host { + name: "compliance_metadata", + srcs: [ + "compliance_metadata.py", + ], +} + +python_binary_host { + name: "gen_sbom", + srcs: [ + "gen_sbom.py", + ], + version: { + py3: { + embedded_launcher: true, + }, + }, + libs: [ + "compliance_metadata", + "metadata_file_proto_py", + "libprotobuf-python", + "sbom_lib", + ], +} + +python_library_host { name: "sbom_lib", srcs: [ "sbom_data.py", @@ -91,4 +116,18 @@ python_binary_host { libs: [ "sbom_lib", ], -}
\ No newline at end of file +} + +python_binary_host { + name: "gen_notice_xml", + srcs: [ + "gen_notice_xml.py", + ], + version: { + py3: { + embedded_launcher: true, + }, + }, + libs: [ + ], +} diff --git a/tools/sbom/compliance_metadata.py b/tools/sbom/compliance_metadata.py new file mode 100644 index 0000000000..9910217bbe --- /dev/null +++ b/tools/sbom/compliance_metadata.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python3 +# +# Copyright (C) 2024 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sqlite3 + +class MetadataDb: + def __init__(self, db): + self.conn = sqlite3.connect(':memory') + self.conn.row_factory = sqlite3.Row + with sqlite3.connect(db) as c: + c.backup(self.conn) + self.reorg() + + def reorg(self): + # package_license table + self.conn.execute("create table package_license as " + "select name as package, pkg_default_applicable_licenses as license " + "from modules " + "where module_type = 'package' ") + cursor = self.conn.execute("select package,license from package_license where license like '% %'") + multi_licenses_packages = cursor.fetchall() + cursor.close() + rows = [] + for p in multi_licenses_packages: + licenses = p['license'].strip().split(' ') + for lic in licenses: + rows.append((p['package'], lic)) + self.conn.executemany('insert into package_license values (?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from package_license where license like '% %'") + self.conn.commit() + + # module_license table + self.conn.execute("create table module_license as " + "select distinct name as module, package, licenses as license " + "from modules " + "where licenses != '' ") + cursor = self.conn.execute("select module,package,license from module_license where license like '% %'") + multi_licenses_modules = cursor.fetchall() + cursor.close() + rows = [] + for m in multi_licenses_modules: + licenses = m['license'].strip().split(' ') + for lic in licenses: + rows.append((m['module'], m['package'],lic)) + self.conn.executemany('insert into module_license values (?, ?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from module_license where license like '% %'") + self.conn.commit() + + # module_installed_file table + self.conn.execute("create table module_installed_file as " + "select id as module_id, name as module_name, package, installed_files as installed_file " + "from modules " + "where installed_files != '' ") + cursor = self.conn.execute("select module_id, module_name, package, installed_file " + 
"from module_installed_file where installed_file like '% %'") + multi_installed_file_modules = cursor.fetchall() + cursor.close() + rows = [] + for m in multi_installed_file_modules: + installed_files = m['installed_file'].strip().split(' ') + for f in installed_files: + rows.append((m['module_id'], m['module_name'], m['package'], f)) + self.conn.executemany('insert into module_installed_file values (?, ?, ?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from module_installed_file where installed_file like '% %'") + self.conn.commit() + + # module_built_file table + self.conn.execute("create table module_built_file as " + "select id as module_id, name as module_name, package, built_files as built_file " + "from modules " + "where built_files != '' ") + cursor = self.conn.execute("select module_id, module_name, package, built_file " + "from module_built_file where built_file like '% %'") + multi_built_file_modules = cursor.fetchall() + cursor.close() + rows = [] + for m in multi_built_file_modules: + built_files = m['installed_file'].strip().split(' ') + for f in built_files: + rows.append((m['module_id'], m['module_name'], m['package'], f)) + self.conn.executemany('insert into module_built_file values (?, ?, ?, ?)', rows) + self.conn.commit() + + self.conn.execute("delete from module_built_file where built_file like '% %'") + self.conn.commit() + + + # Indexes + self.conn.execute('create index idx_modules_id on modules (id)') + self.conn.execute('create index idx_modules_name on modules (name)') + self.conn.execute('create index idx_package_licnese_package on package_license (package)') + self.conn.execute('create index idx_package_licnese_license on package_license (license)') + self.conn.execute('create index idx_module_licnese_module on module_license (module)') + self.conn.execute('create index idx_module_licnese_license on module_license (license)') + self.conn.execute('create index idx_module_installed_file_module_id on module_installed_file 
(module_id)') + self.conn.execute('create index idx_module_installed_file_installed_file on module_installed_file (installed_file)') + self.conn.execute('create index idx_module_built_file_module_id on module_built_file (module_id)') + self.conn.execute('create index idx_module_built_file_built_file on module_built_file (built_file)') + self.conn.commit() + + def dump_debug_db(self, debug_db): + with sqlite3.connect(debug_db) as c: + self.conn.backup(c) + + def get_installed_files(self): + # Get all records from table make_metadata, which contains all installed files and corresponding make modules' metadata + cursor = self.conn.execute('select installed_file, module_path, is_prebuilt_make_module, product_copy_files, kernel_module_copy_files, is_platform_generated, license_text from make_metadata') + rows = cursor.fetchall() + cursor.close() + installed_files_metadata = [] + for row in rows: + metadata = dict(zip(row.keys(), row)) + installed_files_metadata.append(metadata) + return installed_files_metadata + + def get_soong_modules(self): + # Get all records from table modules, which contains metadata of all soong modules + cursor = self.conn.execute('select name, package, package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files from modules') + rows = cursor.fetchall() + cursor.close() + soong_modules = [] + for row in rows: + soong_module = dict(zip(row.keys(), row)) + soong_modules.append(soong_module) + return soong_modules + + def get_package_licenses(self, package): + cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' + 'from package_license pl join modules m on pl.license = m.name ' + 'where pl.package = ?', + ('//' + package,)) + rows = cursor.fetchall() + licenses = {} + for r in rows: + licenses[r['name']] = r['license_text'] + return licenses + + def get_module_licenses(self, module_name, package): + licenses = {} + # If property "licenses" is 
defined on module + cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' + 'from module_license ml join modules m on ml.license = m.name ' + 'where ml.module = ? and ml.package = ?', + (module_name, package)) + rows = cursor.fetchall() + for r in rows: + licenses[r['name']] = r['license_text'] + if len(licenses) > 0: + return licenses + + # Use default package license + cursor = self.conn.execute('select m.name, m.package, m.lic_license_text as license_text ' + 'from package_license pl join modules m on pl.license = m.name ' + 'where pl.package = ?', + ('//' + package,)) + rows = cursor.fetchall() + for r in rows: + licenses[r['name']] = r['license_text'] + return licenses + + def get_soong_module_of_installed_file(self, installed_file): + cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files ' + 'from modules m join module_installed_file mif on m.id = mif.module_id ' + 'where mif.installed_file = ?', + (installed_file,)) + rows = cursor.fetchall() + cursor.close() + if rows: + soong_module = dict(zip(rows[0].keys(), rows[0])) + return soong_module + + return None + + def get_soong_module_of_built_file(self, built_file): + cursor = self.conn.execute('select name, m.package, m.package as module_path, module_type as soong_module_type, built_files, installed_files, static_dep_files, whole_static_dep_files ' + 'from modules m join module_built_file mbf on m.id = mbf.module_id ' + 'where mbf.built_file = ?', + (built_file,)) + rows = cursor.fetchall() + cursor.close() + if rows: + soong_module = dict(zip(rows[0].keys(), rows[0])) + return soong_module + + return None
#!/usr/bin/env python3
#
# Copyright (C) 2024 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Generate NOTICE.xml.gz of a partition.
Usage example:
  gen_notice_xml.py --output_file out/soong/.intermediate/.../NOTICE.xml.gz \
      --metadata out/soong/compliance-metadata/aosp_cf_x86_64_phone/compliance-metadata.db \
      --partition system \
      --product_out out/target/vsoc_x86_64 \
      --soong_out out/soong
"""

import argparse


FILE_HEADER = '''\
<?xml version="1.0" encoding="utf-8"?>
<licenses>
'''
FILE_FOOTER = '''\
</licenses>
'''


def get_args():
  """Parse and return the command line arguments."""
  parser = argparse.ArgumentParser()
  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
  # default=False for consistency with gen_sbom.py; debug should be opt-in.
  parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
  parser.add_argument('--output_file', required=True, help='The path of the generated NOTICE.xml.gz file.')
  parser.add_argument('--partition', required=True, help='The name of partition for which the NOTICE.xml.gz is generated.')
  parser.add_argument('--metadata', required=True, help='The path of compliance metadata DB file.')
  parser.add_argument('--product_out', required=True, help='The path of PRODUCT_OUT, e.g. out/target/product/vsoc_x86_64.')
  parser.add_argument('--soong_out', required=True, help='The path of Soong output directory, e.g. out/soong')

  return parser.parse_args()


def log(*info):
  """Print each argument on its own line when --verbose is set."""
  if args.verbose:
    for i in info:
      print(i)


def new_file_name_tag(file_metadata, package_name, product_out=None):
  """Return the <file-name> element for one installed file.

  Args:
    file_metadata: metadata row with at least 'installed_file', the absolute
        build output path of the installed file.
    package_name: the license package name; falls back to 'Android' when falsy.
    product_out: PRODUCT_OUT prefix to strip from the installed path. Defaults
        to the parsed --product_out argument (module global `args`).
  """
  if product_out is None:
    product_out = args.product_out
  file_path = file_metadata['installed_file'].removeprefix(product_out)
  lib = package_name if package_name else 'Android'
  return f'<file-name contentId="" lib="{lib}">{file_path}</file-name>\n'


def new_file_content_tag():
  # Not implemented yet; <file-content> emission is a follow-up.
  pass


def main():
  """Entry point: write the (currently skeletal) NOTICE XML file."""
  global args
  args = get_args()
  log('Args:', vars(args))

  # NOTE(review): the output path is named *.xml.gz but this writes plain,
  # uncompressed text — confirm whether a later build step gzips it or
  # whether this should use gzip.open.
  with open(args.output_file, 'w', encoding="utf-8") as notice_xml_file:
    notice_xml_file.write(FILE_HEADER)
    notice_xml_file.write(FILE_FOOTER)


if __name__ == '__main__':
  main()
"""Generate the SBOM of the current target product in SPDX format.

Usage example:
  gen_sbom.py --output_file out/soong/sbom/aosp_cf_x86_64_phone/sbom.spdx \
      --metadata out/soong/metadata/aosp_cf_x86_64_phone/metadata.db \
      --product_out out/target/vsoc_x86_64
      --soong_out out/soong
      --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
      --product_mfr=Google
"""

import argparse
import compliance_metadata
import datetime
import google.protobuf.text_format as text_format
import hashlib
import os
import pathlib
import queue
import metadata_file_pb2
import sbom_data
import sbom_writers

# Package type
PKG_SOURCE = 'SOURCE'
PKG_UPSTREAM = 'UPSTREAM'
PKG_PREBUILT = 'PREBUILT'

# Security tag
NVD_CPE23 = 'NVD-CPE2.3:'

# Report section titles; each is a key into the `report` dict built in main().
ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-existent installed files:'
ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP = 'No module found for static dependency files:'
INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'

# Soong module types treated as prebuilts when deciding package kind.
SOONG_PREBUILT_MODULE_TYPES = [
    'android_app_import',
    'android_library_import',
    'cc_prebuilt_binary',
    'cc_prebuilt_library',
    'cc_prebuilt_library_headers',
    'cc_prebuilt_library_shared',
    'cc_prebuilt_library_static',
    'cc_prebuilt_object',
    'dex_import',
    'java_import',
    'java_sdk_library_import',
    'java_system_modules_import',
    'libclang_rt_prebuilt_library_static',
    'libclang_rt_prebuilt_library_shared',
    'llvm_prebuilt_library_static',
    'ndk_prebuilt_object',
    'ndk_prebuilt_shared_stl',
    'nkd_prebuilt_static_stl',  # (sic) spelling kept to match Soong module type
    'prebuilt_apex',
    'prebuilt_bootclasspath_fragment',
    'prebuilt_dsp',
    'prebuilt_firmware',
    'prebuilt_kernel_modules',
    'prebuilt_rfsa',
    'prebuilt_root',
    'rust_prebuilt_dylib',
    'rust_prebuilt_library',
    'rust_prebuilt_rlib',
    'vndk_prebuilt_shared',
]

THIRD_PARTY_IDENTIFIER_TYPES = [
    # Types defined in metadata_file.proto
    'Git',
    'SVN',
    'Hg',
    'Darcs',
    'VCS',
    'Archive',
    'PrebuiltByAlphabet',
    'LocalSource',
    'Other',
    # OSV ecosystems defined at https://ossf.github.io/osv-schema/#affectedpackage-field.
    'Go',
    'npm',
    'OSS-Fuzz',
    'PyPI',
    'RubyGems',
    'crates.io',
    'Hackage',
    'GHC',
    'Packagist',
    'Maven',
    'NuGet',
    'Linux',
    'Debian',
    'Alpine',
    'Hex',
    'Android',
    'GitHub Actions',
    'Pub',
    'ConanCenter',
    'Rocky Linux',
    'AlmaLinux',
    'Bitnami',
    'Photon OS',
    'CRAN',
    'Bioconductor',
    'SwiftURL'
]


def get_args():
  """Parse and return the command line arguments."""
  parser = argparse.ArgumentParser()
  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
  parser.add_argument('-d', '--debug', action='store_true', default=False, help='Debug mode')
  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
  parser.add_argument('--metadata', required=True, help='The metadata DB file path.')
  parser.add_argument('--product_out', required=True, help='The path of PRODUCT_OUT, e.g. out/target/product/vsoc_x86_64.')
  parser.add_argument('--soong_out', required=True, help='The path of Soong output directory, e.g. out/soong')
  parser.add_argument('--build_version', required=True, help='The build version.')
  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
  parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')

  return parser.parse_args()


def log(*info):
  """Print each argument on its own line when --verbose is set.

  Reads the module global `args` assigned in main().
  """
  if args.verbose:
    for i in info:
      print(i)


def new_package_id(package_name, type):
  """Return an SPDX package id of the form SPDXRef-<TYPE>-<escaped name>."""
  return f'SPDXRef-{type}-{sbom_data.encode_for_spdxid(package_name)}'


def new_file_id(file_path):
  """Return an SPDX file id derived from the (escaped) file path."""
  return f'SPDXRef-{sbom_data.encode_for_spdxid(file_path)}'


def new_license_id(license_name):
  """Return an SPDX license reference id for a non-standard license name."""
  return f'LicenseRef-{sbom_data.encode_for_spdxid(license_name)}'


def checksum(file_path):
  """Return 'SHA1: <hexdigest>' of a file.

  For symlinks the digest is computed over the link target string, not the
  pointed-to file's contents.
  """
  h = hashlib.sha1()
  if os.path.islink(file_path):
    h.update(os.readlink(file_path).encode('utf-8'))
  else:
    with open(file_path, 'rb') as f:
      h.update(f.read())
  return f'SHA1: {h.hexdigest()}'


def is_soong_prebuilt_module(file_metadata):
  """True when the file's Soong module type is a known prebuilt type."""
  return (file_metadata['soong_module_type'] and
          file_metadata['soong_module_type'] in SOONG_PREBUILT_MODULE_TYPES)


def is_source_package(file_metadata):
  """True for files built from source under external/ (and not prebuilt)."""
  module_path = file_metadata['module_path']
  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)


def is_prebuilt_package(file_metadata):
  """True for files from prebuilts/, prebuilt Soong/Make modules, or real kernel modules."""
  module_path = file_metadata['module_path']
  if module_path:
    return (module_path.startswith('prebuilts/') or
            is_soong_prebuilt_module(file_metadata) or
            file_metadata['is_prebuilt_make_module'])

  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
  # ANDROID-GEN: entries are build-generated stubs, not prebuilt kernel modules.
  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
    return True

  return False


def get_source_package_info(file_metadata, metadata_file_path):
  """Return source package info exists in its METADATA file, currently including name, security tag
  and external SBOM reference.

  See go/android-spdx and go/android-sbom-gen for more details.
  """
  if not metadata_file_path:
    return file_metadata['module_path'], []

  # Proto was cached by report_metadata_file(); KeyError here would mean the
  # METADATA file was never parsed for this path.
  metadata_proto = metadata_file_protos[metadata_file_path]
  external_refs = []
  for tag in metadata_proto.third_party.security.tag:
    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
      external_refs.append(
          sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
                                       type=sbom_data.PackageExternalRefType.cpe23Type,
                                       locator=tag.removeprefix(NVD_CPE23)))
    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
      external_refs.append(
          sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
                                       type=sbom_data.PackageExternalRefType.cpe22Type,
                                       locator=tag.removeprefix(NVD_CPE23)))

  if metadata_proto.name:
    return metadata_proto.name, external_refs
  else:
    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name


def get_prebuilt_package_name(file_metadata, metadata_file_path):
  """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
  module path or kernel module's source path if the installed file is a kernel module.

  See go/android-spdx and go/android-sbom-gen for more details.
  """
  name = None
  if metadata_file_path:
    metadata_proto = metadata_file_protos[metadata_file_path]
    if metadata_proto.name:
      name = metadata_proto.name
    else:
      name = metadata_file_path
  elif file_metadata['module_path']:
    name = file_metadata['module_path']
  elif file_metadata['kernel_module_copy_files']:
    # Format: <source path>:<dest path>; use the source directory as the name.
    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
    name = os.path.dirname(src_path)

  # NOTE(review): assumes one of the three sources above always yields a name
  # (callers gate on is_prebuilt_package); name=None would raise here.
  return name.removeprefix('prebuilts/').replace('/', '-')


def get_metadata_file_path(file_metadata):
  """Search for METADATA file of a package and return its path."""
  metadata_path = ''
  if file_metadata['module_path']:
    metadata_path = file_metadata['module_path']
  elif file_metadata['kernel_module_copy_files']:
    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])

  # Walk up the directory tree until a METADATA file is found (or path empties).
  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
    metadata_path = os.path.dirname(metadata_path)

  return metadata_path


def get_package_version(metadata_file_path):
  """Return a package's version in its METADATA file."""
  if not metadata_file_path:
    return None
  metadata_proto = metadata_file_protos[metadata_file_path]
  return metadata_proto.third_party.version


def get_package_homepage(metadata_file_path):
  """Return a package's homepage URL in its METADATA file."""
  if not metadata_file_path:
    return None
  metadata_proto = metadata_file_protos[metadata_file_path]
  if metadata_proto.third_party.homepage:
    return metadata_proto.third_party.homepage
  # Fall back to the deprecated url list, looking for a HOMEPAGE-typed entry.
  for url in metadata_proto.third_party.url:
    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
      return url.value

  return None


def get_package_download_location(metadata_file_path):
  """Return a package's code repository URL in its METADATA file."""
  if not metadata_file_path:
    return None
  metadata_proto = metadata_file_protos[metadata_file_path]
  if metadata_proto.third_party.url:
    # Sort by URL type so a non-HOMEPAGE (i.e. repository) URL sorts usably;
    # skip the homepage entry if it sorted first.
    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
      return urls[0].value
    elif len(urls) > 1:
      return urls[1].value

  return None


def get_license_text(license_files):
  """Concatenate the text of the given license files.

  In --debug mode only the file names are embedded, keeping output small.
  """
  license_text = ''
  for license_file in license_files:
    if args.debug:
      license_text += '#### Content from ' + license_file + '\n'
    else:
      license_text += pathlib.Path(license_file).read_text(errors='replace') + '\n\n'
  return license_text


def get_sbom_fragments(installed_file_metadata, metadata_file_path):
  """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
  package, a UPSTREAM package and an external SBOM document reference if sbom_ref defined in its
  METADATA file.

  See go/android-spdx and go/android-sbom-gen for more details.
  """
  external_doc_ref = None
  packages = []
  relationships = []
  licenses = []

  # Info from METADATA file
  homepage = get_package_homepage(metadata_file_path)
  version = get_package_version(metadata_file_path)
  download_location = get_package_download_location(metadata_file_path)

  # Prefer licenses declared on the module's own package; fall back to the
  # package directory holding the METADATA file.
  lics = db.get_package_licenses(installed_file_metadata['module_path'])
  if not lics:
    lics = db.get_package_licenses(metadata_file_path)

  if lics:
    for license_name, license_files in lics.items():
      if not license_files:
        continue
      license_id = new_license_id(license_name)
      # licenses_text memoizes license file contents across packages.
      if license_name not in licenses_text:
        licenses_text[license_name] = get_license_text(license_files.split(' '))
      licenses.append(sbom_data.License(id=license_id, name=license_name, text=licenses_text[license_name]))

  if is_source_package(installed_file_metadata):
    # Source fork packages
    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
    source_package_id = new_package_id(name, PKG_SOURCE)
    source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
                                       download_location=sbom_data.VALUE_NONE,
                                       supplier='Organization: ' + args.product_mfr,
                                       external_refs=external_refs)

    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
    # NOTE(review): supplier is built from the homepage URL here — confirm
    # this is intended rather than a dedicated supplier/owner field.
    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
                                         supplier=(
                                             'Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
                                         download_location=download_location)
    packages += [source_package, upstream_package]
    relationships.append(sbom_data.Relationship(id1=source_package_id,
                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
                                                id2=upstream_package_id))

    for license in licenses:
      source_package.declared_license_ids.append(license.id)
      upstream_package.declared_license_ids.append(license.id)

  elif is_prebuilt_package(installed_file_metadata):
    # Prebuilt fork packages
    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
    prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
                                         name=name,
                                         download_location=sbom_data.VALUE_NONE,
                                         version=version if version else args.build_version,
                                         supplier='Organization: ' + args.product_mfr)

    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
                                         supplier=(
                                             'Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
                                         download_location=download_location)
    packages += [prebuilt_package, upstream_package]
    relationships.append(sbom_data.Relationship(id1=prebuilt_package_id,
                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
                                                id2=upstream_package_id))
    for license in licenses:
      prebuilt_package.declared_license_ids.append(license.id)
      upstream_package.declared_license_ids.append(license.id)

    # Link the upstream package to an externally published SBOM document when
    # the METADATA declares an sbom_ref with url, checksum and element id.
    if metadata_file_path:
      metadata_proto = metadata_file_protos[metadata_file_path]
      if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
        sbom_url = metadata_proto.third_party.sbom_ref.url
        sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
        upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
        if sbom_url and sbom_checksum and upstream_element_id:
          doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{sbom_data.encode_for_spdxid(name)}'
          external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
                                                                 uri=sbom_url,
                                                                 checksum=sbom_checksum)
          relationships.append(
              sbom_data.Relationship(id1=upstream_package_id,
                                     relationship=sbom_data.RelationshipType.VARIANT_OF,
                                     id2=doc_ref_id + ':' + upstream_element_id))

  return external_doc_ref, packages, relationships, licenses


def save_report(report_file_path, report):
  """Write the issue/info report as sections of tab-indented lines."""
  with open(report_file_path, 'w', encoding='utf-8') as report_file:
    for type, issues in report.items():
      report_file.write(type + '\n')
      for issue in issues:
        report_file.write('\t' + issue + '\n')
      report_file.write('\n')


# Validate the metadata generated by Make for installed files and report if there is no metadata.
def installed_file_has_metadata(installed_file_metadata, report):
  """Return True when the file has any provenance metadata; record an issue otherwise."""
  installed_file = installed_file_metadata['installed_file']
  module_path = installed_file_metadata['module_path']
  product_copy_files = installed_file_metadata['product_copy_files']
  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
  is_platform_generated = installed_file_metadata['is_platform_generated']

  # .fsv_meta files are fs-verity artifacts generated by the platform build.
  if (not module_path and
      not product_copy_files and
      not kernel_module_copy_files and
      not is_platform_generated and
      not installed_file.endswith('.fsv_meta')):
    report[ISSUE_NO_METADATA].append(installed_file)
    return False

  return True
def validate_package_metadata(metadata_file_path, package_metadata):
  """Validate identifiers in a package's METADATA.

  1) Only known identifier type is allowed.
  2) Only one identifier's primary_source can be true.

  Exits the process with an error message on violation.
  """
  # Local import: the module does not import sys at the top level, so a bare
  # sys.exit here would raise NameError instead of reporting the error.
  import sys
  primary_source_found = False
  for identifier in package_metadata.third_party.identifier:
    if identifier.type not in THIRD_PARTY_IDENTIFIER_TYPES:
      sys.exit(f'Unknown value of third_party.identifier.type in {metadata_file_path}/METADATA: {identifier.type}.')
    if primary_source_found and identifier.primary_source:
      sys.exit(
          f'Field "primary_source" is set to true in multiple third_party.identifier in {metadata_file_path}/METADATA.')
    # Keep the flag sticky: a later primary_source=False must not reset it,
    # otherwise two primary_source=True identifiers could go undetected.
    primary_source_found = primary_source_found or identifier.primary_source


def report_metadata_file(metadata_file_path, installed_file_metadata, report):
  """Parse, cache and sanity-check the METADATA file of a package; record findings in report."""
  if metadata_file_path:
    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
            installed_file_metadata['installed_file'],
            installed_file_metadata['module_path'],
            metadata_file_path + '/METADATA'))

    package_metadata = metadata_file_pb2.Metadata()
    with open(metadata_file_path + '/METADATA', 'rt') as f:
      text_format.Parse(f.read(), package_metadata)

    validate_package_metadata(metadata_file_path, package_metadata)

    # Cache the parsed proto so later lookups (versions, URLs, sbom_ref)
    # do not re-read and re-parse the file.
    if metadata_file_path not in metadata_file_protos:
      metadata_file_protos[metadata_file_path] = package_metadata
      if not package_metadata.name:
        report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not has "name"')

      if not package_metadata.third_party.version:
        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
            f'{metadata_file_path}/METADATA does not has "third_party.version"')

      for tag in package_metadata.third_party.security.tag:
        if not tag.startswith(NVD_CPE23):
          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
  else:
    report[ISSUE_NO_METADATA_FILE].append(
        "installed_file: {}, module_path: {}".format(
            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))


# If a file is from a source fork or prebuilt fork package, add its package information to SBOM
def add_package_of_file(file_id, file_metadata, doc, report):
  """Attach the source/prebuilt package fragments of one file to the SPDX document."""
  metadata_file_path = get_metadata_file_path(file_metadata)
  report_metadata_file(metadata_file_path, file_metadata, report)

  external_doc_ref, pkgs, rels, licenses = get_sbom_fragments(file_metadata, metadata_file_path)
  if len(pkgs) > 0:
    if external_doc_ref:
      doc.add_external_ref(external_doc_ref)
    for p in pkgs:
      doc.add_package(p)
    for rel in rels:
      doc.add_relationship(rel)
    fork_package_id = pkgs[0].id  # The first package should be the source/prebuilt fork package
    doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                id2=fork_package_id))
    for license in licenses:
      doc.add_license(license)


# Add STATIC_LINK relationship for static dependencies of a file
def add_static_deps_of_file(file_id, file_metadata, doc):
  """Record STATIC_LINK relationships from a file to its (whole_)static dep files."""
  if not file_metadata['static_dep_files'] and not file_metadata['whole_static_dep_files']:
    return
  static_dep_files = []
  if file_metadata['static_dep_files']:
    static_dep_files += file_metadata['static_dep_files'].split(' ')
  if file_metadata['whole_static_dep_files']:
    static_dep_files += file_metadata['whole_static_dep_files'].split(' ')

  for dep_file in static_dep_files:
    # Static libs are not shipped on devices, so names are derived from .intermediates paths.
    doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                relationship=sbom_data.RelationshipType.STATIC_LINK,
                                                id2=new_file_id(
                                                    dep_file.removeprefix(args.soong_out + '/.intermediates/'))))


def add_licenses_of_file(file_id, file_metadata, doc):
  """Attach module-declared licenses to a file record in the SPDX document."""
  lics = db.get_module_licenses(file_metadata.get('name', ''), file_metadata['module_path'])
  if lics:
    file = next(f for f in doc.files if file_id == f.id)
    for license_name, license_files in lics.items():
      if not license_files:
        continue
      license_id = new_license_id(license_name)
      file.concluded_license_ids.append(license_id)
      # licenses_text memoizes license file contents across files/packages.
      if license_name not in licenses_text:
        license_text = get_license_text(license_files.split(' '))
        licenses_text[license_name] = license_text

      doc.add_license(sbom_data.License(id=license_id, name=license_name, text=licenses_text[license_name]))


def get_all_transitive_static_dep_files_of_installed_files(installed_files_metadata, db, report):
  """BFS over static dependency files of all installed files.

  Returns the sorted list of dep files whose Soong module is resolvable;
  unresolvable ones are recorded under ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP.
  """
  # Seed the queue with the direct static deps of every installed file.
  q = queue.Queue()
  for installed_file_metadata in installed_files_metadata:
    if installed_file_metadata['static_dep_files']:
      for f in installed_file_metadata['static_dep_files'].split(' '):
        q.put(f)
    if installed_file_metadata['whole_static_dep_files']:
      for f in installed_file_metadata['whole_static_dep_files'].split(' '):
        q.put(f)

  visited = {}  # every dep file seen, to avoid re-processing cycles/diamonds
  resolved = {}  # dep files whose Soong module was found; these are returned
  while not q.empty():
    dep_file = q.get()
    if dep_file in visited:
      # It has been processed
      continue

    visited[dep_file] = True
    soong_module = db.get_soong_module_of_built_file(dep_file)
    if not soong_module:
      # This should not happen. Report it (the current file, not a stale loop
      # variable) and exclude it from the result: main() cannot merge module
      # metadata for a file without a module.
      report[ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP].append(dep_file)
      continue
    resolved[dep_file] = True

    if soong_module['static_dep_files']:
      for f in soong_module['static_dep_files'].split(' '):
        if f not in visited:
          q.put(f)
    if soong_module['whole_static_dep_files']:
      for f in soong_module['whole_static_dep_files'].split(' '):
        if f not in visited:
          q.put(f)

  return sorted(resolved.keys())


def main():
  """Generate the SPDX SBOM plus a findings report for the current product."""
  global args
  args = get_args()
  log('Args:', vars(args))

  global db
  db = compliance_metadata.MetadataDb(args.metadata)
  if args.debug:
    db.dump_debug_db(os.path.dirname(args.output_file) + '/compliance-metadata-debug.db')

  # Module-level caches shared by the helper functions above.
  global metadata_file_protos
  metadata_file_protos = {}
  global licenses_text
  licenses_text = {}

  product_package_id = sbom_data.SPDXID_PRODUCT
  product_package_name = sbom_data.PACKAGE_NAME_PRODUCT
  product_package = sbom_data.Package(id=product_package_id,
                                      name=product_package_name,
                                      download_location=sbom_data.VALUE_NONE,
                                      version=args.build_version,
                                      supplier='Organization: ' + args.product_mfr,
                                      files_analyzed=True)
  doc_name = args.build_version
  doc = sbom_data.Document(name=doc_name,
                           namespace=f'https://www.google.com/sbom/spdx/android/{doc_name}',
                           creators=['Organization: ' + args.product_mfr],
                           describes=product_package_id)

  doc.packages.append(product_package)
  # The PLATFORM package owns all platform-built files; Apache-2.0 declared.
  doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
                                        name=sbom_data.PACKAGE_NAME_PLATFORM,
                                        download_location=sbom_data.VALUE_NONE,
                                        version=args.build_version,
                                        supplier='Organization: ' + args.product_mfr,
                                        declared_license_ids=[sbom_data.SPDXID_LICENSE_APACHE]))

  # Report on some issues and information
  report = {
      ISSUE_NO_METADATA: [],
      ISSUE_NO_METADATA_FILE: [],
      ISSUE_METADATA_FILE_INCOMPLETE: [],
      ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
      ISSUE_INSTALLED_FILE_NOT_EXIST: [],
      ISSUE_NO_MODULE_FOUND_FOR_STATIC_DEP: [],
      INFO_METADATA_FOUND_FOR_PACKAGE: [],
  }

  # Get installed files and corresponding make modules' metadata if an installed file is from a make module.
  installed_files_metadata = db.get_installed_files()

  # Find which Soong module an installed file is from and merge metadata from Make and Soong
  for installed_file_metadata in installed_files_metadata:
    soong_module = db.get_soong_module_of_installed_file(installed_file_metadata['installed_file'])
    if soong_module:
      # Merge soong metadata to make metadata
      installed_file_metadata.update(soong_module)
    else:
      # For make modules soong_module_type should be empty
      installed_file_metadata['soong_module_type'] = ''
      installed_file_metadata['static_dep_files'] = ''
      installed_file_metadata['whole_static_dep_files'] = ''

  # Scan the metadata and create the corresponding package and file records in SPDX
  for installed_file_metadata in installed_files_metadata:
    installed_file = installed_file_metadata['installed_file']
    module_path = installed_file_metadata['module_path']
    product_copy_files = installed_file_metadata['product_copy_files']
    kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
    build_output_path = installed_file
    installed_file = installed_file.removeprefix(args.product_out)

    if not installed_file_has_metadata(installed_file_metadata, report):
      continue
    if not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
      report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
      continue

    file_id = new_file_id(installed_file)
    sha1 = checksum(build_output_path)
    f = sbom_data.File(id=file_id, name=installed_file, checksum=sha1)
    doc.files.append(f)
    product_package.file_ids.append(file_id)

    if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
      add_package_of_file(file_id, installed_file_metadata, doc, report)

    elif module_path or installed_file_metadata['is_platform_generated']:
      # File from PLATFORM package
      doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                  id2=sbom_data.SPDXID_PLATFORM))
      if installed_file_metadata['is_platform_generated']:
        f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]

    elif product_copy_files:
      # Format of product_copy_files: <source path>:<dest path>
      # So far product_copy_files are copied from directory system, kernel, hardware, frameworks and device,
      # so process them as files from PLATFORM package
      doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                  id2=sbom_data.SPDXID_PLATFORM))
      if installed_file_metadata['license_text']:
        if installed_file_metadata['license_text'] == 'build/soong/licenses/LICENSE':
          f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]

    elif installed_file.endswith('.fsv_meta'):
      # fs-verity metadata is generated by the platform build.
      doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                  id2=sbom_data.SPDXID_PLATFORM))
      f.concluded_license_ids = [sbom_data.SPDXID_LICENSE_APACHE]

    elif kernel_module_copy_files.startswith('ANDROID-GEN'):
      # For the four files generated for _dlkm, _ramdisk partitions
      doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                  relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                  id2=sbom_data.SPDXID_PLATFORM))

    # Process static dependencies of the installed file
    add_static_deps_of_file(file_id, installed_file_metadata, doc)

    # Add licenses of the installed file
    add_licenses_of_file(file_id, installed_file_metadata, doc)

  # Add all static library files to SBOM
  for dep_file in get_all_transitive_static_dep_files_of_installed_files(installed_files_metadata, db, report):
    filepath = dep_file.removeprefix(args.soong_out + '/.intermediates/')
    file_id = new_file_id(filepath)
    # SHA1 of empty string. Sometimes .a files might not be built.
    sha1 = 'SHA1: da39a3ee5e6b4b0d3255bfef95601890afd80709'
    if os.path.islink(dep_file) or os.path.isfile(dep_file):
      sha1 = checksum(dep_file)
    doc.files.append(sbom_data.File(id=file_id,
                                    name=filepath,
                                    checksum=sha1))
    file_metadata = {
        'installed_file': dep_file,
        'is_prebuilt_make_module': False
    }
    # Resolvable by construction: unresolvable dep files were filtered out
    # (and reported) by get_all_transitive_static_dep_files_of_installed_files.
    file_metadata.update(db.get_soong_module_of_built_file(dep_file))
    add_package_of_file(file_id, file_metadata, doc, report)

    # Add relationships for static deps of static libraries
    add_static_deps_of_file(file_id, file_metadata, doc)

    # Add licenses of the static lib
    add_licenses_of_file(file_id, file_metadata, doc)

  # Save SBOM records to output file
  doc.generate_packages_verification_code()
  doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
  prefix = args.output_file
  if prefix.endswith('.spdx'):
    prefix = prefix.removesuffix('.spdx')
  elif prefix.endswith('.spdx.json'):
    prefix = prefix.removesuffix('.spdx.json')

  output_file = prefix + '.spdx'
  with open(output_file, 'w', encoding="utf-8") as file:
    sbom_writers.TagValueWriter.write(doc, file)
  if args.json:
    with open(prefix + '.spdx.json', 'w', encoding="utf-8") as file:
      sbom_writers.JSONWriter.write(doc, file)

  save_report(prefix + '-gen-report.txt', report)


if __name__ == '__main__':
  main()
[ diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java index 6b2341bc80..654e19675d 100644 --- a/tools/signapk/src/com/android/signapk/SignApk.java +++ b/tools/signapk/src/com/android/signapk/SignApk.java @@ -302,7 +302,6 @@ class SignApk { final KeyStore keyStore, final String keyName) throws CertificateException, KeyStoreException, NoSuchAlgorithmException, UnrecoverableKeyException, UnrecoverableEntryException { - final Key key = keyStore.getKey(keyName, readPassword(keyName)); final PrivateKeyEntry privateKeyEntry = (PrivateKeyEntry) keyStore.getEntry(keyName, null); if (privateKeyEntry == null) { throw new Error( |